aboutsummaryrefslogtreecommitdiff
path: root/src/client/views/nodes/RecordingBox/RecordingView.tsx
diff options
context:
space:
mode:
authorljungster <parkerljung@gmail.com>2022-08-09 11:52:07 -0500
committerljungster <parkerljung@gmail.com>2022-08-09 11:52:07 -0500
commitda3cb00f809a482a9fdf732f6a656fbc467cce27 (patch)
tree9eb1fd278bc71d080d71bbfb7e3aec482d35f439 /src/client/views/nodes/RecordingBox/RecordingView.tsx
parent1638527259a072dfc2ab286bd27bbb1751e8434e (diff)
parent26670c8b9eb6e2fd981c3a0997bff5556b60504b (diff)
Merge branch 'parker' of https://github.com/brown-dash/Dash-Web into parker
Diffstat (limited to 'src/client/views/nodes/RecordingBox/RecordingView.tsx')
-rw-r--r--src/client/views/nodes/RecordingBox/RecordingView.tsx271
1 files changed, 271 insertions, 0 deletions
diff --git a/src/client/views/nodes/RecordingBox/RecordingView.tsx b/src/client/views/nodes/RecordingBox/RecordingView.tsx
new file mode 100644
index 000000000..ec5917b9e
--- /dev/null
+++ b/src/client/views/nodes/RecordingBox/RecordingView.tsx
@@ -0,0 +1,271 @@
+import * as React from 'react';
+import "./RecordingView.scss";
+import { useEffect, useRef, useState } from "react";
+import { ProgressBar } from "./ProgressBar"
+import { MdBackspace } from 'react-icons/md';
+import { FaCheckCircle } from 'react-icons/fa';
+import { IconContext } from "react-icons";
+import { Networking } from '../../../Network';
+import { Upload } from '../../../../server/SharedMediaTypes';
+import { returnFalse, returnTrue, setupMoveUpEvents } from '../../../../Utils';
+import { Presentation, TrackMovements } from '../../../util/TrackMovements';
+
/**
 * One recorded segment of the session.
 * Segments are concatenated server-side into a single video when recording finishes.
 */
export interface MediaSegment {
    // Raw Blob chunks emitted by the MediaRecorder for this segment.
    videoChunks: any[],
    // End of the segment on the shared recording timer (0.01s ticks).
    endTime: number,
    // Start of the segment on the shared recording timer (0.01s ticks).
    startTime: number,
    // Screen-movement capture for this segment, present only when "Track Screen" was enabled.
    presentation?: Presentation,
}
+
interface IRecordingViewProps {
    // Called with the concatenated video's server access paths (and the merged
    // presentation, when screen tracking was on) once processing completes.
    setResult: (info: Upload.AccessPathInfo, presentation?: Presentation) => void
    // Reports the total recorded duration in seconds.
    setDuration: (seconds: number) => void
    // Unique id used to key the underlying <video> element.
    id: string
}
+
+const MAXTIME = 100000;
+
+export function RecordingView(props: IRecordingViewProps) {
+
+ const [recording, setRecording] = useState(false);
+ const recordingTimerRef = useRef<number>(0);
+ const [recordingTimer, setRecordingTimer] = useState(0); // unit is 0.01 second
+ const [playing, setPlaying] = useState(false);
+ const [progress, setProgress] = useState(0);
+
+ // acts as a "refresh state" to tell progressBar when to undo
+ const [doUndo, setDoUndo] = useState(false);
+ // whether an undo can occur or not
+ const [canUndo, setCanUndo] = useState(false);
+
+ const [videos, setVideos] = useState<MediaSegment[]>([]);
+ const [orderVideos, setOrderVideos] = useState<boolean>(false);
+ const videoRecorder = useRef<MediaRecorder | null>(null);
+ const videoElementRef = useRef<HTMLVideoElement | null>(null);
+
+ const [finished, setFinished] = useState<boolean>(false);
+ const [trackScreen, setTrackScreen] = useState<boolean>(false);
+
+
+
+ const DEFAULT_MEDIA_CONSTRAINTS = {
+ video: {
+ width: 1280,
+ height: 720,
+
+ },
+ audio: {
+ echoCancellation: true,
+ noiseSuppression: true,
+ sampleRate: 44100
+ }
+ };
+
+ useEffect(() => {
+ if (finished) {
+ // make the total presentation that'll match the concatted video
+ let concatPres = trackScreen && TrackMovements.Instance.concatPresentations(videos.map(v => v.presentation as Presentation));
+
+ // this async function uses the server to create the concatted video and then sets the result to it's accessPaths
+ (async () => {
+ const videoFiles = videos.map((vid, i) => new File(vid.videoChunks, `segvideo${i}.mkv`, { type: vid.videoChunks[0].type, lastModified: Date.now() }));
+
+ // upload the segments to the server and get their server access paths
+ const serverPaths: string[] = (await Networking.UploadFilesToServer(videoFiles))
+ .map(res => (res.result instanceof Error) ? '' : res.result.accessPaths.agnostic.server)
+
+ // concat the segments together using post call
+ const result: Upload.AccessPathInfo | Error = await Networking.PostToServer('/concatVideos', serverPaths);
+ !(result instanceof Error) ? props.setResult(result, concatPres || undefined) : console.error("video conversion failed");
+ })();
+ }
+ }, [videos]);
+
+ // this will call upon the progress bar to edit videos to be in the correct order
+ useEffect(() => {
+ finished && setOrderVideos(true);
+ }, [finished]);
+
+ // check if the browser supports media devices on first load
+ useEffect(() => { if (!navigator.mediaDevices) alert('This browser does not support getUserMedia.'); }, []);
+
+ useEffect(() => {
+ let interval: any = null;
+ if (recording) {
+ interval = setInterval(() => {
+ setRecordingTimer(unit => unit + 1);
+ }, 10);
+ } else if (!recording && recordingTimer !== 0) {
+ clearInterval(interval);
+ }
+ return () => clearInterval(interval);
+ }, [recording]);
+
+ useEffect(() => {
+ setVideoProgressHelper(recordingTimer)
+ recordingTimerRef.current = recordingTimer;
+ }, [recordingTimer]);
+
+ const setVideoProgressHelper = (progress: number) => {
+ const newProgress = (progress / MAXTIME) * 100;
+ setProgress(newProgress);
+ }
+
+ const startShowingStream = async (mediaConstraints = DEFAULT_MEDIA_CONSTRAINTS) => {
+ const stream = await navigator.mediaDevices.getUserMedia(mediaConstraints);
+
+ videoElementRef.current!.src = "";
+ videoElementRef.current!.srcObject = stream;
+ videoElementRef.current!.muted = true;
+
+ return stream;
+ }
+
+ const record = async () => {
+ // don't need to start a new stream every time we start recording a new segment
+ if (!videoRecorder.current) videoRecorder.current = new MediaRecorder(await startShowingStream());
+
+ // temporary chunks of video
+ let videoChunks: any = [];
+
+ videoRecorder.current.ondataavailable = (event: any) => {
+ if (event.data.size > 0) videoChunks.push(event.data);
+ };
+
+ videoRecorder.current.onstart = (event: any) => {
+ setRecording(true);
+ // start the recording api when the video recorder starts
+ trackScreen && TrackMovements.Instance.start();
+ };
+
+ videoRecorder.current.onstop = () => {
+ // if we have a last portion
+ if (videoChunks.length > 1) {
+ // append the current portion to the video pieces
+ const nextVideo = {
+ videoChunks,
+ endTime: recordingTimerRef.current,
+ startTime: videos?.lastElement()?.endTime || 0
+ };
+
+ // depending on if a presenation exists, add it to the video
+ const presentation = TrackMovements.Instance.yieldPresentation();
+ setVideos(videos => [...videos, (presentation != null && trackScreen) ? { ...nextVideo, presentation } : nextVideo]);
+ }
+
+ // reset the temporary chunks
+ videoChunks = [];
+ setRecording(false);
+ }
+
+ videoRecorder.current.start(200);
+ }
+
+
+ // if this is called, then we're done recording all the segments
+ const finish = (e: React.PointerEvent) => {
+ e.stopPropagation();
+
+ // call stop on the video recorder if active
+ videoRecorder.current?.state !== "inactive" && videoRecorder.current?.stop();
+
+ // end the streams (audio/video) to remove recording icon
+ const stream = videoElementRef.current!.srcObject;
+ stream instanceof MediaStream && stream.getTracks().forEach(track => track.stop());
+
+ // finish/clear the recoringApi
+ TrackMovements.Instance.finish();
+
+ // this will call upon progessbar to update videos to be in the correct order
+ setFinished(true);
+ }
+
+ const pause = (e: React.PointerEvent) => {
+ e.stopPropagation();
+ // if recording, then this is just a new segment
+ videoRecorder.current?.state === "recording" && videoRecorder.current.stop();
+ }
+
+ const start = (e: React.PointerEvent) => {
+ setupMoveUpEvents({}, e, returnTrue, returnFalse, e => {
+ // start recording if not already recording
+ if (!videoRecorder.current || videoRecorder.current.state === "inactive") record();
+
+ return true; // cancels propagation to documentView to avoid selecting it.
+ }, false, false);
+ }
+
+ const undoPrevious = (e: React.PointerEvent) => {
+ e.stopPropagation();
+ setDoUndo(prev => !prev);
+ }
+
+ const handleOnTimeUpdate = () => { playing && setVideoProgressHelper(videoElementRef.current!.currentTime); };
+
+ const millisecondToMinuteSecond = (milliseconds: number) => {
+ const toTwoDigit = (digit: number) => {
+ return String(digit).length == 1 ? "0" + digit : digit
+ }
+ const minutes = Math.floor((milliseconds % (1000 * 60 * 60)) / (1000 * 60));
+ const seconds = Math.floor((milliseconds % (1000 * 60)) / 1000);
+ return toTwoDigit(minutes) + " : " + toTwoDigit(seconds);
+ }
+
+ return (
+ <div className="recording-container">
+ <div className="video-wrapper">
+ <video id={`video-${props.id}`}
+ autoPlay
+ muted
+ onTimeUpdate={() => handleOnTimeUpdate()}
+ ref={videoElementRef}
+ />
+ <div className="recording-sign">
+ <span className="dot" />
+ <p className="timer">{millisecondToMinuteSecond(recordingTimer * 10)}</p>
+ </div>
+ <div className="controls">
+
+ <div className="controls-inner-container">
+ <div className="record-button-wrapper">
+ {recording ?
+ <button className="stop-button" onPointerDown={pause} /> :
+ <button className="record-button" onPointerDown={start} />
+ }
+ </div>
+
+ {!recording && (videos.length > 0 ?
+
+ <div className="options-wrapper video-edit-wrapper">
+ <IconContext.Provider value={{ color: "grey", className: "video-edit-buttons", style: { display: canUndo ? 'inherit' : 'none' } }}>
+ <MdBackspace onPointerDown={undoPrevious} />
+ </IconContext.Provider>
+ <IconContext.Provider value={{ color: "#cc1c08", className: "video-edit-buttons" }}>
+ <FaCheckCircle onPointerDown={finish} />
+ </IconContext.Provider>
+ </div>
+
+ : <div className="options-wrapper track-screen-wrapper">
+ <label className="track-screen">
+ <input type="checkbox" checked={trackScreen} onChange={(e) => { setTrackScreen(e.target.checked) }} />
+ <span className="checkmark"></span>
+ Track Screen
+ </label>
+ </div>)}
+
+ </div>
+
+ </div>
+
+ <ProgressBar
+ videos={videos}
+ setVideos={setVideos}
+ orderVideos={orderVideos}
+ progress={progress}
+ recording={recording}
+ doUndo={doUndo}
+ setCanUndo={setCanUndo}
+ />
+ </div>
+ </div>)
+} \ No newline at end of file