Diffstat (limited to 'src/client/views/nodes/RecordingBox/RecordingView.tsx')
-rw-r--r-- | src/client/views/nodes/RecordingBox/RecordingView.tsx | 227
1 file changed, 108 insertions(+), 119 deletions(-)
diff --git a/src/client/views/nodes/RecordingBox/RecordingView.tsx b/src/client/views/nodes/RecordingBox/RecordingView.tsx
index b95335792..ec5917b9e 100644
--- a/src/client/views/nodes/RecordingBox/RecordingView.tsx
+++ b/src/client/views/nodes/RecordingBox/RecordingView.tsx
@@ -1,23 +1,24 @@
 import * as React from 'react';
 import "./RecordingView.scss";
-import { ReactElement, useCallback, useEffect, useRef, useState } from "react";
+import { useEffect, useRef, useState } from "react";
 import { ProgressBar } from "./ProgressBar"
 import { MdBackspace } from 'react-icons/md';
 import { FaCheckCircle } from 'react-icons/fa';
 import { IconContext } from "react-icons";
 import { Networking } from '../../../Network';
 import { Upload } from '../../../../server/SharedMediaTypes';
+import { returnFalse, returnTrue, setupMoveUpEvents } from '../../../../Utils';
+import { Presentation, TrackMovements } from '../../../util/TrackMovements';
-import { RecordingApi } from '../../../util/RecordingApi';
-import { emptyFunction, returnFalse, returnTrue, setupMoveUpEvents } from '../../../../Utils';
-
-interface MediaSegment {
+export interface MediaSegment {
     videoChunks: any[],
-    endTime: number
+    endTime: number,
+    startTime: number,
+    presentation?: Presentation,
 }

 interface IRecordingViewProps {
-    setResult: (info: Upload.FileInformation, trackScreen: boolean) => void
+    setResult: (info: Upload.AccessPathInfo, presentation?: Presentation) => void
     setDuration: (seconds: number) => void
     id: string
 }
@@ -32,12 +33,18 @@ export function RecordingView(props: IRecordingViewProps) {
     const [playing, setPlaying] = useState(false);
     const [progress, setProgress] = useState(0);
+    // acts as a "refresh state" to tell progressBar when to undo
+    const [doUndo, setDoUndo] = useState(false);
+    // whether an undo can occur or not
+    const [canUndo, setCanUndo] = useState(false);
+
     const [videos, setVideos] = useState<MediaSegment[]>([]);
+    const [orderVideos, setOrderVideos] = useState<boolean>(false);

     const videoRecorder = useRef<MediaRecorder | null>(null);
     const videoElementRef = useRef<HTMLVideoElement | null>(null);

-    const [finished, setFinished] = useState<boolean>(false)
-    const [trackScreen, setTrackScreen] = useState<boolean>(true)
+    const [finished, setFinished] = useState<boolean>(false);
+    const [trackScreen, setTrackScreen] = useState<boolean>(false);
@@ -45,53 +52,42 @@ export function RecordingView(props: IRecordingViewProps) {
         video: {
             width: 1280,
             height: 720,
+        },
         audio: {
             echoCancellation: true,
             noiseSuppression: true,
             sampleRate: 44100
         }
-    }
+    };

     useEffect(() => {
-        if (finished) {
-            props.setDuration(recordingTimer * 100)
-            let allVideoChunks: any = []
-            videos.forEach((vid) => {
-                console.log(vid.videoChunks)
-                allVideoChunks = allVideoChunks.concat(vid.videoChunks)
-            })
-
-            const videoFile = new File(allVideoChunks, "video.mkv", { type: allVideoChunks[0].type, lastModified: Date.now() });
-
-            Networking.UploadFilesToServer(videoFile)
-                .then((data) => {
-                    const result = data[0].result
-                    if (!(result instanceof Error)) { // convert this screenshotBox into normal videoBox
-                        props.setResult(result, trackScreen)
-                    } else {
-                        alert("video conversion failed");
-                    }
-                })
-
-        }
+        // make the total presentation that'll match the concatenated video
+        let concatPres = trackScreen && TrackMovements.Instance.concatPresentations(videos.map(v => v.presentation as Presentation));
+        // this async function uses the server to create the concatenated video and then sets the result to its accessPaths
+        (async () => {
+            const videoFiles = videos.map((vid, i) => new File(vid.videoChunks, `segvideo${i}.mkv`, { type: vid.videoChunks[0].type, lastModified: Date.now() }));
-    }, [finished])
+            // upload the segments to the server and get their server access paths
+            const serverPaths: string[] = (await Networking.UploadFilesToServer(videoFiles))
+                .map(res => (res.result instanceof Error) ? '' : res.result.accessPaths.agnostic.server)
-    useEffect(() => {
-        // check if the browser supports media devices on first load
-        if (!navigator.mediaDevices) {
-            console.log('This browser does not support getUserMedia.')
+            // concatenate the segments together using a POST call
+            const result: Upload.AccessPathInfo | Error = await Networking.PostToServer('/concatVideos', serverPaths);
+            !(result instanceof Error) ? props.setResult(result, concatPres || undefined) : console.error("video conversion failed");
+        })();
         }
-        console.log('This device has the correct media devices.')
-    }, [])
+    }, [videos]);
+    // this will call upon the progress bar to edit videos to be in the correct order
     useEffect(() => {
-        // get access to the video element on every render
-        videoElementRef.current = document.getElementById(`video-${props.id}`) as HTMLVideoElement;
-    })
+        finished && setOrderVideos(true);
+    }, [finished]);
+
+    // check if the browser supports media devices on first load
+    useEffect(() => { if (!navigator.mediaDevices) alert('This browser does not support getUserMedia.'); }, []);

     useEffect(() => {
         let interval: any = null;
@@ -103,121 +99,108 @@ export function RecordingView(props: IRecordingViewProps) {
             clearInterval(interval);
         }
         return () => clearInterval(interval);
-    }, [recording])
+    }, [recording]);

     useEffect(() => {
         setVideoProgressHelper(recordingTimer)
         recordingTimerRef.current = recordingTimer;
-    }, [recordingTimer])
+    }, [recordingTimer]);

     const setVideoProgressHelper = (progress: number) => {
         const newProgress = (progress / MAXTIME) * 100;
-        setProgress(newProgress)
+        setProgress(newProgress);
     }
+
     const startShowingStream = async (mediaConstraints = DEFAULT_MEDIA_CONSTRAINTS) => {
-        const stream = await navigator.mediaDevices.getUserMedia(mediaConstraints)
+        const stream = await navigator.mediaDevices.getUserMedia(mediaConstraints);
-        videoElementRef.current!.src = ""
-        videoElementRef.current!.srcObject = stream
-        videoElementRef.current!.muted = true
+        videoElementRef.current!.src = "";
+        videoElementRef.current!.srcObject = stream;
+        videoElementRef.current!.muted = true;
-        return stream
+        return stream;
     }

     const record = async () => {
-        const stream = await startShowingStream();
-        videoRecorder.current = new MediaRecorder(stream)
+        // don't need to start a new stream every time we start recording a new segment
+        if (!videoRecorder.current) videoRecorder.current = new MediaRecorder(await startShowingStream());

         // temporary chunks of video
-        let videoChunks: any = []
+        let videoChunks: any = [];

         videoRecorder.current.ondataavailable = (event: any) => {
-            if (event.data.size > 0) {
-                videoChunks.push(event.data)
-            }
-        }
+            if (event.data.size > 0) videoChunks.push(event.data);
+        };

         videoRecorder.current.onstart = (event: any) => {
             setRecording(true);
-            trackScreen && RecordingApi.Instance.start();
-        }
+            // start the recording API when the video recorder starts
+            trackScreen && TrackMovements.Instance.start();
+        };

         videoRecorder.current.onstop = () => {
             // if we have a last portion
             if (videoChunks.length > 1) {
                 // append the current portion to the video pieces
-                setVideos(videos => [...videos, { videoChunks: videoChunks, endTime: recordingTimerRef.current }])
+                const nextVideo = {
+                    videoChunks,
+                    endTime: recordingTimerRef.current,
+                    startTime: videos?.lastElement()?.endTime || 0
+                };
+
+                // depending on whether a presentation exists, add it to the video
+                const presentation = TrackMovements.Instance.yieldPresentation();
+                setVideos(videos => [...videos, (presentation != null && trackScreen) ? { ...nextVideo, presentation } : nextVideo]);
             }
             // reset the temporary chunks
-            videoChunks = []
+            videoChunks = [];
             setRecording(false);
-            setFinished(true);
-            trackScreen && RecordingApi.Instance.pause();
         }

-        // recording paused
-        videoRecorder.current.onpause = (event: any) => {
-            // append the current portion to the video pieces
-            setVideos(videos => [...videos, { videoChunks: videoChunks, endTime: recordingTimerRef.current }])
+        videoRecorder.current.start(200);
+    }

-            // reset the temporary chunks
-            videoChunks = []
-            setRecording(false);
-            trackScreen && RecordingApi.Instance.pause();
-        }
-        videoRecorder.current.onresume = async (event: any) => {
-            await startShowingStream();
-            setRecording(true);
-            trackScreen && RecordingApi.Instance.resume();
-        }
+    // if this is called, then we're done recording all the segments
+    const finish = (e: React.PointerEvent) => {
+        e.stopPropagation();

-        videoRecorder.current.start(200)
-    }
+        // call stop on the video recorder if active
+        videoRecorder.current?.state !== "inactive" && videoRecorder.current?.stop();
+        // end the streams (audio/video) to remove recording icon
+        const stream = videoElementRef.current!.srcObject;
+        stream instanceof MediaStream && stream.getTracks().forEach(track => track.stop());

-    const stop = () => {
-        if (videoRecorder.current) {
-            if (videoRecorder.current.state !== "inactive") {
-                videoRecorder.current.stop();
-                // recorder.current.stream.getTracks().forEach((track: any) => track.stop())
-            }
-        }
+        // finish/clear the recording API
+        TrackMovements.Instance.finish();
+
+        // this will call upon the progress bar to update videos to be in the correct order
+        setFinished(true);
     }

-    const pause = () => {
-        if (videoRecorder.current) {
-            if (videoRecorder.current.state === "recording") {
-                videoRecorder.current.pause();
-            }
-        }
+    const pause = (e: React.PointerEvent) => {
+        e.stopPropagation();
+        // if recording, then this is just a new segment
+        videoRecorder.current?.state === "recording" && videoRecorder.current.stop();
     }

-    const startOrResume = (e: React.PointerEvent) => {
-        // the code to start or resume does not get triggered if we start dragging the button
+    const start = (e: React.PointerEvent) => {
         setupMoveUpEvents({}, e, returnTrue, returnFalse, e => {
-            if (!videoRecorder.current || videoRecorder.current.state === "inactive") {
-                record();
-            } else if (videoRecorder.current.state === "paused") {
-                videoRecorder.current.resume();
-            }
+            // start recording if not already recording
+            if (!videoRecorder.current || videoRecorder.current.state === "inactive") record();
+            return true; // cancels propagation to documentView to avoid selecting it.
         }, false, false);
     }

-    const clearPrevious = () => {
-        const numVideos = videos.length
-        setRecordingTimer(numVideos == 1 ? 0 : videos[numVideos - 2].endTime)
-        setVideoProgressHelper(numVideos == 1 ? 0 : videos[numVideos - 2].endTime)
-        setVideos(videos.filter((_, idx) => idx !== numVideos - 1));
+    const undoPrevious = (e: React.PointerEvent) => {
+        e.stopPropagation();
+        setDoUndo(prev => !prev);
     }

-    const handleOnTimeUpdate = () => {
-        if (playing) {
-            setVideoProgressHelper(videoElementRef.current!.currentTime)
-        }
-    };
+    const handleOnTimeUpdate = () => { playing && setVideoProgressHelper(videoElementRef.current!.currentTime); };

     const millisecondToMinuteSecond = (milliseconds: number) => {
         const toTwoDigit = (digit: number) => {
@@ -234,7 +217,8 @@ export function RecordingView(props: IRecordingViewProps) {
                 <video id={`video-${props.id}`}
                     autoPlay
                     muted
-                    onTimeUpdate={handleOnTimeUpdate}
+                    onTimeUpdate={() => handleOnTimeUpdate()}
+                    ref={videoElementRef}
                 />
                 <div className="recording-sign">
                     <span className="dot" />
@@ -246,18 +230,18 @@ export function RecordingView(props: IRecordingViewProps) {
                 <div className="record-button-wrapper">
                     {recording ?
                         <button className="stop-button" onPointerDown={pause} /> :
-                        <button className="record-button" onPointerDown={startOrResume} />
+                        <button className="record-button" onPointerDown={start} />
                     }
                 </div>

                 {!recording && (videos.length > 0 ?
                     <div className="options-wrapper video-edit-wrapper">
-                        {/* <IconContext.Provider value={{ color: "grey", className: "video-edit-buttons" }}>
-                            <MdBackspace onClick={clearPrevious} />
-                        </IconContext.Provider> */}
+                        <IconContext.Provider value={{ color: "grey", className: "video-edit-buttons", style: { display: canUndo ? 'inherit' : 'none' } }}>
+                            <MdBackspace onPointerDown={undoPrevious} />
+                        </IconContext.Provider>

                         <IconContext.Provider value={{ color: "#cc1c08", className: "video-edit-buttons" }}>
-                            <FaCheckCircle onClick={stop} />
+                            <FaCheckCircle onPointerDown={finish} />
                         </IconContext.Provider>
                     </div>
@@ -271,12 +255,17 @@ export function RecordingView(props: IRecordingViewProps) {
                 </div>
-                <ProgressBar
-                    progress={progress}
-                    marks={videos.map((elt) => elt.endTime / MAXTIME * 100)}
-                    // playSegment={playSegment}
-                />
             </div>
+
+            <ProgressBar
+                videos={videos}
+                setVideos={setVideos}
+                orderVideos={orderVideos}
+                progress={progress}
+                recording={recording}
+                doUndo={doUndo}
+                setCanUndo={setCanUndo}
+            />
         </div>
     </div>)
 }
\ No newline at end of file
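
The diff replaces the old single-file upload with a per-segment flow: each recorded MediaSegment is uploaded as its own file, the resulting server access paths are collected, and a POST to '/concatVideos' merges them into one video whose access-path info is handed to setResult (together with the optional concatenated Presentation). The sketch below restates that step outside the component for reference only; it is not part of the commit. The helper name concatSegments is hypothetical, while Networking.UploadFilesToServer, the '/concatVideos' route, Upload.AccessPathInfo, and MediaSegment are used exactly as in the diff (same imports as RecordingView.tsx).

// Sketch only (assumes the same imports as RecordingView.tsx); concatSegments is a hypothetical name.
async function concatSegments(videos: MediaSegment[]): Promise<Upload.AccessPathInfo | Error> {
    // one File per recorded segment, in recording order
    const videoFiles = videos.map((vid, i) =>
        new File(vid.videoChunks, `segvideo${i}.mkv`, { type: vid.videoChunks[0].type, lastModified: Date.now() }));
    // upload every segment and keep its server-side access path ('' marks a failed upload)
    const serverPaths = (await Networking.UploadFilesToServer(videoFiles))
        .map(res => res.result instanceof Error ? '' : res.result.accessPaths.agnostic.server);
    // ask the server to concatenate the uploaded segments into a single video
    return Networking.PostToServer('/concatVideos', serverPaths);
}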