| author | Michael Foiani <sotech117@michaels-mbp-3.devices.brown.edu> | 2022-04-28 17:24:37 -0400 |
|---|---|---|
| committer | Michael Foiani <sotech117@michaels-mbp-3.devices.brown.edu> | 2022-04-28 17:24:37 -0400 |
| commit | 22fe2791b6a6e92cc4d0ad953363120b51bd6e2c (patch) | |
| tree | 0896be213b50026fdf5a05afb64b28328253d757 /src/client/views/nodes/RecordingBox | |
| parent | 42819362e50bc35b3bca228607fcc516d528bb1b (diff) | |
| parent | 99ae2ccde9dbcf6bae75edea231d4b10c736a692 (diff) | |
Merge with jenny
Diffstat (limited to 'src/client/views/nodes/RecordingBox')
| -rw-r--r-- | src/client/views/nodes/RecordingBox/RecordingBox.tsx   | 52  |
| -rw-r--r-- | src/client/views/nodes/RecordingBox/RecordingView.scss | 5   |
| -rw-r--r-- | src/client/views/nodes/RecordingBox/RecordingView.tsx  | 155 |
3 files changed, 125 insertions, 87 deletions
```diff
diff --git a/src/client/views/nodes/RecordingBox/RecordingBox.tsx b/src/client/views/nodes/RecordingBox/RecordingBox.tsx
index 6d444d324..86358e838 100644
--- a/src/client/views/nodes/RecordingBox/RecordingBox.tsx
+++ b/src/client/views/nodes/RecordingBox/RecordingBox.tsx
@@ -1,24 +1,54 @@
+import { action, observable } from "mobx";
 import { observer } from "mobx-react";
 import * as React from "react";
+import { AudioField, VideoField } from "../../../../fields/URLField";
+import { Upload } from "../../../../server/SharedMediaTypes";
 import { ViewBoxBaseComponent } from "../../DocComponent";
 import { FieldView } from "../FieldView";
+import { VideoBox } from "../VideoBox";
 import { RecordingView } from './RecordingView';
+import { DocumentType } from "../../../documents/DocumentTypes";
 
 @observer
-export class RecordingBox extends ViewBoxBaseComponent(){
+export class RecordingBox extends ViewBoxBaseComponent() {
 
-    public static LayoutString(fieldKey: string) { return FieldView.LayoutString(RecordingBox, fieldKey); }
+    public static LayoutString(fieldKey: string) { return FieldView.LayoutString(RecordingBox, fieldKey); }
 
-    private _ref: React.RefObject<HTMLDivElement> = React.createRef();
+    private _ref: React.RefObject<HTMLDivElement> = React.createRef();
 
-    constructor(props: any) {
-        super(props);
-    }
+    constructor(props: any) {
+        super(props);
+    }
 
-    render() {
-        return <div className="recordingBox" ref={this._ref}>
-            <RecordingView/>
-        </div>;
-    }
+    @observable result: Upload.FileInformation | undefined = undefined
+    @observable videoDuration: number | undefined = undefined
+
+    @action
+    setVideoDuration = (duration: number) => {
+        this.videoDuration = duration
+    }
+
+    @action
+    setResult = (info: Upload.FileInformation) => {
+        console.log("Setting result to " + info)
+        this.result = info
+        console.log(this.result.accessPaths.agnostic.client)
+        this.dataDoc.type = DocumentType.VID;
+        console.log(this.videoDuration)
+        this.dataDoc[this.fieldKey + "-duration"] = this.videoDuration;
+
+        this.layoutDoc.layout = VideoBox.LayoutString(this.fieldKey);
+        // this.dataDoc.nativeWidth = this.dataDoc.nativeHeight = undefined;
+        // this.layoutDoc._fitWidth = undefined;
+        this.dataDoc[this.props.fieldKey] = new VideoField(this.result.accessPaths.agnostic.client);
+    }
+
+    render() {
+        return <div className="recordingBox" ref={this._ref}>
+            {!this.result && <RecordingView setResult={this.setResult} setDuration={this.setVideoDuration} />}
+            {/* {!this.result ?
+                <RecordingView setResult={this.setResult} /> :
+                <p>video box</p>} */}
+        </div>;
+    }
 }
diff --git a/src/client/views/nodes/RecordingBox/RecordingView.scss b/src/client/views/nodes/RecordingBox/RecordingView.scss
index e4d971d51..1fea231b7 100644
--- a/src/client/views/nodes/RecordingBox/RecordingView.scss
+++ b/src/client/views/nodes/RecordingBox/RecordingView.scss
@@ -15,6 +15,7 @@ button {
     height: 100%;
     width: 100%;
     display: flex;
+    pointer-events: all;
 }
 
 .video-wrapper {
@@ -157,8 +158,8 @@ button {
     background-color: red;
     border: 0px;
     border-radius: 10%;
-    height: 80%;
-    width: 80%;
+    height: 70%;
+    width: 70%;
     align-self: center;
     margin: 0;
 
diff --git a/src/client/views/nodes/RecordingBox/RecordingView.tsx b/src/client/views/nodes/RecordingBox/RecordingView.tsx
index fab7bc990..d2adff95a 100644
--- a/src/client/views/nodes/RecordingBox/RecordingView.tsx
+++ b/src/client/views/nodes/RecordingBox/RecordingView.tsx
@@ -5,6 +5,8 @@ import { ProgressBar } from "./ProgressBar"
 import { MdBackspace } from 'react-icons/md';
 import { FaCheckCircle } from 'react-icons/fa';
 import { IconContext } from "react-icons";
+import { Networking } from '../../../Network';
+import { Upload } from '../../../../server/SharedMediaTypes';
 
 import { RecordingApi } from '../../../apis/recording/RecordingApi';
 
@@ -15,14 +17,19 @@ enum RecordingStatus {
     Paused
 }
 
-interface VideoSegment {
-    chunks: any[],
+interface MediaSegment {
+    videoChunks: any[],
     endTime: number
 }
 
+interface IRecordingViewProps {
+    setResult: (info: Upload.FileInformation) => void
+    setDuration: (seconds: number) => void
+}
+
 const MAXTIME = 1000;
 
-export function RecordingView() {
+export function RecordingView(props: IRecordingViewProps) {
 
     const [recording, setRecording] = useState(false);
     const recordingTimerRef = useRef<number>(0);
@@ -32,10 +39,10 @@ export function RecordingView() {
     const [speed, setSpeed] = useState(1);
     const [muted, setMuted] = useState(false);
 
-    const [videos, setVideos] = useState<VideoSegment[]>([]);
-    // const [videos, setVideos] = useState<string[]>([]);
+    const [videos, setVideos] = useState<MediaSegment[]>([]);
     const [currentVid, setCurrentVid] = useState<number>(0);
-    const recorder = useRef<MediaRecorder | null>(null);
+    const videoRecorder = useRef<MediaRecorder | null>(null);
+    const audioRecorder = useRef<MediaRecorder | null>(null);
     const videoElementRef = useRef<HTMLVideoElement | null>(null);
     const [finished, setFinished] = useState<Boolean>(false)
 
@@ -43,49 +50,57 @@
     const DEFAULT_MEDIA_CONSTRAINTS = {
+        // video: true,
+        // audio: true
         video: {
             width: 1280,
             height: 720,
         },
-        audio: {
-            echoCancellation: true,
-            noiseSuppression: true,
-            sampleRate: 44100
-        }
+        // audio: true,
+        // audio: {
+        //     echoCancellation: true,
+        //     noiseSuppression: true,
+        //     sampleRate: 44100
+        // }
     }
 
     useEffect(() => {
         if (finished) {
+            props.setDuration(recordingTimer * 100)
             let allVideoChunks: any = []
             console.log(videos)
             videos.forEach((vid) => {
-                console.log(vid.chunks)
-                allVideoChunks = allVideoChunks.concat(vid.chunks)
+                console.log(vid.videoChunks)
+                allVideoChunks = allVideoChunks.concat(vid.videoChunks)
             })
             console.log(allVideoChunks)
-
-            const blob = new Blob(allVideoChunks, {
-                type: 'video/webm'
-            })
-            const blobUrl = URL.createObjectURL(blob)
-
-            videoElementRef.current!.srcObject = null
-            videoElementRef.current!.src = blobUrl
-            videoElementRef.current!.muted = false
-
-            // clear the recording api
-            const presentation = RecordingApi.finish()
-            RecordingApi.clear()
-
-            // upload data
-            // const [{ result }] = await Networking.UploadFilesToServer(e.data);
-            // console.log("Data result", result);
-            // if (!(result instanceof Error)) {
-            //     this.props.Document[this.fieldKey] = new AudioField(result.accessPaths.agnostic.client);
+            const videoFile = new File(allVideoChunks, "video.mkv", { type: allVideoChunks[0].type, lastModified: Date.now() });
+
+            // const uploadVideo = async () => {
+            //     const [{ result }] = await Networking.UploadFilesToServer(videoFile);
+            //     console.log("upload result", result);
+            //     if (!(result instanceof Error)) {
+            //         setResult(result)
+            //     }
             // }
+            Networking.UploadFilesToServer(videoFile)
+                .then((data) => {
+                    const result = data[0].result
+                    if (!(result instanceof Error)) { // convert this screenshotBox into normal videoBox
+                        props.setResult(result)
+                    } else {
+                        alert("video conversion failed");
+                    }
+                })
+            // uploadVideo()
+
+            // this.dataDoc[this.fieldKey + "-duration"] = (new Date().getTime() - this.recordingStart!) / 1000;
+
+
+            // change to one recording box
         }
@@ -102,21 +117,9 @@
     useEffect(() => {
         // get access to the video element on every render
-        // videoElement = document.getElementById('video') as HTMLVideoElement;
         videoElementRef.current = document.getElementById('video') as HTMLVideoElement;
     })
 
-    // useEffect(() => {
-    //     if (playing) {
-    //         videoElement!.srcObject = null
-    //         // videoElement!.src = videos[currentVid].url
-    //         videoElement!.muted = false
-    //         videoElement!.play()
-    //     } else {
-    //         videoElement!.pause();
-    //     }
-    // }, [playing, videoElement]);
-
     useEffect(() => {
         let interval: any = null;
         if (recording) {
@@ -150,82 +153,86 @@
     const record = async () => {
         const stream = await startShowingStream();
 
-        recorder.current = new MediaRecorder(stream)
+        videoRecorder.current = new MediaRecorder(stream)
+        // audioRecorder.current = new MediaRecorder(await navigator.mediaDevices.getUserMedia({ audio: true }));
 
         // temporary chunks of video
-        let chunks: any = []
-        recorder.current.ondataavailable = (event: any) => {
-            // store the video chunks as it is recording
-            console.log("data available")
+        let videoChunks: any = []
+        // let audioChunks: any = []
+
+        videoRecorder.current.ondataavailable = (event: any) => {
             if (event.data.size > 0) {
-                chunks.push(event.data)
+                videoChunks.push(event.data)
             }
         }
 
-        recorder.current.onstart = (event: any) => {
-            console.log("on start")
+        // audioRecorder.current.ondataavailable = (event: any) => {
+        //     if (event.data.size > 0) {
+        //         audioChunks.push(event.data)
+        //     }
+        // }
+
+        videoRecorder.current.onstart = (event: any) => {
             setRecording(true);
         }
 
-        recorder.current.onstop = () => {
+        videoRecorder.current.onstop = () => {
             // if we have a last portion
-            if (chunks.length > 1) {
+            if (videoChunks.length > 1) {
                 // append the current portion to the video pieces
-                setVideos(videos => [...videos, { chunks: chunks, endTime: recordingTimerRef.current }])
+                setVideos(videos => [...videos, { videoChunks: videoChunks, endTime: recordingTimerRef.current }])
             }
             // reset the temporary chunks
-            chunks = []
+            videoChunks = []
             setRecording(false);
             setFinished(true);
         }
 
         // recording paused
-        recorder.current.onpause = (event: any) => {
+        videoRecorder.current.onpause = (event: any) => {
             // append the current portion to the video pieces
-            console.log(chunks)
-            setVideos(videos => [...videos, { chunks: chunks, endTime: recordingTimerRef.current }])
+            console.log(videoChunks)
+            setVideos(videos => [...videos, { videoChunks: videoChunks, endTime: recordingTimerRef.current }])
             // reset the temporary chunks
-            chunks = []
+            videoChunks = []
            setRecording(false);
        }
 
-        recorder.current.onresume = async (event: any) => {
+        videoRecorder.current.onresume = async (event: any) => {
             console.log(event)
             await startShowingStream();
             setRecording(true);
         }
 
-        recorder.current.start(200)
+        videoRecorder.current.start(200)
     }
 
     const stop = () => {
-        if (recorder.current) {
-            if (recorder.current.state !== "inactive") {
-                recorder.current.stop();
+        if (videoRecorder.current) {
+            if (videoRecorder.current.state !== "inactive") {
+                videoRecorder.current.stop();
                 // recorder.current.stream.getTracks().forEach((track: any) => track.stop())
             }
         }
     }
 
     const pause = () => {
-        if (recorder.current) {
-            if (recorder.current.state === "recording") {
-                recorder.current.pause();
-                const err = RecordingApi.pause()
+        if (videoRecorder.current) {
+            if (videoRecorder.current.state === "recording") {
+                videoRecorder.current.pause();
             }
         }
     }
 
     const startOrResume = () => {
         console.log('[RecordingView.tsx] startOrResume')
-        if (!recorder.current || recorder.current.state === "inactive") {
-            record();
-            const err = RecordingApi.initAndStart()
-        } else if (recorder.current.state === "paused") {
-            recorder.current.resume();
+        if (!videoRecorder.current || videoRecorder.current.state === "inactive") {
+            record();
+        } else if (videoRecorder.current.state === "paused") {
+            videoRecorder.current.resume();
         }
     }
```
