path: root/src/client/views/nodes/RecordingBox/RecordingView.tsx
author    Michael Foiani <sotech117@michaels-mbp-3.devices.brown.edu>  2022-04-28 17:24:37 -0400
committer Michael Foiani <sotech117@michaels-mbp-3.devices.brown.edu>  2022-04-28 17:24:37 -0400
commit    22fe2791b6a6e92cc4d0ad953363120b51bd6e2c (patch)
tree      0896be213b50026fdf5a05afb64b28328253d757 /src/client/views/nodes/RecordingBox/RecordingView.tsx
parent    42819362e50bc35b3bca228607fcc516d528bb1b (diff)
parent    99ae2ccde9dbcf6bae75edea231d4b10c736a692 (diff)
Merge with jenny
Diffstat (limited to 'src/client/views/nodes/RecordingBox/RecordingView.tsx')
-rw-r--r--  src/client/views/nodes/RecordingBox/RecordingView.tsx  155
1 file changed, 81 insertions, 74 deletions
diff --git a/src/client/views/nodes/RecordingBox/RecordingView.tsx b/src/client/views/nodes/RecordingBox/RecordingView.tsx
index fab7bc990..d2adff95a 100644
--- a/src/client/views/nodes/RecordingBox/RecordingView.tsx
+++ b/src/client/views/nodes/RecordingBox/RecordingView.tsx
@@ -5,6 +5,8 @@ import { ProgressBar } from "./ProgressBar"
import { MdBackspace } from 'react-icons/md';
import { FaCheckCircle } from 'react-icons/fa';
import { IconContext } from "react-icons";
+import { Networking } from '../../../Network';
+import { Upload } from '../../../../server/SharedMediaTypes';
import { RecordingApi } from '../../../apis/recording/RecordingApi';
@@ -15,14 +17,19 @@ enum RecordingStatus {
Paused
}
-interface VideoSegment {
- chunks: any[],
+interface MediaSegment {
+ videoChunks: any[],
endTime: number
}
+interface IRecordingViewProps {
+ setResult: (info: Upload.FileInformation) => void
+ setDuration: (seconds: number) => void
+}
+
const MAXTIME = 1000;
-export function RecordingView() {
+export function RecordingView(props: IRecordingViewProps) {
const [recording, setRecording] = useState(false);
const recordingTimerRef = useRef<number>(0);
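This hunk gives RecordingView two callbacks so the finished recording can flow back to its parent. A hypothetical parent wiring for the new props (sketch only; the parent component name and its state fields are illustrative, not part of this commit):

```tsx
import * as React from "react";
import { RecordingView } from "./RecordingView";
import { Upload } from "../../../../server/SharedMediaTypes";

// Sketch of a parent that consumes the new IRecordingViewProps callbacks.
export function RecordingBoxSketch() {
    const [result, setResult] = React.useState<Upload.FileInformation | undefined>(undefined);
    const [duration, setDuration] = React.useState<number>(0);

    return (
        <RecordingView
            // receives the server's upload info once the finished recording is uploaded
            setResult={(info: Upload.FileInformation) => setResult(info)}
            // receives the clip length when recording finishes
            setDuration={(seconds: number) => setDuration(seconds)}
        />
    );
}
```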
@@ -32,10 +39,10 @@ export function RecordingView() {
const [speed, setSpeed] = useState(1);
const [muted, setMuted] = useState(false);
- const [videos, setVideos] = useState<VideoSegment[]>([]);
- // const [videos, setVideos] = useState<string[]>([]);
+ const [videos, setVideos] = useState<MediaSegment[]>([]);
const [currentVid, setCurrentVid] = useState<number>(0);
- const recorder = useRef<MediaRecorder | null>(null);
+ const videoRecorder = useRef<MediaRecorder | null>(null);
+ const audioRecorder = useRef<MediaRecorder | null>(null);
const videoElementRef = useRef<HTMLVideoElement | null>(null);
const [finished, setFinished] = useState<Boolean>(false)
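This hunk splits the single `recorder` ref into separate `videoRecorder` and `audioRecorder` refs, although only the video path is wired up below. A sketch of how the audio ref could be initialized, mirroring the commented-out line in `record()` further down (assumption: audio comes from a plain `getUserMedia({ audio: true })` stream):

```tsx
import * as React from "react";

// Sketch only: initialize the (currently unused) audioRecorder ref from a
// microphone stream, as the commented-out line in record() suggests.
const setupAudioRecorder = async (
    audioRecorder: React.MutableRefObject<MediaRecorder | null>
) => {
    const audioStream = await navigator.mediaDevices.getUserMedia({ audio: true });
    audioRecorder.current = new MediaRecorder(audioStream);
};
```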
@@ -43,49 +50,57 @@ export function RecordingView() {
const DEFAULT_MEDIA_CONSTRAINTS = {
+ // video: true,
+ // audio: true
video: {
width: 1280,
height: 720,
},
- audio: {
- echoCancellation: true,
- noiseSuppression: true,
- sampleRate: 44100
- }
+ // audio: true,
+ // audio: {
+ // echoCancellation: true,
+ // noiseSuppression: true,
+ // sampleRate: 44100
+ // }
}
useEffect(() => {
if (finished) {
+ props.setDuration(recordingTimer * 100)
let allVideoChunks: any = []
console.log(videos)
videos.forEach((vid) => {
- console.log(vid.chunks)
- allVideoChunks = allVideoChunks.concat(vid.chunks)
+ console.log(vid.videoChunks)
+ allVideoChunks = allVideoChunks.concat(vid.videoChunks)
})
console.log(allVideoChunks)
-
- const blob = new Blob(allVideoChunks, {
- type: 'video/webm'
- })
- const blobUrl = URL.createObjectURL(blob)
-
- videoElementRef.current!.srcObject = null
- videoElementRef.current!.src = blobUrl
- videoElementRef.current!.muted = false
-
- // clear the recording api
- const presentation = RecordingApi.finish()
- RecordingApi.clear()
-
- // upload data
- // const [{ result }] = await Networking.UploadFilesToServer(e.data);
- // console.log("Data result", result);
- // if (!(result instanceof Error)) {
- // this.props.Document[this.fieldKey] = new AudioField(result.accessPaths.agnostic.client);
+ const videoFile = new File(allVideoChunks, "video.mkv", { type: allVideoChunks[0].type, lastModified: Date.now() });
+
+ // const uploadVideo = async () => {
+ // const [{ result }] = await Networking.UploadFilesToServer(videoFile);
+ // console.log("upload result", result);
+ // if (!(result instanceof Error)) {
+ // setResult(result)
+ // }
// }
+ Networking.UploadFilesToServer(videoFile)
+ .then((data) => {
+ const result = data[0].result
+ if (!(result instanceof Error)) { // convert this screenshotBox into normal videoBox
+ props.setResult(result)
+ } else {
+ alert("video conversion failed");
+ }
+ })
+ // uploadVideo()
+
+ // this.dataDoc[this.fieldKey + "-duration"] = (new Date().getTime() - this.recordingStart!) / 1000;
+
+
+
// change to one recording box
}
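The `finished` effect above stitches every recorded chunk into one `File` and hands it to `Networking.UploadFilesToServer`, alerting when the server returns an error. The same step written in the async/await form that the commented-out `uploadVideo` hints at (a sketch; the helper name and parameters are illustrative):

```tsx
import { Networking } from "../../../Network";
import { Upload } from "../../../../server/SharedMediaTypes";

// Sketch of the upload step as a standalone async helper.
const uploadRecording = async (
    videoChunks: Blob[],
    setResult: (info: Upload.FileInformation) => void
) => {
    const videoFile = new File(videoChunks, "video.mkv", {
        type: videoChunks[0].type,
        lastModified: Date.now(),
    });
    // UploadFilesToServer resolves with an array of { result } entries,
    // matching how the effect above reads data[0].result.
    const [{ result }] = await Networking.UploadFilesToServer(videoFile);
    if (!(result instanceof Error)) {
        setResult(result); // convert this recording box into a normal video box
    } else {
        alert("video conversion failed");
    }
};
```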
@@ -102,21 +117,9 @@ export function RecordingView() {
useEffect(() => {
// get access to the video element on every render
- // videoElement = document.getElementById('video') as HTMLVideoElement;
videoElementRef.current = document.getElementById('video') as HTMLVideoElement;
})
- // useEffect(() => {
- // if (playing) {
- // videoElement!.srcObject = null
- // // videoElement!.src = videos[currentVid].url
- // videoElement!.muted = false
- // videoElement!.play()
- // } else {
- // videoElement!.pause();
- // }
- // }, [playing, videoElement]);
-
useEffect(() => {
let interval: any = null;
if (recording) {
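The timer effect is cut off at this hunk boundary, but the surrounding code shows that `recordingTimer` drives the progress display while `recordingTimerRef` mirrors it for use inside the MediaRecorder callbacks. A sketch of the usual interval pattern such an effect follows (the tick length and the `setRecordingTimer` setter are assumptions, since neither is visible in this diff):

```tsx
// Sketch only: interval-driven timer with cleanup; tick length is assumed.
useEffect(() => {
    let interval: ReturnType<typeof setInterval> | null = null;
    if (recording) {
        interval = setInterval(() => {
            setRecordingTimer(t => {
                recordingTimerRef.current = t + 1; // keep the ref in sync for onpause/onstop
                return t + 1;
            });
        }, 100);
    }
    // clear the interval when recording stops or the component unmounts
    return () => { if (interval) clearInterval(interval); };
}, [recording]);
```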
@@ -150,82 +153,86 @@ export function RecordingView() {
const record = async () => {
const stream = await startShowingStream();
- recorder.current = new MediaRecorder(stream)
+ videoRecorder.current = new MediaRecorder(stream)
+ // audioRecorder.current = new MediaRecorder(await navigator.mediaDevices.getUserMedia({ audio: true }));
// temporary chunks of video
- let chunks: any = []
- recorder.current.ondataavailable = (event: any) => {
- // store the video chunks as it is recording
- console.log("data available")
+ let videoChunks: any = []
+ // let audioChunks: any = []
+
+ videoRecorder.current.ondataavailable = (event: any) => {
if (event.data.size > 0) {
- chunks.push(event.data)
+ videoChunks.push(event.data)
}
}
- recorder.current.onstart = (event: any) => {
- console.log("on start")
+ // audioRecorder.current.ondataavailable = (event: any) => {
+ // if (event.data.size > 0) {
+ // audioChunks.push(event.data)
+ // }
+ // }
+
+ videoRecorder.current.onstart = (event: any) => {
setRecording(true);
}
- recorder.current.onstop = () => {
+ videoRecorder.current.onstop = () => {
// if we have a last portion
- if (chunks.length > 1) {
+ if (videoChunks.length > 1) {
// append the current portion to the video pieces
- setVideos(videos => [...videos, { chunks: chunks, endTime: recordingTimerRef.current }])
+ setVideos(videos => [...videos, { videoChunks: videoChunks, endTime: recordingTimerRef.current }])
}
// reset the temporary chunks
- chunks = []
+ videoChunks = []
setRecording(false);
setFinished(true);
}
// recording paused
- recorder.current.onpause = (event: any) => {
+ videoRecorder.current.onpause = (event: any) => {
// append the current portion to the video pieces
- console.log(chunks)
- setVideos(videos => [...videos, { chunks: chunks, endTime: recordingTimerRef.current }])
+ console.log(videoChunks)
+ setVideos(videos => [...videos, { videoChunks: videoChunks, endTime: recordingTimerRef.current }])
// reset the temporary chunks
- chunks = []
+ videoChunks = []
setRecording(false);
}
- recorder.current.onresume = async (event: any) => {
+ videoRecorder.current.onresume = async (event: any) => {
console.log(event)
await startShowingStream();
setRecording(true);
}
- recorder.current.start(200)
+ videoRecorder.current.start(200)
}
const stop = () => {
- if (recorder.current) {
- if (recorder.current.state !== "inactive") {
- recorder.current.stop();
+ if (videoRecorder.current) {
+ if (videoRecorder.current.state !== "inactive") {
+ videoRecorder.current.stop();
// recorder.current.stream.getTracks().forEach((track: any) => track.stop())
}
}
}
const pause = () => {
- if (recorder.current) {
- if (recorder.current.state === "recording") {
- recorder.current.pause();
- const err = RecordingApi.pause()
+ if (videoRecorder.current) {
+ if (videoRecorder.current.state === "recording") {
+ videoRecorder.current.pause();
}
}
}
const startOrResume = () => {
console.log('[RecordingView.tsx] startOrResume')
- if (!recorder.current || recorder.current.state === "inactive") {
- record();
- const err = RecordingApi.initAndStart()
- } else if (recorder.current.state === "paused") {
- recorder.current.resume();
+ if (!videoRecorder.current || videoRecorder.current.state === "inactive") {
+ record();
+ } else if (videoRecorder.current.state === "paused") {
+ videoRecorder.current.resume();
}
}
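Taken together, the renamed handlers above form a small state machine around the MediaRecorder API: `start(200)` emits a chunk every 200 ms, `pause`/`resume` snapshot and continue segments, and `stop` finalizes the last one. A condensed sketch of that lifecycle using only the standard MediaRecorder calls that appear in the diff (the stream argument is assumed to come from `startShowingStream`):

```tsx
// Condensed sketch of the recorder lifecycle driven by the controls above.
const controls = (videoRecorder: { current: MediaRecorder | null }) => ({
    startOrResume: async (stream: MediaStream) => {
        if (!videoRecorder.current || videoRecorder.current.state === "inactive") {
            videoRecorder.current = new MediaRecorder(stream);
            videoRecorder.current.start(200);   // dataavailable fires every 200 ms
        } else if (videoRecorder.current.state === "paused") {
            videoRecorder.current.resume();     // fires onresume
        }
    },
    pause: () => {
        if (videoRecorder.current?.state === "recording") {
            videoRecorder.current.pause();      // fires onpause, snapshotting current chunks
        }
    },
    stop: () => {
        if (videoRecorder.current && videoRecorder.current.state !== "inactive") {
            videoRecorder.current.stop();       // fires onstop, finalizing the last segment
        }
    },
});
```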