Diffstat (limited to 'src/client/views/nodes/RecordingBox/RecordingView.tsx')
-rw-r--r--  src/client/views/nodes/RecordingBox/RecordingView.tsx  349
1 file changed, 349 insertions(+), 0 deletions(-)
diff --git a/src/client/views/nodes/RecordingBox/RecordingView.tsx b/src/client/views/nodes/RecordingBox/RecordingView.tsx
new file mode 100644
index 000000000..d2adff95a
--- /dev/null
+++ b/src/client/views/nodes/RecordingBox/RecordingView.tsx
@@ -0,0 +1,349 @@
+import * as React from 'react';
+import "./RecordingView.scss";
+import { useEffect, useRef, useState } from "react";
+import { ProgressBar } from "./ProgressBar";
+import { MdBackspace } from 'react-icons/md';
+import { FaCheckCircle } from 'react-icons/fa';
+import { IconContext } from "react-icons";
+import { Networking } from '../../../Network';
+import { Upload } from '../../../../server/SharedMediaTypes';
+
+import { RecordingApi } from '../../../apis/recording/RecordingApi';
+
+
+enum RecordingStatus {
+ Recording,
+ Stopped,
+ Paused
+}
+
+interface MediaSegment {
+ videoChunks: Blob[],
+ endTime: number
+}
+
+interface IRecordingViewProps {
+ setResult: (info: Upload.FileInformation) => void
+ setDuration: (seconds: number) => void
+}
+
+const MAXTIME = 1000;
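+// Progress is computed as recordingTimer / MAXTIME, so 1000 ticks of 10 ms cap the bar at ~10 s.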
+
+export function RecordingView(props: IRecordingViewProps) {
+
+ const [recording, setRecording] = useState(false);
+ const recordingTimerRef = useRef<number>(0);
+ const [recordingTimer, setRecordingTimer] = useState(0); // unit is 0.01 second
+ const [playing, setPlaying] = useState(false);
+ const [progress, setProgress] = useState(0);
+ const [speed, setSpeed] = useState(1);
+ const [muted, setMuted] = useState(false);
+
+ const [videos, setVideos] = useState<MediaSegment[]>([]);
+ const [currentVid, setCurrentVid] = useState<number>(0);
+ const videoRecorder = useRef<MediaRecorder | null>(null);
+ const audioRecorder = useRef<MediaRecorder | null>(null);
+ const videoElementRef = useRef<HTMLVideoElement | null>(null);
+
+ const [finished, setFinished] = useState<boolean>(false);
+
+
+
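+ // Constraints handed to getUserMedia; audio capture is currently disabled,
+ // so recordings are video-only unless the commented audio options are restored.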
+ const DEFAULT_MEDIA_CONSTRAINTS = {
+ // video: true,
+ // audio: true
+ video: {
+ width: 1280,
+ height: 720,
+ },
+ // audio: true,
+ // audio: {
+ // echoCancellation: true,
+ // noiseSuppression: true,
+ // sampleRate: 44100
+ // }
+ }
+
+ useEffect(() => {
+
+ if (finished) {
+ // recordingTimer counts 10 ms ticks, so divide by 100 to get seconds
+ props.setDuration(recordingTimer / 100)
+ let allVideoChunks: any = []
+ console.log(videos)
+ videos.forEach((vid) => {
+ console.log(vid.videoChunks)
+ allVideoChunks = allVideoChunks.concat(vid.videoChunks)
+ })
+
+ console.log(allVideoChunks)
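+ // Stitch every recorded chunk into one upload file. MediaRecorder typically
+ // emits webm data, so the .mkv name here is just a label for the container.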
+ const videoFile = new File(allVideoChunks, "video.mkv", { type: allVideoChunks[0].type, lastModified: Date.now() });
+
+ // const uploadVideo = async () => {
+ // const [{ result }] = await Networking.UploadFilesToServer(videoFile);
+ // console.log("upload result", result);
+ // if (!(result instanceof Error)) {
+ // setResult(result)
+ // }
+ // }
+
+ Networking.UploadFilesToServer(videoFile)
+ .then((data) => {
+ const result = data[0].result
+ if (!(result instanceof Error)) { // convert this screenshotBox into normal videoBox
+ props.setResult(result)
+ } else {
+ alert("video conversion failed");
+ }
+ })
+ // uploadVideo()
+
+ // this.dataDoc[this.fieldKey + "-duration"] = (new Date().getTime() - this.recordingStart!) / 1000;
+
+
+
+ // change to one recording box
+ }
+
+
+ }, [finished])
+
+ useEffect(() => {
+ // check if the browser supports media devices on first load
+ if (!navigator.mediaDevices) {
+ console.log('This browser does not support getUserMedia.')
+ return
+ }
+ console.log('This device has the correct media devices.')
+ }, [])
+
+ useEffect(() => {
+ // get access to the video element on every render
+ videoElementRef.current = document.getElementById('video') as HTMLVideoElement;
+ })
+
+ useEffect(() => {
+ // tick the timer every 10 ms while recording; the cleanup clears the interval
+ let interval: ReturnType<typeof setInterval> | undefined;
+ if (recording) {
+ interval = setInterval(() => {
+ setRecordingTimer(unit => unit + 1);
+ }, 10);
+ }
+ return () => clearInterval(interval);
+ }, [recording])
+
+ useEffect(() => {
+ setVideoProgressHelper(recordingTimer)
+ recordingTimerRef.current = recordingTimer;
+ }, [recordingTimer])
+
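+ // Map elapsed timer ticks onto the 0-100% progress bar (MAXTIME ticks = 100%).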
+ const setVideoProgressHelper = (progress: number) => {
+ const newProgress = (progress / MAXTIME) * 100;
+ setProgress(newProgress)
+ }
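+ // Attach a fresh camera stream to the <video> element, muting the local preview.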
+ const startShowingStream = async (mediaConstraints = DEFAULT_MEDIA_CONSTRAINTS) => {
+ const stream = await navigator.mediaDevices.getUserMedia(mediaConstraints)
+
+ videoElementRef.current!.src = ""
+ videoElementRef.current!.srcObject = stream
+ videoElementRef.current!.muted = true
+
+ return stream
+ }
+
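+ // Request a stream, wire up the MediaRecorder lifecycle handlers, and start
+ // capturing; chunks accumulate per segment and are flushed on pause/stop.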
+ const record = async () => {
+ const stream = await startShowingStream();
+ videoRecorder.current = new MediaRecorder(stream)
+ // audioRecorder.current = new MediaRecorder(await navigator.mediaDevices.getUserMedia({ audio: true }));
+
+ // temporary chunks of video
+ let videoChunks: any = []
+ // let audioChunks: any = []
+
+ videoRecorder.current.ondataavailable = (event: any) => {
+ if (event.data.size > 0) {
+ videoChunks.push(event.data)
+ }
+ }
+
+ // audioRecorder.current.ondataavailable = (event: any) => {
+ // if (event.data.size > 0) {
+ // audioChunks.push(event.data)
+ // }
+ // }
+
+ videoRecorder.current.onstart = (event: any) => {
+ setRecording(true);
+ }
+
+ videoRecorder.current.onstop = () => {
+ // if we have a last portion
+ if (videoChunks.length > 0) {
+ // append the current portion to the video pieces
+ setVideos(videos => [...videos, { videoChunks: videoChunks, endTime: recordingTimerRef.current }])
+ }
+
+ // reset the temporary chunks
+ videoChunks = []
+ setRecording(false);
+ setFinished(true);
+ }
+
+ // recording paused
+ videoRecorder.current.onpause = (event: any) => {
+ // append the current portion to the video pieces
+ console.log(videoChunks)
+ setVideos(videos => [...videos, { videoChunks: videoChunks, endTime: recordingTimerRef.current }])
+
+ // reset the temporary chunks
+ videoChunks = []
+ setRecording(false);
+ }
+
+ videoRecorder.current.onresume = async (event: any) => {
+ console.log(event)
+ await startShowingStream();
+ setRecording(true);
+ }
+
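+ // emit a dataavailable event every 200 ms so chunks accumulate while recording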
+ videoRecorder.current.start(200)
+ }
+
+
+ const stop = () => {
+ if (videoRecorder.current) {
+ if (videoRecorder.current.state !== "inactive") {
+ videoRecorder.current.stop();
+ // recorder.current.stream.getTracks().forEach((track: any) => track.stop())
+ }
+ }
+ }
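+
+ // A minimal sketch (not wired into the original flow) of how the camera could
+ // be released once recording ends, assuming the recorder still holds its stream:
+ // const releaseStream = () => {
+ //     videoRecorder.current?.stream.getTracks().forEach(track => track.stop());
+ // };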
+
+ const pause = () => {
+ if (videoRecorder.current) {
+ if (videoRecorder.current.state === "recording") {
+ videoRecorder.current.pause();
+ }
+ }
+ }
+
+ const startOrResume = () => {
+ console.log('[RecordingView.tsx] startOrResume')
+ if (!videoRecorder.current || videoRecorder.current.state === "inactive") {
+ record();
+ } else if (videoRecorder.current.state === "paused") {
+ videoRecorder.current.resume();
+ }
+ }
+
+ // const playSegment = (idx: number) => {
+ // console.log(idx)
+ // let currentChunks = videos[idx].chunks
+ // console.log(currentChunks)
+
+ // const blob = new Blob(currentChunks, {
+ // type: 'video/webm'
+ // })
+ // const blobUrl = URL.createObjectURL(blob)
+ // console.log(blobUrl)
+
+ // videoElementRef.current!.srcObject = null
+ // videoElementRef.current!.src = blobUrl
+ // videoElementRef.current!.muted = false
+ // }
+
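+ // Drop the most recent segment and rewind the timer/progress to the end of
+ // the previous segment (or to zero when only one segment exists).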
+ const clearPrevious = () => {
+ const numVideos = videos.length
+ const prevEndTime = numVideos === 1 ? 0 : videos[numVideos - 2].endTime
+ setRecordingTimer(prevEndTime)
+ setVideoProgressHelper(prevEndTime)
+ setVideos(videos.filter((_, idx) => idx !== numVideos - 1));
+ }
+
+ // const handleVideoProgress = (event: any) => {
+ // const manualChange = Number(event.target.value);
+ // videoElement!.currentTime = (videoElement!.duration / 100) * manualChange;
+ // setProgress(manualChange)
+ // };
+
+ // const handleVideoSpeed = (event: any) => {
+ // const newSpeed = Number(event.target.value);
+ // videoElement!.playbackRate = speed;
+ // setSpeed(newSpeed)
+ // };
+
+ const handleOnTimeUpdate = () => {
+ if (playing) {
+ setVideoProgressHelper(videoElementRef.current!.currentTime)
+ }
+ };
+
+ const millisecondToMinuteSecond = (milliseconds: number) => {
+ const toTwoDigit = (digit: number) => String(digit).padStart(2, "0")
+ const minutes = Math.floor((milliseconds % (1000 * 60 * 60)) / (1000 * 60));
+ const seconds = Math.floor((milliseconds % (1000 * 60)) / 1000);
+ return toTwoDigit(minutes) + " : " + toTwoDigit(seconds);
+ }
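+ // e.g. millisecondToMinuteSecond(83000) returns "01 : 23"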
+
+
+
+
+ useEffect(() => {
+ console.log(videos.map((elt) => elt.endTime / MAXTIME * 100))
+ console.log(videos)
+ }, [videos])
+
+ return (
+ <div className="recording-container">
+ <div className="video-wrapper">
+ <video id="video"
+ autoPlay
+ muted
+ onTimeUpdate={handleOnTimeUpdate}
+ />
+ <div className="recording-sign">
+ <span className="dot" />
+ <p className="timer">{millisecondToMinuteSecond(recordingTimer * 10)}</p>
+ </div>
+ <div className="controls">
+
+ <div className="controls-inner-container">
+ <div className="record-button-wrapper">
+ {recording ?
+ <button className="stop-button" onClick={pause} /> :
+ <button className="record-button" onClick={startOrResume} />
+ }
+ </div>
+ {!recording && videos.length > 0 ?
+
+ <div className="video-edit-wrapper">
+ <IconContext.Provider value={{ color: "grey", className: "video-edit-buttons" }}>
+ <MdBackspace onClick={clearPrevious} />
+ </IconContext.Provider>
+ <IconContext.Provider value={{ color: "#cc1c08", className: "video-edit-buttons" }}>
+ <FaCheckCircle onClick={stop} />
+ </IconContext.Provider>
+ </div>
+
+ : <></>}
+
+ </div>
+
+ <ProgressBar
+ progress={progress}
+ marks={videos.map((elt) => elt.endTime / MAXTIME * 100)}
+ // playSegment={playSegment}
+ />
+
+ {/* <button className="mute-btn" onClick={() => setMuted(!muted)}>
+ {!muted ? (
+ <i className="bx bxs-volume-full"></i>
+ ) : (
+ <i className="bx bxs-volume-mute"></i>
+ )}
+ </button> */}
+ </div>
+ </div>
+ </div>)
+} \ No newline at end of file