import * as React from 'react';
import "./RecordingView.scss";
import { ReactElement, useCallback, useEffect, useRef, useState } from "react";
import { ProgressBar } from "./ProgressBar"
import { MdBackspace } from 'react-icons/md';
import { FaCheckCircle } from 'react-icons/fa';
import { IconContext } from "react-icons";
import { Networking } from '../../../Network';
import { Upload } from '../../../../server/SharedMediaTypes';
import { RecordingApi } from '../../../util/RecordingApi';
import { DashUploadUtils } from '../../../../server/DashUploadUtils';

/**
 * One recorded segment of video: the raw MediaRecorder chunks plus the
 * recording-timer values (in 0.01s ticks, see `recordingTimer`) at which the
 * segment started and ended.
 */
export interface MediaSegment {
    videoChunks: any[],   // Blob chunks delivered by MediaRecorder.ondataavailable
    endTime: number,      // timer tick when this segment ended
    startTime: number     // timer tick when this segment began (= previous segment's endTime, or 0)
}

interface IRecordingViewProps {
    // Called with the uploaded/concatenated file info once recording is finished.
    setResult: (info: Upload.FileInformation, trackScreen: boolean) => void
    // NOTE(review): setDuration is never called in the visible code (only in a
    // commented-out line) — confirm whether callers rely on it.
    setDuration: (seconds: number) => void
    // Used to locate this instance's <video> element via id `video-${id}`.
    id: string
}

// Upper bound for the progress bar, in 0.01s timer ticks (i.e. ~1000 seconds).
const MAXTIME = 100000;

/**
 * Camera/mic recording UI. Captures webcam video via MediaRecorder in
 * pausable segments; when the user stops, every segment is uploaded to the
 * server, concatenated there, and the result is handed back through
 * `props.setResult`. Optionally mirrors start/pause/resume into
 * RecordingApi.Instance when `trackScreen` is on.
 */
export function RecordingView(props: IRecordingViewProps) {
    const [recording, setRecording] = useState(false);
    // Mirror of recordingTimer kept in a ref so MediaRecorder event handlers
    // (which close over the first render) can read the current value.
    const recordingTimerRef = useRef(0);
    const [recordingTimer, setRecordingTimer] = useState(0); // unit is 0.01 second
    // NOTE(review): `playing` is read in handleOnTimeUpdate but no setter call
    // is visible here — presumably wired up in the (stripped) JSX; verify.
    const [playing, setPlaying] = useState(false);
    const [progress, setProgress] = useState(0);
    // NOTE(review): useState([]) infers never[]; should be
    // useState<MediaSegment[]>([]) so vid.videoChunks etc. type-check.
    const [videos, setVideos] = useState([]);
    const [orderVideos, setOrderVideos] = useState(false);
    // NOTE(review): useRef(null) gives RefObject<null>; MediaRecorder methods
    // on .current only compile if the project relaxes strictness — consider
    // useRef<MediaRecorder | null>(null).
    const videoRecorder = useRef(null);
    const videoElementRef = useRef(null);
    const [finished, setFinished] = useState(false)
    const [trackScreen, setTrackScreen] = useState(true)

    // 720p capture with standard audio cleanup; passed to getUserMedia.
    const DEFAULT_MEDIA_CONSTRAINTS = {
        video: {
            width: 1280,
            height: 720,
        },
        audio: {
            echoCancellation: true,
            noiseSuppression: true,
            sampleRate: 44100
        }
    }

    // Upload-and-concatenate pipeline. Runs when `videos` changes, but only
    // does work once `finished` is set (the onstop handler appends the final
    // segment, so the finishing segment arrives here via this dependency).
    useEffect(() => {
        console.log('in videos useEffect')
        if (finished) {
            (async () => {
                const inputPaths: string[] = [];
                const videoFiles: File[] = []
                // Wrap each segment's chunks in a File named by its index so the
                // server can concatenate them in order.
                // NOTE(review): the async callback here is unnecessary (body is
                // synchronous) — harmless but misleading.
                videos.forEach(async (vid, i) => {
                    console.log(vid)
                    const videoFile = new File(vid.videoChunks, `segvideo${i}.mkv`, { type: vid.videoChunks[0].type, lastModified: Date.now() });
                    videoFiles.push(videoFile);
                    const { name } = videoFile;
                    inputPaths.push(name)
                })
                console.log(inputPaths)
                const data = await Networking.UploadSegmentsAndConcatenate(videoFiles)
                console.log('data', data)
                const result = data[0].result
                if (!(result instanceof Error)) {
                    // convert this screenshotBox into normal videoBox
                    props.setResult(result, trackScreen)
                } else {
                    alert("video conversion failed");
                }
                // --- Dead experiments below: earlier server-side/fs, DashUploadUtils,
                // --- and in-browser ffmpeg.wasm concatenation attempts, kept verbatim.
                // const inputListName = 'order.txt';
                // fs.writeFileSync(inputListName, inputPaths.join('\n'));
                // var merge = ffmpeg();
                // merge.input(inputListName)
                // .inputOptions(['-f concat', '-safe 0'])
                // .outputOptions('-c copy')
                // .save('output.mp4')
                // fs.unlinkSync(inputListName);
                // const combined = await DashUploadUtils.combineSegments(videoFiles, inputPaths)
                // console.log('combined', combined)
                // const outputFile = new File(['output.mp4'], 'output.mp4', { type: 'video/mp4', lastModified: Date.now() });
                // const data = await Networking.UploadFilesToServer(combined)
                // const result = data[0].result
                // if (!(result instanceof Error)) { // convert this screenshotBox into normal videoBox
                // props.setResult(result, trackScreen)
                // } else {
                // alert("video conversion failed");
                // }
                // if (format.includes("x-matroska")) {
                // await new Promise(res => ffmpeg(file.path)
                // .videoCodec("copy") // this will copy the data instead of reencode it
                // .save(file.path.replace(".mkv", ".mp4"))
                // .on('end', res));
                // file.path = file.path.replace(".mkv", ".mp4");
                // format = ".mp4";
                // }
                // console.log('crossOriginIsolated', crossOriginIsolated)
                // props.setDuration(recordingTimer * 100)
                // console.log('Loading ffmpeg-core.js');
                // const ffmpeg = createFFmpeg({ log: true });
                // await ffmpeg.load();
                // console.log('ffmpeg-core.js loaded');
                // let allVideoChunks: any = [];
                // const inputPaths: string[] = [];
                // // write each segment into it's indexed file
                // videos.forEach(async (vid, i) => {
                // const vidName = `segvideo${i}.mkv`
                // inputPaths.push(vidName)
                // const videoFile = new File(vid.videoChunks, vidName, { type: allVideoChunks[0].type, lastModified: Date.now() });
                // ffmpeg.FS('writeFile', vidName, await fetchFile(videoFile));
                // // })
                // ffmpeg.FS('writeFile', 'order.txt', inputPaths.join('\n'));
                // console.log('concat')
                // await ffmpeg.run('-f', 'concat', '-safe', '0', '-i', 'order.txt', 'ouput.mp4');
                // const { buffer } = ffmpeg.FS('readFile', 'output.mp4');
                // const concatVideo = new File([buffer], 'concat.mp4', { type: "video/mp4" });
                // const data = await Networking.UploadFilesToServer(concatVideo)
                // const result = data[0].result
                // if (!(result instanceof Error)) { // convert this screenshotBox into normal videoBox
                // props.setResult(result, trackScreen)
                // } else {
                // alert("video conversion failed");
                // }
                // // delete all files in MEMFS
                // inputPaths.forEach(path => ffmpeg.FS('unlink', path));
                // ffmpeg.FS('unlink', 'order.txt');
                // ffmpeg.FS('unlink', 'output.mp4');
            })();
        }
    }, [videos])

    // NOTE(review): orderVideos is set here but never read in the visible code
    // — presumably consumed by the (stripped) JSX.
    useEffect(() => {
        console.log('in finish useEffect')
        if (finished) {
            setOrderVideos(true);
        }
    }, [finished])

    useEffect(() => {
        // check if the browser supports media devices on first load
        // NOTE(review): the "correct media devices" log runs even when the
        // check above fails — missing else/return.
        if (!navigator.mediaDevices) {
            console.log('This browser does not support getUserMedia.')
        }
        console.log('This device has the correct media devices.')
    }, [])

    useEffect(() => {
        // get access to the video element on every render
        videoElementRef.current = document.getElementById(`video-${props.id}`) as HTMLVideoElement;
    })

    // Drives the 0.01s recording timer while `recording` is true.
    useEffect(() => {
        let interval: any = null;
        if (recording) {
            interval = setInterval(() => {
                setRecordingTimer(unit => unit + 1);
            }, 10);
        } else if (!recording && recordingTimer !== 0) {
            // NOTE(review): `interval` is always null on this branch (it is a
            // fresh local per effect run), so this clearInterval is a no-op;
            // the cleanup function below is what actually stops the timer.
            clearInterval(interval);
        }
        return () => clearInterval(interval);
    }, [recording])

    // Keep the progress bar and the handler-visible ref in sync with the timer.
    useEffect(() => {
        setVideoProgressHelper(recordingTimer)
        recordingTimerRef.current = recordingTimer;
    }, [recordingTimer])

    // Maps a timer value (0.01s ticks) onto a 0-100 progress percentage.
    const setVideoProgressHelper = (progress: number) => {
        const newProgress = (progress / MAXTIME) * 100;
        setProgress(newProgress)
    }

    // Opens (or reopens) the camera stream and routes it into the live-preview
    // <video> element, muted to avoid feedback. Returns the stream for the recorder.
    const startShowingStream = async (mediaConstraints = DEFAULT_MEDIA_CONSTRAINTS) => {
        const stream = await navigator.mediaDevices.getUserMedia(mediaConstraints)
        videoElementRef.current!.src = ""
        videoElementRef.current!.srcObject = stream
        videoElementRef.current!.muted = true
        return stream
    }

    // Creates a fresh MediaRecorder over the camera stream and wires up the
    // segment lifecycle: chunks accumulate in `videoChunks`; pause and stop
    // flush them into `videos` as a MediaSegment.
    const record = async () => {
        const stream = await startShowingStream();
        videoRecorder.current = new MediaRecorder(stream)
        // temporary chunks of video
        let videoChunks: any = []
        videoRecorder.current.ondataavailable = (event: any) => {
            if (event.data.size > 0) {
                videoChunks.push(event.data)
            }
        }
        videoRecorder.current.onstart = (event: any) => {
            setRecording(true);
            trackScreen && RecordingApi.Instance.start();
        }
        videoRecorder.current.onstop = () => {
            // if we have a last portion
            // NOTE(review): `> 1` discards a final segment that produced exactly
            // one chunk — confirm whether `> 0` was intended.
            if (videoChunks.length > 1) {
                // append the current portion to the video pieces
                // NOTE(review): `lastElement()` looks like a project-defined
                // Array extension — verify it exists; also `|| 0` would clobber
                // a legitimate endTime of 0 (prefer `?? 0`).
                setVideos(videos => [...videos, { videoChunks: videoChunks, endTime: recordingTimerRef.current, startTime: videos?.lastElement()?.endTime || 0 }])
            }
            // reset the temporary chunks
            videoChunks = []
            setRecording(false);
            setFinished(true);
            trackScreen && RecordingApi.Instance.pause();
        }
        // recording paused
        videoRecorder.current.onpause = (event: any) => {
            // append the current portion to the video pieces
            setVideos(videos => [...videos, { videoChunks: videoChunks, endTime: recordingTimerRef.current, startTime: videos?.lastElement()?.endTime || 0 }])
            // reset the temporary chunks
            videoChunks = []
            setRecording(false);
            trackScreen && RecordingApi.Instance.pause();
        }
        videoRecorder.current.onresume = async (event: any) => {
            // Reacquire the preview stream; the recorder itself keeps its
            // original stream.
            await startShowingStream();
            setRecording(true);
            trackScreen && RecordingApi.Instance.resume();
        }
        // Emit a dataavailable chunk every 200 ms.
        videoRecorder.current.start(200)
    }

    // Stops the recorder if it is running or paused (triggers onstop above).
    const stop = () => {
        if (videoRecorder.current) {
            if (videoRecorder.current.state !== "inactive") {
                videoRecorder.current.stop();
                // recorder.current.stream.getTracks().forEach((track: any) => track.stop())
            }
        }
    }

    // Pauses an active recording (triggers onpause above).
    const pause = () => {
        if (videoRecorder.current) {
            if (videoRecorder.current.state === "recording") {
                videoRecorder.current.pause();
            }
        }
    }

    // First click starts a new recording; later clicks resume a paused one.
    const startOrResume = () => {
        if (!videoRecorder.current || videoRecorder.current.state === "inactive") {
            record();
        } else if (videoRecorder.current.state === "paused") {
            videoRecorder.current.resume();
        }
    }

    // Removes the most recent segment and rewinds the timer/progress to the
    // end of the segment before it (or 0 when only one segment existed).
    // NOTE(review): no guard for numVideos === 0 — videos[-2] would be
    // undefined; presumably the (stripped) JSX only enables this when a
    // segment exists. Verify.
    const clearPrevious = () => {
        const numVideos = videos.length
        setRecordingTimer(numVideos == 1 ? 0 : videos[numVideos - 2].endTime)
        setVideoProgressHelper(numVideos == 1 ? 0 : videos[numVideos - 2].endTime)
        setVideos(videos.filter((_, idx) => idx !== numVideos - 1));
    }

    // Playback-side progress: mirrors the <video> element's currentTime into
    // the progress bar while playing.
    // NOTE(review): currentTime is in seconds while the recording path feeds
    // 0.01s ticks into the same helper — units disagree; confirm intent.
    const handleOnTimeUpdate = () => {
        if (playing) {
            setVideoProgressHelper(videoElementRef.current!.currentTime)
        }
    };

    // Formats a millisecond count as "MM : SS", zero-padding each field.
    const millisecondToMinuteSecond = (milliseconds: number) => {
        const toTwoDigit = (digit: number) => {
            return String(digit).length == 1 ? "0" + digit : digit
        }
        const minutes = Math.floor((milliseconds % (1000 * 60 * 60)) / (1000 * 60));
        const seconds = Math.floor((milliseconds % (1000 * 60)) / 1000);
        return toTwoDigit(minutes) + " : " + toTwoDigit(seconds);
    }

    // Leftover ffmpeg.wasm transcoding experiment — entirely commented out.
    const doTranscode = async () => {
        // console.log('Start transcoding');
        // ffmpeg.FS('writeFile', 'test.avi', await fetchFile('/flame.avi'));
        // await ffmpeg.run('-i', 'test.avi', 'test.mp4');
        // console.log('Complete transcoding');
        // const data = ffmpeg.FS('readFile', 'test.mp4');
        // console.log(URL.createObjectURL(new Blob([data.buffer], { type: 'video/mp4' })));
    };

    // NOTE(review): the JSX body of this return appears to have been stripped
    // by extraction — `return ( )` as written is not valid; restore the
    // original markup from version control.
    return (
    )
}