From daeed5592522c01ea2c38a438c24d914f117530b Mon Sep 17 00:00:00 2001
From: Michael
Date: Mon, 13 Jun 2022 12:18:55 -0400
Subject: small fix on recordingApi to work with segments. now tracks accurately

---
 src/client/util/RecordingApi.ts                | 53 ++++++++++++----------
 .../views/nodes/RecordingBox/RecordingView.tsx | 27 ++++++-----
 2 files changed, 44 insertions(+), 36 deletions(-)

(limited to 'src')

diff --git a/src/client/util/RecordingApi.ts b/src/client/util/RecordingApi.ts
index ab6935e3b..009652f6e 100644
--- a/src/client/util/RecordingApi.ts
+++ b/src/client/util/RecordingApi.ts
@@ -25,7 +25,7 @@ export class RecordingApi {
 
     // instance variables
     private currentPresentation: Presentation;
-    private isRecording: boolean;
+    private tracking: boolean;
 
     private absoluteStart: number;
 
@@ -38,7 +38,7 @@ export class RecordingApi {
 
         // init the instance variables
         this.currentPresentation = RecordingApi.NULL_PRESENTATION
-        this.isRecording = false;
+        this.tracking = false;
         this.absoluteStart = -1;
 
         // used for tracking movements in the view frame
@@ -57,58 +57,61 @@ export class RecordingApi {
         return this.currentPresentation.movements === null
     }
 
-    public start = (meta?: Object): Error | undefined => {
+    public start = (meta?: Object) => {
         // check if already init a presentation
         if (!this.isInitPresenation) {
             console.log(this.currentPresentation)
-            console.trace('[recordingApi.ts] start() failed: current presentation data exists. please call clear() first.')
-            return new Error('[recordingApi.ts] start()')
+            console.trace('[recordingApi.ts] start() failed: current presentation data exists. please call clear() first.')
         }
 
         // update the presentation mode
-        Doc.UserDoc().presentationMode = 'recording'
+        Doc.UserDoc().presentationMode = 'recording';
 
         // (1a) get start date for presenation
-        const startDate = new Date()
+        const startDate = new Date();
         // (1b) set start timestamp to absolute timestamp
-        this.absoluteStart = startDate.getTime()
+        this.absoluteStart = startDate.getTime();
 
         // (2) assign meta content if it exists
         this.currentPresentation.meta = meta || {}
 
         // (3) assign start date to currentPresenation
        this.currentPresentation.movements = []
 
-        // (4) set isRecording true to allow trackMovements
-        this.isRecording = true
+        // (4) set tracking true to allow trackMovements
+        this.tracking = true
    }
 
     /* stops the video and returns the presentatation; if no presentation, returns undefined */
-    public getPresentation = (): undefined | Presentation => {
+    public* yieldPresentation(clearData: boolean = true): Generator {
         // TODO: maybe archive the data?
-        if (this.isRecording) console.warn('[recordingApi.ts] getPresentation() : currently recording presentation.');
+        // if (this.tracking) console.warn('[recordingApi.ts] getPresentation() : currently recording presentation.');
 
         // update the presentation mode
-        Doc.UserDoc().presentationMode = 'none';
+        // Doc.UserDoc().presentationMode = 'none';
 
         // set the previus recording view to the play view
         this.playFFView = this.recordingFFView;
 
         // ensure we add the endTime now that they are done recording
-        return { ...this.currentPresentation, totalTime: new Date().getTime() - this.absoluteStart };
+        yield { ...this.currentPresentation, totalTime: new Date().getTime() - this.absoluteStart };
+
+        // reset the current presentation
+        clearData && this.clear();
     }
 
     public stop = (): void => {
-        // make is recording false
-        this.isRecording = false
+        // make is tracking false
+        this.tracking = false
     }
 
     public clear = (): void => {
+        // clear the disposeFunc if we are done (not recording)
+        if (!this.tracking)
+            this.removeRecordingFFView()
         // clear presenation data
         this.currentPresentation = RecordingApi.NULL_PRESENTATION
         // clear isRecording
-        this.isRecording = false
+        // this.tracking = false
         // clear absoluteStart
         this.absoluteStart = -1
-        // clear the disposeFunc
-        this.removeRecordingFFView()
     }
 
 
@@ -124,7 +127,7 @@ export class RecordingApi {
     //         return new Error('[recordingApi.ts] pause(): no presentation')
     //     }
     //     // don't allow track movments
-    //     this.isRecording = false
+    //     this.tracking = false
 
     //     // set adjust absoluteStart to add the time difference
     //     const timestamp = new Date().getTime()
@@ -132,14 +135,14 @@ export class RecordingApi {
     // }
 
     // public resume = () => {
-    //     this.isRecording = true
+    //     this.tracking = true
 
    //     // set absoluteStart to the difference in time
    //     this.absoluteStart = new Date().getTime() - this.absoluteStart
     // }
 
     private trackMovements = (panX: number, panY: number, scale: number = 0): Error | undefined => {
         // ensure we are recording
-        if (!this.isRecording) {
+        if (!this.tracking) {
             return new Error('[recordingApi.ts] trackMovements()')
         }
         // check to see if the presetation is init
@@ -171,7 +174,7 @@ export class RecordingApi {
         // set the reaction to track the movements
         this.disposeFunc = reaction(
             () => ({ x: NumCast(view.Document.panX, -1), y: NumCast(view.Document.panY, -1), scale: NumCast(view.Document.viewScale, -1) }),
-            (res) => (res.x !== -1 && res.y !== -1 && this.isRecording) && this.trackMovements(res.x, res.y, res.scale)
+            (res) => (res.x !== -1 && res.y !== -1 && this.tracking) && this.trackMovements(res.x, res.y, res.scale)
         )
 
         // for now, set the most recent recordingFFView to the playFFView
@@ -262,11 +265,11 @@ export class RecordingApi {
     // make a public method that concatenates the movements of the an array of presentations into one array
     // TODO: consider the meta data of the presentations
     public concatPresentations = (presentations: Presentation[]): Presentation => {
-        console.info(presentations);
+        console.table(presentations);
         if (presentations.length === 0) return RecordingApi.NULL_PRESENTATION;
 
         const firstPresentation = presentations[0];
-        let sumTime = presentations[0].totalTime;
+        let sumTime = firstPresentation.totalTime;
         let combinedPresentations = { ...firstPresentation }
         presentations.forEach((presentation, i) => {
             // already consider the first presentation
diff --git a/src/client/views/nodes/RecordingBox/RecordingView.tsx b/src/client/views/nodes/RecordingBox/RecordingView.tsx
index 380ac050d..be9f342bb 100644
--- a/src/client/views/nodes/RecordingBox/RecordingView.tsx
+++ b/src/client/views/nodes/RecordingBox/RecordingView.tsx
@@ -14,7 +14,7 @@ export interface MediaSegment {
     videoChunks: any[],
     endTime: number,
     startTime: number,
-    presenation?: Presentation,
+    presentation?: Presentation,
 }
 
 interface IRecordingViewProps {
@@ -60,13 +60,14 @@ export function RecordingView(props: IRecordingViewProps) {
         }
     }
 
-    useEffect(() => console.info('progess', progress), [progress])
+    // useEffect(() => console.info('progress', progress), [progress])
 
     useEffect(() => {
         if (finished) {
             // make the total presentation that'll match the concatted video
-            const concatPres = trackScreen ? RecordingApi.Instance.concatPresentations(videos.map(v => v.presenation as Presentation)) : undefined;
+            const concatPres = RecordingApi.Instance.concatPresentations(videos.map(v => v.presentation as Presentation));
+            console.log('concatPres', concatPres);
 
             // this async function uses the server to create the concatted video and then sets the result to it's accessPaths
             (async () => {
@@ -91,7 +92,8 @@ export function RecordingView(props: IRecordingViewProps) {
     // check if the browser supports media devices on first load
     useEffect(() => { if (!navigator.mediaDevices) alert('This browser does not support getUserMedia.'); }, [])
 
-    useEffect(() => {
+    useEffect(() => {
+        console.log('recording useEffect', recording)
         let interval: any = null;
         if (recording) {
             interval = setInterval(() => {
@@ -136,23 +138,26 @@ export function RecordingView(props: IRecordingViewProps) {
 
         videoRecorder.current.onstart = (event: any) => {
             setRecording(true);
+            // trackScreen && RecordingApi.Instance.start();
             trackScreen && RecordingApi.Instance.start();
         }
 
         videoRecorder.current.onstop = () => {
-            RecordingApi.Instance.stop();
+            // RecordingApi.Instance.stop();
             // if we have a last portion
-            if (videoChunks.length > 1) {
+            if (videoChunks.length > 1) {
+                const presentation = RecordingApi.Instance.yieldPresentation().next().value || undefined
+                console.log('presenation yield', JSON.parse(JSON.stringify(presentation)))
                 // append the current portion to the video pieces
                 setVideos(videos => [...videos, {
                     videoChunks: videoChunks,
                     endTime: recordingTimerRef.current,
                     startTime: videos?.lastElement()?.endTime || 0,
                     // RecordingApi.stop() will return undefined if no track screen
-                    presenation: RecordingApi.Instance.getPresentation()
+                    presentation
                 }])
                 // now that we got the presentation data, we can clear for the next segment to be recorded
-                RecordingApi.Instance.clear();
+                // RecordingApi.Instance.clear();
             }
 
             // reset the temporary chunks
@@ -193,7 +198,7 @@ export function RecordingView(props: IRecordingViewProps) {
         setupMoveUpEvents({}, e, returnTrue, returnFalse, e => {
             if (!videoRecorder.current || videoRecorder.current.state === "inactive") {
                 record();
-                // trackScreen && RecordingApi.Instance.start();
+                // trackScreen &&
             }
             return true; // cancels propagation to documentView to avoid selecting it.
         }, false, false);
@@ -204,7 +209,7 @@ export function RecordingView(props: IRecordingViewProps) {
         setDoUndo(prev => !prev);
     }
 
-    const handleOnTimeUpdate = () => playing && setVideoProgressHelper(videoElementRef.current!.currentTime);
+    const handleOnTimeUpdate = () => { playing && setVideoProgressHelper(videoElementRef.current!.currentTime); };
 
     const millisecondToMinuteSecond = (milliseconds: number) => {
         const toTwoDigit = (digit: number) => {
@@ -221,7 +226,7 @@
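
For reference, the segment flow this commit sets up is: RecordingView's onstop handler pulls each segment's Presentation out of the new generator via yieldPresentation().next().value, stores it on the MediaSegment, and once recording is finished concatPresentations stitches the per-segment data to match the concatenated video. Below is a minimal, self-contained sketch of that flow; RecorderModel, Movement, and the simplified Presentation shape are illustrative assumptions for this sketch, not the actual Dash types.

// Minimal model of the segment-recording flow (illustrative only; not the Dash classes).
type Movement = { panX: number; panY: number; scale: number; time: number };
type Presentation = { movements: Movement[]; totalTime: number; meta?: Object };

class RecorderModel {
    private tracking = false;
    private absoluteStart = -1;
    private current: Presentation = { movements: [], totalTime: 0 };

    // start(): reset state and begin accepting movements for a new segment
    start(meta?: Object) {
        this.absoluteStart = Date.now();
        this.current = { movements: [], totalTime: 0, meta: meta ?? {} };
        this.tracking = true;
    }

    // stop(): stop accepting movements but keep the data until it is yielded
    stop() { this.tracking = false; }

    // track(): record one pan/zoom sample relative to the segment start
    track(panX: number, panY: number, scale: number) {
        if (!this.tracking) return;
        this.current.movements.push({ panX, panY, scale, time: Date.now() - this.absoluteStart });
    }

    // yieldPresentation(): yield a snapshot with the elapsed time; the clean-up after
    // the yield only runs if the caller resumes the generator with a second next()
    *yieldPresentation(clearData: boolean = true): Generator<Presentation> {
        yield { ...this.current, totalTime: Date.now() - this.absoluteStart };
        if (clearData) this.current = { movements: [], totalTime: 0 };
    }

    // concat(): in this sketch, offset each later segment's movement timestamps by the
    // running total so they stay monotonic across the stitched video
    concat(segments: Presentation[]): Presentation {
        if (segments.length === 0) return { movements: [], totalTime: 0 };
        let sumTime = segments[0].totalTime;
        const combined: Presentation = { ...segments[0], movements: [...segments[0].movements] };
        segments.slice(1).forEach(seg => {
            combined.movements.push(...seg.movements.map(m => ({ ...m, time: m.time + sumTime })));
            sumTime += seg.totalTime;
        });
        return { ...combined, totalTime: sumTime };
    }
}

// Per-segment usage, mirroring the shape of RecordingView's onstop handler:
const recorder = new RecorderModel();
recorder.start();
recorder.track(0, 0, 1);
recorder.stop();
const gen = recorder.yieldPresentation();
const segment = gen.next().value;   // snapshot for this segment
gen.next();                         // resume past the yield so the clean-up actually runs
console.log(segment);

One thing the sketch makes explicit: code placed after a yield only executes when the generator is resumed, so a caller that reads .next().value exactly once keeps the old segment state until it is cleared some other way.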