author     Michael <michael.foiani@gmail.com>  2022-06-10 15:41:38 -0400
committer  Michael <michael.foiani@gmail.com>  2022-06-10 15:41:38 -0400
commit     1688af3de54419029773fb85b78bca4500f7f0de (patch)
tree       73e777815eb2c6e91e0adb3c4702bb7ef2f7178d /src
parent     3627d2597ffb52f00c3b82456b1b6693006c93fa (diff)
big bug with recording: for some reason, interacting with the canvas is making the recording inactive - i think :/
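The constructor in RecordingApi.ts below leaves a note about adding a document-level pointerdown listener to find out what is stopping the recorder. A minimal diagnostic sketch along those lines (not part of this commit; attachRecorderDiagnostics is a hypothetical helper, and in RecordingView the recorder would be videoRecorder.current):

// Log every pointerdown target together with the MediaRecorder's state, so a
// canvas interaction that coincides with the recorder flipping to "inactive"
// shows up in the console.
export function attachRecorderDiagnostics(recorder: MediaRecorder): () => void {
    const onPointerDown = (e: PointerEvent) => {
        console.log('pointerdown target:', e.target, 'recorder state:', recorder.state);
    };
    document.addEventListener('pointerdown', onPointerDown, true);
    // also surface the exact moment the recorder stops on its own
    recorder.addEventListener('stop', () => console.trace('recorder stopped'));
    // caller can detach the pointerdown probe when done debugging
    return () => document.removeEventListener('pointerdown', onPointerDown, true);
}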
Diffstat (limited to 'src')
-rw-r--r--  src/client/util/RecordingApi.ts                       153
-rw-r--r--  src/client/views/nodes/RecordingBox/ProgressBar.tsx     8
-rw-r--r--  src/client/views/nodes/RecordingBox/RecordingBox.tsx   12
-rw-r--r--  src/client/views/nodes/RecordingBox/RecordingView.tsx  99
4 files changed, 166 insertions, 106 deletions
diff --git a/src/client/util/RecordingApi.ts b/src/client/util/RecordingApi.ts
index 021feee9a..ab6935e3b 100644
--- a/src/client/util/RecordingApi.ts
+++ b/src/client/util/RecordingApi.ts
@@ -3,8 +3,6 @@ import { IReactionDisposer, observable, reaction } from "mobx";
import { NumCast } from "../../fields/Types";
import { Doc } from "../../fields/Doc";
import { VideoBox } from "../views/nodes/VideoBox";
-import { scaleDiverging } from "d3-scale";
-import { Transform } from "./Transform";
type Movement = {
time: number,
@@ -13,17 +11,17 @@ type Movement = {
scale: number,
}
-type Presentation = {
- movements: Array<Movement> | null
- meta: Object,
+export type Presentation = {
+ movements: Movement[] | null,
+ totalTime: number,
+ meta: any,
}
export class RecordingApi {
- private static NULL_PRESENTATION: Presentation = {
- movements: null,
- meta: {},
- }
+ private static get NULL_PRESENTATION(): Presentation {
+        return { movements: null, meta: {}, totalTime: -1 }
+ }
// instance variables
private currentPresentation: Presentation;
@@ -50,6 +48,8 @@ export class RecordingApi {
// for now, set playFFView
this.playFFView = null;
this.timers = null;
+
+        // put a pointerdown event on the document to see what the target is
}
// little helper :)
@@ -59,8 +59,9 @@ export class RecordingApi {
public start = (meta?: Object): Error | undefined => {
// check if already init a presentation
- if (!this.isInitPresenation) {
- console.error('[recordingApi.ts] start() failed: current presentation data exists. please call clear() first.')
+ if (!this.isInitPresenation) {
+ console.log(this.currentPresentation)
+ console.trace('[recordingApi.ts] start() failed: current presentation data exists. please call clear() first.')
return new Error('[recordingApi.ts] start()')
}
@@ -80,50 +81,61 @@ export class RecordingApi {
this.isRecording = true
}
- public clear = (): Error | Presentation => {
+    /* stops the video and returns the presentation; if no presentation, returns undefined */
+ public getPresentation = (): undefined | Presentation => {
// TODO: maybe archive the data?
- if (this.isRecording) {
- console.error('[recordingApi.ts] clear() failed: currently recording presentation. call pause() first')
- return new Error('[recordingApi.ts] clear()')
- }
-
- // update the presentation mode
- Doc.UserDoc().presentationMode = 'none'
- // set the previus recording view to the play view
- this.playFFView = this.recordingFFView
-
- const presCopy = { ...this.currentPresentation }
-
- // clear presenation data
- this.currentPresentation = RecordingApi.NULL_PRESENTATION
- // clear isRecording
- this.isRecording = false
- // clear absoluteStart
- this.absoluteStart = -1
- // clear the disposeFunc
- this.removeRecordingFFView()
-
- return presCopy;
+ if (this.isRecording) console.warn('[recordingApi.ts] getPresentation() : currently recording presentation.');
+
+ // update the presentation mode
+ Doc.UserDoc().presentationMode = 'none';
+        // set the previous recording view to the play view
+ this.playFFView = this.recordingFFView;
+
+ // ensure we add the endTime now that they are done recording
+ return { ...this.currentPresentation, totalTime: new Date().getTime() - this.absoluteStart };
+ }
+
+ public stop = (): void => {
+        // set isRecording to false
+ this.isRecording = false
+ }
+
+ public clear = (): void => {
+        // clear presentation data
+ this.currentPresentation = RecordingApi.NULL_PRESENTATION
+ // clear isRecording
+ this.isRecording = false
+ // clear absoluteStart
+ this.absoluteStart = -1
+ // clear the disposeFunc
+ this.removeRecordingFFView()
+ }
+
+
+ // call on dispose function to stop tracking movements
+ public removeRecordingFFView = (): void => {
+ this.disposeFunc?.();
+ this.disposeFunc = null;
}
- public pause = (): Error | undefined => {
- if (this.isInitPresenation) {
- console.error('[recordingApi.ts] pause() failed: no presentation started. try calling init() first')
- return new Error('[recordingApi.ts] pause(): no presentation')
- }
- // don't allow track movments
- this.isRecording = false
-
- // set adjust absoluteStart to add the time difference
- const timestamp = new Date().getTime()
- this.absoluteStart = timestamp - this.absoluteStart
- }
+ // public pause = (): Error | undefined => {
+ // if (this.isInitPresenation) {
+ // console.error('[recordingApi.ts] pause() failed: no presentation started. try calling init() first')
+ // return new Error('[recordingApi.ts] pause(): no presentation')
+ // }
+ // // don't allow track movments
+ // this.isRecording = false
+
+ // // set adjust absoluteStart to add the time difference
+ // const timestamp = new Date().getTime()
+ // this.absoluteStart = timestamp - this.absoluteStart
+ // }
- public resume = () => {
- this.isRecording = true
- // set absoluteStart to the difference in time
- this.absoluteStart = new Date().getTime() - this.absoluteStart
- }
+ // public resume = () => {
+ // this.isRecording = true
+ // // set absoluteStart to the difference in time
+ // this.absoluteStart = new Date().getTime() - this.absoluteStart
+ // }
private trackMovements = (panX: number, panY: number, scale: number = 0): Error | undefined => {
// ensure we are recording
@@ -134,6 +146,8 @@ export class RecordingApi {
if (this.isInitPresenation) {
return new Error('[recordingApi.ts] trackMovements(): no presentation')
}
+
+ console.log('track movment')
// get the time
const time = new Date().getTime() - this.absoluteStart
@@ -164,12 +178,6 @@ export class RecordingApi {
this.recordingFFView = view;
}
- // call on dispose function to stop tracking movements
- public removeRecordingFFView = (): void => {
- this.disposeFunc?.();
- this.disposeFunc = null;
- }
-
// TODO: extract this into different class with pause and resume recording
// TODO: store the FFview with the movements
private playFFView: CollectionFreeFormView | null;
@@ -250,6 +258,37 @@ export class RecordingApi {
}, timeDiff)
})
}
+
+    // public method that concatenates the movements of an array of presentations into one array
+ // TODO: consider the meta data of the presentations
+ public concatPresentations = (presentations: Presentation[]): Presentation => {
+ console.info(presentations);
+ if (presentations.length === 0) return RecordingApi.NULL_PRESENTATION;
+ const firstPresentation = presentations[0];
+
+ let sumTime = presentations[0].totalTime;
+ let combinedPresentations = { ...firstPresentation }
+ presentations.forEach((presentation, i) => {
+ // already consider the first presentation
+ if (i === 0) return;
+
+ const { movements, totalTime } = presentation;
+ if (movements === null) return;
+
+ // add the summed time to the movements
+ const addedTimeMovements = movements.map(move => { return { ...move, time: move.time + sumTime } });
+ // concat the movements already in the combined presentation with these new ones
+ const newMovements = [...combinedPresentations.movements || [], ...addedTimeMovements];
+
+ combinedPresentations = { ...combinedPresentations, movements: newMovements }
+
+ // update the totalTime
+ sumTime += totalTime;
+ });
+
+ // return the combined presentation with the updated total summed time
+ return { ...combinedPresentations, totalTime: sumTime };
+ }
// Unfinished code for tracing multiple free form views
// export let pres: Map<CollectionFreeFormView, IReactionDisposer> = new Map()
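For context, a minimal usage sketch of how the new getPresentation / concatPresentations pairing stitches per-segment screen data together (illustrative only; segA, segB, and the import path are assumptions, not code from this commit):

import { RecordingApi, Presentation } from "../../../util/RecordingApi";

// Hypothetical per-segment data, as returned by getPresentation() when two
// consecutive segments were recorded with screen tracking enabled.
declare const segA: Presentation;   // e.g. totalTime: 5000
declare const segB: Presentation;   // movement times are relative to segB's own start

// concatPresentations offsets segB's movement times by segA.totalTime so the
// combined timeline lines up with the concatenated video, and sums totalTime.
const combined = RecordingApi.Instance.concatPresentations([segA, segB]);
// e.g. a movement recorded 500ms into segB appears at segA.totalTime + 500 in combined.movements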
diff --git a/src/client/views/nodes/RecordingBox/ProgressBar.tsx b/src/client/views/nodes/RecordingBox/ProgressBar.tsx
index 493069394..1bb2b7c84 100644
--- a/src/client/views/nodes/RecordingBox/ProgressBar.tsx
+++ b/src/client/views/nodes/RecordingBox/ProgressBar.tsx
@@ -161,12 +161,12 @@ export function ProgressBar(props: ProgressBarProps) {
}
// pointerdown event for the progress bar
- const onPointerDown = (e: React.PointerEvent<HTMLDivElement>) => {
- // don't move the videobox element
- e.stopPropagation()
+ const onPointerDown = (e: React.PointerEvent<HTMLDivElement>) => {
+ // don't move the videobox element
+ e.stopPropagation();
// if recording, do nothing
- if (props.recording) return;
+ if (props.recording) return;
// get the segment the user clicked on to be dragged
const clickedSegment = e.target as HTMLDivElement & EventTarget
diff --git a/src/client/views/nodes/RecordingBox/RecordingBox.tsx b/src/client/views/nodes/RecordingBox/RecordingBox.tsx
index 5e97e3eb5..6fe67b6db 100644
--- a/src/client/views/nodes/RecordingBox/RecordingBox.tsx
+++ b/src/client/views/nodes/RecordingBox/RecordingBox.tsx
@@ -8,8 +8,8 @@ import { FieldView } from "../FieldView";
import { VideoBox } from "../VideoBox";
import { RecordingView } from './RecordingView';
import { DocumentType } from "../../../documents/DocumentTypes";
-import { RecordingApi } from "../../../util/RecordingApi";
-import { Doc, FieldsSym } from "../../../../fields/Doc";
+import { Presentation } from "../../../util/RecordingApi";
+import { Doc } from "../../../../fields/Doc";
import { Id } from "../../../../fields/FieldSymbols";
@@ -21,7 +21,7 @@ export class RecordingBox extends ViewBoxBaseComponent() {
private _ref: React.RefObject<HTMLDivElement> = React.createRef();
constructor(props: any) {
- super(props);
+ super(props);
}
componentDidMount() {
@@ -38,7 +38,7 @@ export class RecordingBox extends ViewBoxBaseComponent() {
}
@action
- setResult = (info: Upload.AccessPathInfo, trackScreen: boolean) => {
+ setResult = (info: Upload.AccessPathInfo, presentation?: Presentation) => {
this.result = info
this.dataDoc.type = DocumentType.VID;
this.dataDoc[this.fieldKey + "-duration"] = this.videoDuration;
@@ -47,9 +47,7 @@ export class RecordingBox extends ViewBoxBaseComponent() {
this.dataDoc[this.props.fieldKey] = new VideoField(this.result.accessPaths.client);
this.dataDoc[this.fieldKey + "-recorded"] = true;
        // stringify the presentation and store it
- if (trackScreen) {
- this.dataDoc[this.fieldKey + "-presentation"] = JSON.stringify(RecordingApi.Instance.clear());
- }
+ presentation?.movements && (this.dataDoc[this.fieldKey + "-presentation"] = JSON.stringify(presentation));
}
render() {
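Since setResult now stringifies the stitched presentation onto the data doc, playback code can recover it by parsing that field. A hedged read-back sketch (readPresentation is a hypothetical helper, not part of this commit):

import { Presentation } from "../../../util/RecordingApi";

// Hypothetical helper: recover the screen-tracking data that setResult() stored
// under `${fieldKey}-presentation`; returns undefined if nothing was stored.
function readPresentation(dataDoc: any, fieldKey: string): Presentation | undefined {
    const raw = dataDoc[fieldKey + "-presentation"];
    return typeof raw === "string" ? (JSON.parse(raw) as Presentation) : undefined;
}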
diff --git a/src/client/views/nodes/RecordingBox/RecordingView.tsx b/src/client/views/nodes/RecordingBox/RecordingView.tsx
index 138e72274..208eaf45a 100644
--- a/src/client/views/nodes/RecordingBox/RecordingView.tsx
+++ b/src/client/views/nodes/RecordingBox/RecordingView.tsx
@@ -8,16 +8,17 @@ import { IconContext } from "react-icons";
import { Networking } from '../../../Network';
import { Upload } from '../../../../server/SharedMediaTypes';
import { returnFalse, returnTrue, setupMoveUpEvents } from '../../../../Utils';
-import { RecordingApi } from '../../../util/RecordingApi';
+import { Presentation, RecordingApi } from '../../../util/RecordingApi';
export interface MediaSegment {
videoChunks: any[],
endTime: number,
- startTime: number
+ startTime: number,
+ presenation?: Presentation,
}
interface IRecordingViewProps {
- setResult: (info: Upload.AccessPathInfo, trackScreen: boolean) => void
+ setResult: (info: Upload.AccessPathInfo, presentation?: Presentation) => void
setDuration: (seconds: number) => void
id: string
}
@@ -58,13 +59,18 @@ export function RecordingView(props: IRecordingViewProps) {
sampleRate: 44100
}
}
+
+ useEffect(() => console.info('progess', progress), [progress])
useEffect(() => {
- if (finished) {
+ if (finished) {
+ // make the total presentation that'll match the concatted video
+ const concatPres = trackScreen ? RecordingApi.Instance.concatPresentations(videos.map(v => v.presenation as Presentation)) : undefined;
+
            // this async function uses the server to create the concatted video and then sets the result to its accessPaths
(async () => {
- const videoFiles = videos.map((vid, i) => new File(vid.videoChunks, `segvideo${i}.mkv`, { type: vid.videoChunks[0].type, lastModified: Date.now() }));
+ const videoFiles = videos.map((vid, i) => new File(vid.videoChunks, `segvideo${i}.mkv`, { type: vid.videoChunks[0].type, lastModified: Date.now() }));
// upload the segments to the server and get their server access paths
const serverPaths: string[] = (await Networking.UploadFilesToServer(videoFiles))
@@ -72,7 +78,7 @@ export function RecordingView(props: IRecordingViewProps) {
// concat the segments together using post call
const result: Upload.AccessPathInfo | Error = await Networking.PostToServer('/concatVideos', serverPaths);
- !(result instanceof Error) ? props.setResult(result, trackScreen) : console.error("video conversion failed");
+ !(result instanceof Error) ? props.setResult(result, concatPres) : console.error("video conversion failed");
})();
}
}, [videos])
@@ -133,53 +139,70 @@ export function RecordingView(props: IRecordingViewProps) {
trackScreen && RecordingApi.Instance.start();
}
- videoRecorder.current.onstop = () => {
+ videoRecorder.current.onstop = () => {
+ RecordingApi.Instance.stop();
// if we have a last portion
- if (videoChunks.length > 1) {
+ if (videoChunks.length > 1) {
// append the current portion to the video pieces
- setVideos(videos => [...videos, { videoChunks: videoChunks, endTime: recordingTimerRef.current, startTime: videos?.lastElement()?.endTime || 0 }])
+ setVideos(videos => [...videos, {
+ videoChunks: videoChunks,
+ endTime: recordingTimerRef.current,
+ startTime: videos?.lastElement()?.endTime || 0,
+                        // getPresentation() returns undefined if the screen was not tracked
+ presenation: RecordingApi.Instance.getPresentation()
+ }])
+                        // now that we have the presentation data, clear it so the next segment can be recorded
+ RecordingApi.Instance.clear();
}
- // reset the temporary chunks
- videoChunks = []
- setRecording(false);
- trackScreen && RecordingApi.Instance.pause();
+ // reset the temporary chunks
+ videoChunks = []
+ setRecording(false);
}
videoRecorder.current.start(200)
}
- const stop = (e: React.PointerEvent) => {
- e.stopPropagation();
- if (videoRecorder.current) {
- if (videoRecorder.current.state !== "inactive") {
- videoRecorder.current.stop();
- }
-
- // this will call upon progessbar to update videos to be in the correct order
- setFinished(true);
-
- // end the streams (audio/video) to remove recording icon
- const stream = videoElementRef.current!.srcObject;
- stream instanceof MediaStream && stream.getTracks().forEach(track => track.stop());
- }
+ const finish = (e: React.PointerEvent) => {
+ e.stopPropagation();
+ console.log('finish', videoRecorder.current)
+ // if inactive, then we're done recording all the segments
+ if (videoRecorder.current && videoRecorder.current.state !== "inactive") {
+ console.log('stopping recorder', videoRecorder.current?.state)
+
+
+
+ // call stop on the video recorder
+ videoRecorder.current?.stop();
+
+ // end the streams (audio/video) to remove recording icon
+ const stream = videoElementRef.current!.srcObject;
+ stream instanceof MediaStream && stream.getTracks().forEach(track => track.stop());
+
+            // clear the RecordingApi - already handled in the recorder's onstop handler
+ // RecordingApi.Instance.clear();
+
+            // this will prompt the ProgressBar to update videos to be in the correct order
+ console.log('setFinished to true', finished)
+ setFinished(true);
+ }
}
const pause = (e: React.PointerEvent) => {
- e.stopPropagation()
- if (videoRecorder.current) {
- if (videoRecorder.current.state === "recording") {
- videoRecorder.current.stop();
- }
- }
+ e.stopPropagation()
+            // if recording, stop the recorder - this just closes out the current segment
+ videoRecorder.current?.state === "recording" && videoRecorder.current.stop();
}
- const startOrResume = (e: React.PointerEvent) => {
+ const start = (e: React.PointerEvent) => {
// the code to start or resume does not get triggered if we start dragging the button
setupMoveUpEvents({}, e, returnTrue, returnFalse, e => {
- (!videoRecorder.current || videoRecorder.current.state === "inactive") && record();
- return true; // cancels propagation to documentView to avoid selecting it.
+ if (!videoRecorder.current || videoRecorder.current.state === "inactive") {
+ record();
+ // trackScreen && RecordingApi.Instance.start();
+ }
+ return true; // cancels propagation to documentView to avoid selecting it.
}, false, false);
}
@@ -218,7 +241,7 @@ export function RecordingView(props: IRecordingViewProps) {
<div className="record-button-wrapper">
{recording ?
<button className="stop-button" onPointerDown={pause} /> :
- <button className="record-button" onPointerDown={startOrResume} />
+ <button className="record-button" onPointerDown={start} />
}
</div>
@@ -229,7 +252,7 @@ export function RecordingView(props: IRecordingViewProps) {
<MdBackspace onPointerDown={undoPrevious} />
</IconContext.Provider>
<IconContext.Provider value={{ color: "#cc1c08", className: "video-edit-buttons" }}>
- <FaCheckCircle onPointerDown={stop} />
+ <FaCheckCircle onPointerDown={finish} />
</IconContext.Provider>
</div>