Diffstat (limited to 'src/client')
-rw-r--r--  src/client/Network.ts                                                       |   3
-rw-r--r--  src/client/util/RecordingApi.ts                                             | 269
-rw-r--r--  src/client/util/ReplayMovements.ts                                          | 208
-rw-r--r--  src/client/util/TrackMovements.ts                                           | 257
-rw-r--r--  src/client/views/Main.tsx                                                   |   6
-rw-r--r--  src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx |  13
-rw-r--r--  src/client/views/nodes/RecordingBox/ProgressBar.scss                        | 105
-rw-r--r--  src/client/views/nodes/RecordingBox/ProgressBar.tsx                         | 328
-rw-r--r--  src/client/views/nodes/RecordingBox/RecordingBox.tsx                        |  79
-rw-r--r--  src/client/views/nodes/RecordingBox/RecordingView.scss                      |  33
-rw-r--r--  src/client/views/nodes/RecordingBox/RecordingView.tsx                       | 223
-rw-r--r--  src/client/views/nodes/VideoBox.tsx                                         |  33
-rw-r--r--  src/client/views/nodes/trails/PresElementBox.tsx                            |   4
13 files changed, 1044 insertions, 517 deletions
diff --git a/src/client/Network.ts b/src/client/Network.ts
index 3597e7b2b..b26f2458d 100644
--- a/src/client/Network.ts
+++ b/src/client/Network.ts
@@ -35,7 +35,7 @@ export namespace Networking {
const response = await fetch("/uploadFormData", parameters);
return response.json();
}
-
+
export async function UploadYoutubeToServer<T extends Upload.FileInformation = Upload.FileInformation>(videoId: string): Promise<Upload.FileResponse<T>[]> {
const parameters = {
method: 'POST',
@@ -45,5 +45,6 @@ export namespace Networking {
const response = await fetch("/uploadYoutubeVideo", parameters);
return response.json();
}
+
} \ No newline at end of file
diff --git a/src/client/util/RecordingApi.ts b/src/client/util/RecordingApi.ts
deleted file mode 100644
index 021feee9a..000000000
--- a/src/client/util/RecordingApi.ts
+++ /dev/null
@@ -1,269 +0,0 @@
-import { CollectionFreeFormView } from "../views/collections/collectionFreeForm";
-import { IReactionDisposer, observable, reaction } from "mobx";
-import { NumCast } from "../../fields/Types";
-import { Doc } from "../../fields/Doc";
-import { VideoBox } from "../views/nodes/VideoBox";
-import { scaleDiverging } from "d3-scale";
-import { Transform } from "./Transform";
-
-type Movement = {
- time: number,
- panX: number,
- panY: number,
- scale: number,
-}
-
-type Presentation = {
- movements: Array<Movement> | null
- meta: Object,
-}
-
-export class RecordingApi {
-
- private static NULL_PRESENTATION: Presentation = {
- movements: null,
- meta: {},
- }
-
- // instance variables
- private currentPresentation: Presentation;
- private isRecording: boolean;
- private absoluteStart: number;
-
-
- // create static instance and getter for global use
- @observable static _instance: RecordingApi;
- public static get Instance(): RecordingApi { return RecordingApi._instance }
- public constructor() {
- // init the global instance
- RecordingApi._instance = this;
-
- // init the instance variables
- this.currentPresentation = RecordingApi.NULL_PRESENTATION
- this.isRecording = false;
- this.absoluteStart = -1;
-
- // used for tracking movements in the view frame
- this.disposeFunc = null;
- this.recordingFFView = null;
-
- // for now, set playFFView
- this.playFFView = null;
- this.timers = null;
- }
-
- // little helper :)
- private get isInitPresenation(): boolean {
- return this.currentPresentation.movements === null
- }
-
- public start = (meta?: Object): Error | undefined => {
- // check if already init a presentation
- if (!this.isInitPresenation) {
- console.error('[recordingApi.ts] start() failed: current presentation data exists. please call clear() first.')
- return new Error('[recordingApi.ts] start()')
- }
-
- // update the presentation mode
- Doc.UserDoc().presentationMode = 'recording'
-
- // (1a) get start date for presenation
- const startDate = new Date()
- // (1b) set start timestamp to absolute timestamp
- this.absoluteStart = startDate.getTime()
-
- // (2) assign meta content if it exists
- this.currentPresentation.meta = meta || {}
- // (3) assign start date to currentPresenation
- this.currentPresentation.movements = []
- // (4) set isRecording true to allow trackMovements
- this.isRecording = true
- }
-
- public clear = (): Error | Presentation => {
- // TODO: maybe archive the data?
- if (this.isRecording) {
- console.error('[recordingApi.ts] clear() failed: currently recording presentation. call pause() first')
- return new Error('[recordingApi.ts] clear()')
- }
-
- // update the presentation mode
- Doc.UserDoc().presentationMode = 'none'
- // set the previus recording view to the play view
- this.playFFView = this.recordingFFView
-
- const presCopy = { ...this.currentPresentation }
-
- // clear presenation data
- this.currentPresentation = RecordingApi.NULL_PRESENTATION
- // clear isRecording
- this.isRecording = false
- // clear absoluteStart
- this.absoluteStart = -1
- // clear the disposeFunc
- this.removeRecordingFFView()
-
- return presCopy;
- }
-
- public pause = (): Error | undefined => {
- if (this.isInitPresenation) {
- console.error('[recordingApi.ts] pause() failed: no presentation started. try calling init() first')
- return new Error('[recordingApi.ts] pause(): no presentation')
- }
- // don't allow track movments
- this.isRecording = false
-
- // set adjust absoluteStart to add the time difference
- const timestamp = new Date().getTime()
- this.absoluteStart = timestamp - this.absoluteStart
- }
-
- public resume = () => {
- this.isRecording = true
- // set absoluteStart to the difference in time
- this.absoluteStart = new Date().getTime() - this.absoluteStart
- }
-
- private trackMovements = (panX: number, panY: number, scale: number = 0): Error | undefined => {
- // ensure we are recording
- if (!this.isRecording) {
- return new Error('[recordingApi.ts] trackMovements()')
- }
- // check to see if the presetation is init
- if (this.isInitPresenation) {
- return new Error('[recordingApi.ts] trackMovements(): no presentation')
- }
-
- // get the time
- const time = new Date().getTime() - this.absoluteStart
- // make new movement object
- const movement: Movement = { time, panX, panY, scale }
-
- // add that movement to the current presentation data's movement array
- this.currentPresentation.movements && this.currentPresentation.movements.push(movement)
- }
-
- // instance variable for the FFView
- private disposeFunc: IReactionDisposer | null;
- private recordingFFView: CollectionFreeFormView | null;
-
- // set the FFView that will be used in a reaction to track the movements
- public setRecordingFFView = (view: CollectionFreeFormView): void => {
- // set the view to the current view
- if (view === this.recordingFFView || view == null) return;
-
- // this.recordingFFView = view;
- // set the reaction to track the movements
- this.disposeFunc = reaction(
- () => ({ x: NumCast(view.Document.panX, -1), y: NumCast(view.Document.panY, -1), scale: NumCast(view.Document.viewScale, -1) }),
- (res) => (res.x !== -1 && res.y !== -1 && this.isRecording) && this.trackMovements(res.x, res.y, res.scale)
- )
-
- // for now, set the most recent recordingFFView to the playFFView
- this.recordingFFView = view;
- }
-
- // call on dispose function to stop tracking movements
- public removeRecordingFFView = (): void => {
- this.disposeFunc?.();
- this.disposeFunc = null;
- }
-
- // TODO: extract this into different class with pause and resume recording
- // TODO: store the FFview with the movements
- private playFFView: CollectionFreeFormView | null;
- private timers: NodeJS.Timeout[] | null;
-
- public setPlayFFView = (view: CollectionFreeFormView): void => {
- this.playFFView = view
- }
-
- // pausing movements will dispose all timers that are planned to replay the movements
- // play movemvents will recreate them when the user resumes the presentation
- public pauseMovements = (): undefined | Error => {
- if (this.playFFView === null) {
- return new Error('[recordingApi.ts] pauseMovements() failed: no view')
- }
-
- if (!this._isPlaying) {
- //return new Error('[recordingApi.ts] pauseMovements() failed: not playing')
- return
- }
- this._isPlaying = false
- // TODO: set userdoc presentMode to browsing
- this.timers?.map(timer => clearTimeout(timer))
-
- // this.videoBox = null;
- }
-
- private videoBox: VideoBox | null = null;
-
- // by calling pause on the VideoBox, the pauseMovements will be called
- public pauseVideoAndMovements = (): boolean => {
- this.videoBox?.Pause()
-
- this.pauseMovements()
- return this.videoBox == null
- }
-
- public _isPlaying = false;
-
- public playMovements = (presentation: Presentation, timeViewed: number = 0, videoBox?: VideoBox): undefined | Error => {
- if (presentation.movements === null || this.playFFView === null) {
- return new Error('[recordingApi.ts] followMovements() failed: no presentation data or no view')
- }
- if(this._isPlaying) return;
-
- this._isPlaying = true;
- Doc.UserDoc().presentationMode = 'watching';
-
- // TODO: consider this bug at the end of the clip on seek
- this.videoBox = videoBox || null;
-
- // only get the movements that are remaining in the video time left
- const filteredMovements = presentation.movements.filter(movement => movement.time > timeViewed * 1000)
-
- // helper to replay a movement
- const document = this.playFFView
- let preScale = -1;
- const zoomAndPan = (movement: Movement) => {
- const { panX, panY, scale } = movement;
- (scale !== -1 && preScale !== scale) && document.zoomSmoothlyAboutPt([panX, panY], scale, 0);
- document.Document._panX = panX;
- document.Document._panY = panY;
-
- preScale = scale;
- }
-
- // set the first frame to be at the start of the pres
- zoomAndPan(filteredMovements[0]);
-
- // make timers that will execute each movement at the correct replay time
- this.timers = filteredMovements.map(movement => {
- const timeDiff = movement.time - timeViewed*1000
- return setTimeout(() => {
- // replay the movement
- zoomAndPan(movement)
- // if last movement, presentation is done -> set the instance var
- if (movement === filteredMovements[filteredMovements.length - 1]) RecordingApi.Instance._isPlaying = false;
- }, timeDiff)
- })
- }
-
- // Unfinished code for tracing multiple free form views
- // export let pres: Map<CollectionFreeFormView, IReactionDisposer> = new Map()
-
- // export function AddRecordingFFView(ffView: CollectionFreeFormView): void {
- // pres.set(ffView,
- // reaction(() => ({ x: ffView.panX, y: ffView.panY }),
- // (pt) => RecordingApi.trackMovements(ffView, pt.x, pt.y)))
- // )
- // }
-
- // export function RemoveRecordingFFView(ffView: CollectionFreeFormView): void {
- // const disposer = pres.get(ffView);
- // disposer?.();
- // pres.delete(ffView)
- // }
-}
diff --git a/src/client/util/ReplayMovements.ts b/src/client/util/ReplayMovements.ts
new file mode 100644
index 000000000..86bc4c5de
--- /dev/null
+++ b/src/client/util/ReplayMovements.ts
@@ -0,0 +1,208 @@
+import { CollectionFreeFormView } from "../views/collections/collectionFreeForm";
+import { IReactionDisposer, observable, observe, reaction } from "mobx";
+import { Doc } from "../../fields/Doc";
+import { VideoBox } from "../views/nodes/VideoBox";
+import { DocumentManager } from "./DocumentManager";
+import { CollectionDockingView } from "../views/collections/CollectionDockingView";
+import { DocServer } from "../DocServer";
+import { Movement, Presentation } from "./TrackMovements";
+
+export class ReplayMovements {
+ private timers: NodeJS.Timeout[] | null;
+ private videoBoxDisposeFunc: IReactionDisposer | null;
+ private videoBox: VideoBox | null;
+ private isPlaying: boolean;
+
+
+ // create static instance and getter for global use
+ @observable static _instance: ReplayMovements;
+ static get Instance(): ReplayMovements { return ReplayMovements._instance }
+ constructor() {
+ // init the global instance
+ ReplayMovements._instance = this;
+
+ // instance vars for replaying
+ this.timers = null;
+ this.videoBoxDisposeFunc = null;
+ this.videoBox = null;
+ this.isPlaying = false;
+ }
+
+ // pausing movements will dispose all timers that are planned to replay the movements
+ // playing movements will recreate them when the user resumes the presentation
+ pauseMovements = (): undefined | Error => {
+ if (!this.isPlaying) {
+ // console.warn('[recordingApi.ts] pauseMovements(): already on paused');
+ return;
+ }
+ Doc.UserDoc().presentationMode = 'none';
+
+ this.isPlaying = false
+ // TODO: set userdoc presentMode to browsing
+ this.timers?.map(timer => clearTimeout(timer))
+ }
+
+ setVideoBox = async (videoBox: VideoBox) => {
+ // console.info('setVideoBox', videoBox);
+ if (this.videoBox !== null) { console.warn('setVideoBox on already videoBox'); }
+ if (this.videoBoxDisposeFunc !== null) { console.warn('setVideoBox on already videoBox dispose func'); this.videoBoxDisposeFunc(); }
+
+
+ const { presentation } = videoBox;
+ if (presentation == null) { console.warn('setVideoBox on null videoBox presentation'); return; }
+
+ let docIdtoDoc: Map<string, Doc> = new Map();
+ try {
+ docIdtoDoc = await this.loadPresentation(presentation);
+ } catch {
+ console.error('[ReplayMovements.ts] setVideoBox(): error loading presentation - no replay movements');
+ throw 'error loading docs from server';
+ }
+
+
+ this.videoBoxDisposeFunc =
+ reaction(() => ({ playing: videoBox._playing, timeViewed: videoBox.player?.currentTime || 0 }),
+ ({ playing, timeViewed }) =>
+ playing ? this.playMovements(presentation, docIdtoDoc, timeViewed) : this.pauseMovements()
+ );
+ this.videoBox = videoBox;
+ }
+
+ removeVideoBox = () => {
+ if (this.videoBoxDisposeFunc == null) { console.warn('removeVideoBox on null videoBox'); return; }
+ this.videoBoxDisposeFunc();
+
+ this.videoBox = null;
+ this.videoBoxDisposeFunc = null;
+ }
+
+ // should be called from interacting with the screen
+ pauseFromInteraction = () => {
+ this.videoBox?.Pause();
+
+ this.pauseMovements();
+ }
+
+ loadPresentation = async (presentation: Presentation) => {
+ const { movements } = presentation;
+ if (movements === null) {
+ throw '[ReplayMovements.ts] loadPresentation() failed: no presentation data';
+ }
+
+ // generate a set of all unique docIds
+ const docIds = new Set<string>();
+ for (const {docId} of movements) {
+ if (!docIds.has(docId)) docIds.add(docId);
+ }
+
+ const docIdtoDoc = new Map<string, Doc>();
+
+ let refFields = await DocServer.GetRefFields([...docIds.keys()]);
+ for (const docId in refFields) {
+ if (!refFields[docId]) {
+ throw `one field was undefined`;
+ }
+ docIdtoDoc.set(docId, refFields[docId] as Doc);
+ }
+ // console.info('loadPresentation refFields', refFields, docIdtoDoc);
+
+ return docIdtoDoc;
+ }
+
+ // returns undefined if the docView isn't open on the screen
+ getCollectionFFView = (docId: string) => {
+ const isInView = DocumentManager.Instance.getDocumentViewById(docId);
+ if (isInView) { return isInView.ComponentView as CollectionFreeFormView; }
+ }
+
+ // will open the doc in a tab then return the CollectionFFView that holds it
+ openTab = (docId: string, docIdtoDoc: Map<string, Doc>) => {
+ const doc = docIdtoDoc.get(docId);
+ if (doc == undefined) {
+ console.error(`docIdtoDoc did not contain docId ${docId}`)
+ return undefined;
+ }
+ // console.log('openTab', docId, doc);
+ CollectionDockingView.AddSplit(doc, 'right');
+ const docView = DocumentManager.Instance.getDocumentView(doc);
+ // BUG - this returns undefined if the doc is already open
+ return docView?.ComponentView as CollectionFreeFormView;
+ }
+
+ // helper to replay a movement
+ zoomAndPan = (movement: Movement, document: CollectionFreeFormView) => {
+ const { panX, panY, scale } = movement;
+ scale !== 0 && document.zoomSmoothlyAboutPt([panX, panY], scale, 0);
+ document.Document._panX = panX;
+ document.Document._panY = panY;
+ }
+
+ getFirstMovements = (movements: Movement[]): Map<string, Movement> => {
+ if (movements === null) return new Map();
+ // generate a set of all unique docIds
+ const docIdtoFirstMove = new Map();
+ for (const move of movements) {
+ const { docId } = move;
+ if (!docIdtoFirstMove.has(docId)) docIdtoFirstMove.set(docId, move);
+ }
+ return docIdtoFirstMove;
+ }
+
+ endPlayingPresentation = () => {
+ this.isPlaying = false;
+ Doc.UserDoc().presentationMode = 'none';
+ }
+
+ public playMovements = (presentation: Presentation, docIdtoDoc: Map<string, Doc>, timeViewed: number = 0) => {
+ // console.info('playMovements', presentation, timeViewed, docIdtoDoc);
+
+ if (presentation.movements === null || presentation.movements.length === 0) { //|| this.playFFView === null) {
+ return new Error('[ReplayMovements.ts] playMovements() failed: no presentation data')
+ }
+ if (this.isPlaying) return;
+
+ this.isPlaying = true;
+ Doc.UserDoc().presentationMode = 'watching';
+
+ // only get the movements that are remaining in the video time left
+ const filteredMovements = presentation.movements.filter(movement => movement.time > timeViewed * 1000)
+
+ const handleFirstMovements = () => {
+ // if the first movement is a closed tab, open it
+ const firstMovement = filteredMovements[0];
+ const isClosed = this.getCollectionFFView(firstMovement.docId) === undefined;
+ if (isClosed) this.openTab(firstMovement.docId, docIdtoDoc);
+
+ // for the open tabs, set it to the first move
+ const docIdtoFirstMove = this.getFirstMovements(filteredMovements);
+ for (const [docId, firstMove] of docIdtoFirstMove) {
+ const colFFView = this.getCollectionFFView(docId);
+ if (colFFView) this.zoomAndPan(firstMove, colFFView);
+ }
+ }
+ handleFirstMovements();
+
+
+ // make timers that will execute each movement at the correct replay time
+ this.timers = filteredMovements.map(movement => {
+ const timeDiff = movement.time - timeViewed * 1000
+
+ return setTimeout(() => {
+ const collectionFFView = this.getCollectionFFView(movement.docId);
+ if (collectionFFView) {
+ this.zoomAndPan(movement, collectionFFView);
+ } else {
+ // tab wasn't open - open it and play the movement
+ const openedColFFView = this.openTab(movement.docId, docIdtoDoc);
+ console.log('openedColFFView', openedColFFView);
+ openedColFFView && this.zoomAndPan(movement, openedColFFView);
+ }
+
+ // if last movement, presentation is done -> cleanup :)
+ if (movement === filteredMovements[filteredMovements.length - 1]) {
+ this.endPlayingPresentation();
+ }
+ }, timeDiff);
+ });
+ }
+}
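For reference, a minimal usage sketch of the new replay pipeline; the surrounding call sites (the VideoBox wiring) are assumed, but the method names come from the ReplayMovements class above:

    import { VideoBox } from "../views/nodes/VideoBox";
    import { ReplayMovements } from "./ReplayMovements";

    // hypothetical: the recorded-video view currently being played back
    declare const videoBox: VideoBox;

    // created once at startup (see the Main.tsx hunk below)
    new ReplayMovements();

    // register the video: loads the referenced docs, then reacts to play/pause/seek
    // via the reaction on videoBox._playing and videoBox.player.currentTime
    void ReplayMovements.Instance.setVideoBox(videoBox);

    // panning/zooming a freeform view while watching pauses both the video and the
    // movement timers (see the CollectionFreeFormView.tsx hunk below)
    ReplayMovements.Instance.pauseFromInteraction();

    // cleanup when the video view goes away
    ReplayMovements.Instance.removeVideoBox();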
diff --git a/src/client/util/TrackMovements.ts b/src/client/util/TrackMovements.ts
new file mode 100644
index 000000000..d512e4802
--- /dev/null
+++ b/src/client/util/TrackMovements.ts
@@ -0,0 +1,257 @@
+import { IReactionDisposer, observable, observe, reaction } from "mobx";
+import { NumCast } from "../../fields/Types";
+import { Doc, DocListCast } from "../../fields/Doc";
+import { CollectionDockingView } from "../views/collections/CollectionDockingView";
+import { Id } from "../../fields/FieldSymbols";
+
+export type Movement = {
+ time: number,
+ panX: number,
+ panY: number,
+ scale: number,
+ docId: string,
+}
+
+export type Presentation = {
+ movements: Movement[] | null,
+ totalTime: number,
+ meta: Object | Object[],
+}
+
+export class TrackMovements {
+
+ private static get NULL_PRESENTATION(): Presentation {
+ return { movements: null, meta: {}, totalTime: -1, }
+ }
+
+ // instance variables
+ private currentPresentation: Presentation;
+ private tracking: boolean;
+ private absoluteStart: number;
+ // instance variable for holding the FFViews and their disposers
+ private recordingFFViews: Map<string, IReactionDisposer> | null;
+ private tabChangeDisposeFunc: IReactionDisposer | null;
+
+
+ // create static instance and getter for global use
+ @observable static _instance: TrackMovements;
+ static get Instance(): TrackMovements { return TrackMovements._instance }
+ constructor() {
+ // init the global instance
+ TrackMovements._instance = this;
+
+ // init the instance variables
+ this.currentPresentation = TrackMovements.NULL_PRESENTATION
+ this.tracking = false;
+ this.absoluteStart = -1;
+
+ // used for tracking movements in the view frame
+ this.recordingFFViews = null;
+ this.tabChangeDisposeFunc = null;
+ }
+
+ // little helper :)
+ private get nullPresentation(): boolean {
+ return this.currentPresentation.movements === null
+ }
+
+ private addRecordingFFView(doc: Doc, key: string = doc[Id]): void {
+ // console.info('adding dispose func : docId', key, 'doc', doc);
+
+ if (this.recordingFFViews === null) { console.warn('addFFView on null TrackMovements'); return; }
+ if (this.recordingFFViews.has(key)) { console.warn('addFFView : key already in map'); return; }
+
+ const disposeFunc = reaction(
+ () => ({ x: NumCast(doc.panX, -1), y: NumCast(doc.panY, -1), scale: NumCast(doc.viewScale, 0)}),
+ (res) => (res.x !== -1 && res.y !== -1 && this.tracking) && this.trackMovement(res.x, res.y, key, res.scale),
+ );
+ this.recordingFFViews?.set(key, disposeFunc);
+ }
+
+ private removeRecordingFFView = (key: string) => {
+ // console.info('removing dispose func : docId', key);
+ if (this.recordingFFViews === null) { console.warn('removeFFView on null TrackMovements'); return; }
+ this.recordingFFViews.get(key)?.();
+ this.recordingFFViews.delete(key);
+ }
+
+ // in the case where only one tab was changed (updates not across dashboards), set only one to true
+ private updateRecordingFFViewsFromTabs = (tabbedDocs: Doc[], onlyOne = false) => {
+ if (this.recordingFFViews === null) return;
+
+ // so that the size comparisons are correct, we must filter to only the FFViews
+ const isFFView = (doc: Doc) => doc && 'viewType' in doc && doc.viewType === 'freeform';
+ const tabbedFFViews = new Set<string>();
+ for (const DashDoc of tabbedDocs) {
+ if (isFFView(DashDoc)) tabbedFFViews.add(DashDoc[Id]);
+ }
+
+
+ // new tab was added - need to add it
+ if (tabbedFFViews.size > this.recordingFFViews.size) {
+ for (const DashDoc of tabbedDocs) {
+ if (!this.recordingFFViews.has(DashDoc[Id])) {
+ if (isFFView(DashDoc)) {
+ this.addRecordingFFView(DashDoc);
+
+ // only one max change, so return
+ if (onlyOne) return;
+ }
+ }
+ }
+ }
+ // tab was removed - need to remove it from recordingFFViews
+ else if (tabbedFFViews.size < this.recordingFFViews.size) {
+ for (const [key] of this.recordingFFViews) {
+ if (!tabbedFFViews.has(key)) {
+ this.removeRecordingFFView(key);
+ if (onlyOne) return;
+ }
+ }
+ }
+ }
+
+ private initTabTracker = () => {
+ if (this.recordingFFViews === null) {
+ this.recordingFFViews = new Map();
+ }
+
+ // init the dispose funcs on the page
+ const docList = DocListCast(CollectionDockingView.Instance.props.Document.data);
+ this.updateRecordingFFViewsFromTabs(docList);
+
+ // create a reaction to monitor changes in tabs
+ this.tabChangeDisposeFunc =
+ reaction(() => CollectionDockingView.Instance.props.Document.data,
+ (change) => {
+ // TODO: consider changing between dashboards
+ // console.info('change in tabs', change);
+ this.updateRecordingFFViewsFromTabs(DocListCast(change), true);
+ });
+ }
+
+ start = (meta?: Object) => {
+ this.initTabTracker();
+
+ // update the presentation mode
+ Doc.UserDoc().presentationMode = 'recording';
+
+ // (1a) get start date for presentation
+ const startDate = new Date();
+ // (1b) set start timestamp to absolute timestamp
+ this.absoluteStart = startDate.getTime();
+
+ // (2) assign meta content if it exists
+ this.currentPresentation.meta = meta || {}
+ // (3) assign start date to currentPresentation
+ this.currentPresentation.movements = []
+ // (4) set tracking true to allow trackMovements
+ this.tracking = true
+ }
+
+ /* returns the presentation for the current segment; if there is no presentation, returns null */
+ yieldPresentation(clearData: boolean = true): Presentation | null {
+ // if no presentation or done tracking, return null
+ if (this.nullPresentation || !this.tracking) return null;
+
+ // set the previous recording view to the play view
+ // this.playFFView = this.recordingFFView;
+
+ // ensure we add the endTime now that they are done recording
+ const cpy = { ...this.currentPresentation, totalTime: new Date().getTime() - this.absoluteStart };
+
+ // reset the current presentation
+ clearData && this.clear();
+
+ // console.info('yieldPresentation', cpy);
+ return cpy;
+ }
+
+ finish = (): void => {
+ // set tracking to false
+ this.tracking = false
+ // reset the RecordingApi instance
+ this.clear();
+ }
+
+ private clear = (): void => {
+ // clear the disposeFunc if we are done (not tracking)
+ if (!this.tracking) {
+ this.removeAllRecordingFFViews();
+ this.tabChangeDisposeFunc?.();
+ // update the presentation mode now that we are done tracking
+ Doc.UserDoc().presentationMode = 'none';
+
+ this.recordingFFViews = null;
+ this.tabChangeDisposeFunc = null;
+ }
+
+ // clear presentation data
+ this.currentPresentation = TrackMovements.NULL_PRESENTATION
+ // clear absoluteStart
+ this.absoluteStart = -1
+ }
+
+ removeAllRecordingFFViews = () => {
+ if (this.recordingFFViews === null) { console.warn('removeAllFFViews on null TrackMovements'); return; }
+
+ for (const [id, disposeFunc] of this.recordingFFViews) {
+ // console.info('calling dispose func : docId', id);
+ disposeFunc();
+ this.recordingFFViews.delete(id);
+ }
+ }
+
+ private trackMovement = (panX: number, panY: number, docId: string, scale: number = 0) => {
+ // ensure we are recording to track
+ if (!this.tracking) {
+ console.error('[TrackMovements.ts] trackMovement(): tracking is false')
+ return;
+ }
+ // check to see if the presentation is initialized - if not, we are between segments
+ // TODO: make this clearer - tracking should mean "live tracking", not just that the recording api is in use (between start and yieldPres),
+ // because tracking should be false in between segments
+ if (this.nullPresentation) {
+ console.warn('[TrackMovements.ts] trackMovement(): trying to store movements between segments')
+ return;
+ }
+
+ // get the time
+ const time = new Date().getTime() - this.absoluteStart
+ // make new movement object
+ const movement: Movement = { time, panX, panY, scale, docId }
+
+ // add that movement to the current presentation data's movement array
+ this.currentPresentation.movements && this.currentPresentation.movements.push(movement)
+ }
+
+
+ // method that concatenates an array of presentations into one
+ public concatPresentations = (presentations: Presentation[]): Presentation => {
+ // these three will lead to the combined presentation
+ let combinedMovements: Movement[] = [];
+ let sumTime = 0;
+ let combinedMetas: any[] = [];
+
+ presentations.forEach((presentation) => {
+ const { movements, totalTime, meta } = presentation;
+
+ // update movements if they had one
+ if (movements) {
+ // add the summed time to the movements
+ const addedTimeMovements = movements.map(move => { return { ...move, time: move.time + sumTime } });
+ // concat the movements already in the combined presentation with these new ones
+ combinedMovements.push(...addedTimeMovements);
+ }
+
+ // update the totalTime
+ sumTime += totalTime;
+
+ // concatenate the metas
+ combinedMetas.push(meta);
+ });
+
+ // return the combined presentation with the updated total summed time
+ return { movements: combinedMovements, totalTime: sumTime, meta: combinedMetas };
+ }
+}
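Similarly, a minimal sketch of the recording side, assuming one presentation segment per MediaRecorder start/stop (as the RecordingView.tsx hunks below suggest); the method names come from the TrackMovements class above:

    import { Presentation, TrackMovements } from "./TrackMovements";

    // created once at startup (see the Main.tsx hunk below)
    new TrackMovements();

    const segments: Presentation[] = [];

    // per recorded segment:
    TrackMovements.Instance.start();   // installs pan/zoom reactions on the open freeform tabs
    // ...user pans/zooms; movements are stored with times relative to start()...
    const segment = TrackMovements.Instance.yieldPresentation();
    if (segment) segments.push(segment);

    // when recording ends entirely:
    TrackMovements.Instance.finish();
    const combined = TrackMovements.Instance.concatPresentations(segments);
    // `combined` is what RecordingBox stringifies into the "<fieldKey>-presentation" field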
diff --git a/src/client/views/Main.tsx b/src/client/views/Main.tsx
index 49c2dcf34..acc74e914 100644
--- a/src/client/views/Main.tsx
+++ b/src/client/views/Main.tsx
@@ -8,7 +8,8 @@ import { AssignAllExtensions } from "../../extensions/General/Extensions";
import { Docs } from "../documents/Documents";
import { CurrentUserUtils } from "../util/CurrentUserUtils";
import { LinkManager } from "../util/LinkManager";
-import { RecordingApi } from "../util/RecordingApi";
+import { ReplayMovements } from '../util/ReplayMovements';
+import { TrackMovements } from "../util/TrackMovements";
import { CollectionView } from "./collections/CollectionView";
import { DashboardView } from './DashboardView';
import { MainView } from "./MainView";
@@ -38,6 +39,7 @@ AssignAllExtensions();
const expires = "expires=" + d.toUTCString();
document.cookie = `loadtime=${loading};${expires};path=/`;
new LinkManager();
- new RecordingApi;
+ new TrackMovements();
+ new ReplayMovements();
ReactDOM.render(<MainView />, document.getElementById('root'));
})(); \ No newline at end of file
diff --git a/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx b/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx
index 542b1fce1..1320785a9 100644
--- a/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx
+++ b/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx
@@ -26,7 +26,6 @@ import { DragManager, dropActionType } from "../../../util/DragManager";
import { HistoryUtil } from "../../../util/History";
import { InteractionUtils } from "../../../util/InteractionUtils";
import { LinkManager } from "../../../util/LinkManager";
-import { RecordingApi } from "../../../util/RecordingApi";
import { ScriptingGlobals } from "../../../util/ScriptingGlobals";
import { SearchUtil } from "../../../util/SearchUtil";
import { SelectionManager } from "../../../util/SelectionManager";
@@ -59,6 +58,7 @@ import "./CollectionFreeFormView.scss";
import { MarqueeView } from "./MarqueeView";
import React = require("react");
import e = require("connect-flash");
+import { ReplayMovements } from "../../../util/ReplayMovements";
export type collectionFreeformViewProps = {
@@ -998,15 +998,8 @@ export class CollectionFreeFormView extends CollectionSubView<Partial<collection
@action
setPan(panX: number, panY: number, panTime: number = 0, clamp: boolean = false) {
- // set the current respective FFview to the tab being panned.
- (Doc.UserDoc()?.presentationMode === 'recording') && RecordingApi.Instance.setRecordingFFView(this);
- // TODO: make this based off the specific recording FFView
- (Doc.UserDoc()?.presentationMode === 'none') && RecordingApi.Instance.setPlayFFView(this);
- if (Doc.UserDoc()?.presentationMode === 'watching') {
- RecordingApi.Instance.pauseVideoAndMovements();
- Doc.UserDoc().presentationMode = 'none';
- // RecordingApi.Instance.pauseMovements()
- }
+ // this is the simplest way to do this for now -> will talk with Bob about using mobx here instead so this line can be removed.
+ if (Doc.UserDoc()?.presentationMode === 'watching') ReplayMovements.Instance.pauseFromInteraction();
if (!this.isAnnotationOverlay && clamp) {
// this section wraps the pan position, horizontally and/or vertically whenever the content is panned out of the viewing bounds
diff --git a/src/client/views/nodes/RecordingBox/ProgressBar.scss b/src/client/views/nodes/RecordingBox/ProgressBar.scss
index a493b0b89..28ad25ffa 100644
--- a/src/client/views/nodes/RecordingBox/ProgressBar.scss
+++ b/src/client/views/nodes/RecordingBox/ProgressBar.scss
@@ -1,11 +1,19 @@
.progressbar {
+ touch-action: none;
+ vertical-align: middle;
+ text-align: center;
+
+ align-items: center;
+ cursor: default;
+
+
position: absolute;
display: flex;
justify-content: flex-start;
- bottom: 10px;
- width: 80%;
- height: 5px;
+ bottom: 2px;
+ width: 99%;
+ height: 30px;
background-color: gray;
&.done {
@@ -23,4 +31,93 @@
z-index: 3;
pointer-events: none;
}
-} \ No newline at end of file
+}
+
+.progressbar-disabled {
+ cursor: not-allowed;
+}
+
+.progressbar-dragging {
+ cursor: grabbing;
+}
+
+// citation: https://codepen.io/_Master_/pen/PRdjmQ
+@keyframes blinker {
+ from {opacity: 1.0;}
+ to {opacity: 0.0;}
+}
+.blink {
+ text-decoration: blink;
+ animation-name: blinker;
+ animation-duration: 0.6s;
+ animation-iteration-count:infinite;
+ animation-timing-function:ease-in-out;
+ animation-direction: alternate;
+}
+
+.segment {
+ border: 3px solid black;
+ background-color: red;
+ margin: 1px;
+ padding: 0;
+ cursor: pointer;
+ transition-duration: .5s;
+ user-select: none;
+
+ vertical-align: middle;
+ text-align: center;
+}
+
+.segment-expanding {
+ border-color: red;
+ background-color: white;
+ transition-duration: 0s;
+ opacity: .75;
+ pointer-events: none;
+}
+
+.segment-expanding:hover {
+ background-color: inherit;
+ cursor: not-allowed;
+}
+
+.segment-disabled {
+ pointer-events: none;
+ opacity: 0.5;
+ transition-duration: 0s;
+ /* Hide the text. */
+ text-indent: 100%;
+ white-space: nowrap;
+ overflow: hidden;
+}
+
+.segment-hide {
+ background-color: inherit;
+ text-align: center;
+ vertical-align: middle;
+ user-select: none;
+}
+
+.segment:first-child {
+ margin-left: 2px;
+}
+.segment:last-child {
+ margin-right: 2px;
+}
+
+.segment:hover {
+ background-color: white;
+}
+
+.segment:hover, .segment-selected {
+ margin: 0px;
+ border: 4px solid red;
+ border-radius: 2px;
+}
+
+.segment-selected {
+ border: 4px solid #202020;
+ background-color: red;
+ opacity: .75;
+ cursor: grabbing;
+}
diff --git a/src/client/views/nodes/RecordingBox/ProgressBar.tsx b/src/client/views/nodes/RecordingBox/ProgressBar.tsx
index 82d5e1f04..1bb2b7c84 100644
--- a/src/client/views/nodes/RecordingBox/ProgressBar.tsx
+++ b/src/client/views/nodes/RecordingBox/ProgressBar.tsx
@@ -1,45 +1,301 @@
import * as React from 'react';
-import { useEffect } from "react"
+import { useEffect, useState, useCallback, useRef } from "react"
import "./ProgressBar.scss"
+import { MediaSegment } from './RecordingView';
interface ProgressBarProps {
- progress: number,
- marks: number[],
+ videos: MediaSegment[],
+ setVideos: React.Dispatch<React.SetStateAction<MediaSegment[]>>,
+ orderVideos: boolean,
+ progress: number,
+ recording: boolean,
+ doUndo: boolean,
+ setCanUndo?: React.Dispatch<React.SetStateAction<boolean>>,
+}
+
+interface SegmentBox {
+ endTime: number,
+ startTime: number,
+ order: number,
+}
+interface CurrentHover {
+ index: number,
+ minX: number,
+ maxX: number
}
export function ProgressBar(props: ProgressBarProps) {
+ const progressBarRef = useRef<HTMLDivElement | null>(null)
+
+ // the actual list of JSX elements rendered as segments
+ const [segments, setSegments] = useState<JSX.Element[]>([]);
+ // array for the order of video segments
+ const [ordered, setOrdered] = useState<SegmentBox[]>([]);
+
+ const [undoStack, setUndoStack] = useState<SegmentBox[]>([]);
+
+ // -1 if no segment is currently being dragged around; else, it is the id of the segment being dragged
+ // NOTE: the id of a segment is its index in the ordered array
+ const [dragged, setDragged] = useState<number>(-1);
+
+ // length of the time removed from the video, in seconds*100
+ const [totalRemovedTime, setTotalRemovedTime] = useState<number>(0);
+
+ // this holds the index of the video segment to be removed
+ const [removed, setRemoved] = useState<number>(-1);
+
+ // update the canUndo props based on undo stack
+ useEffect(() => props.setCanUndo?.(undoStack.length > 0), [undoStack.length]);
+
+ // useEffect for undo - brings back the most recently deleted segment
+ useEffect(() => handleUndo(), [props.doUndo])
+ const handleUndo = () => {
+ // get the last element from the undo if it exists
+ if (undoStack.length === 0) return;
+ // get and remove the last element from the undo stack
+ const last = undoStack.lastElement();
+ setUndoStack(prevUndo => prevUndo.slice(0, -1));
+
+ // update the removed time and place element back into ordered
+ setTotalRemovedTime(prevRemoved => prevRemoved - (last.endTime - last.startTime));
+ setOrdered(prevOrdered => [...prevOrdered, last]);
+ }
+
+ // useEffect for recording changes - changes style to disabled and adds the "expanding-segment"
+ useEffect(() => {
+ // get each segment's html element by its id -> make them appear disabled (or enabled)
+ segments.forEach((seg) => document.getElementById(seg.props.id)?.classList.toggle('segment-disabled', props.recording));
+ progressBarRef.current?.classList.toggle('progressbar-disabled', props.recording);
+
+ if (props.recording)
+ setSegments(prevSegments => [...prevSegments, <div key='segment-expanding' id='segment-expanding' className='segment segment-expanding blink' style={{ width: 'fit-content' }}>{props.videos.length + 1}</div>]);
+ }, [props.recording])
+
+
+ // useEffect that updates the segmentsJSX, which is rendered
+ // only updated when ordered is updated or if the user is dragging around a segment
+ useEffect(() => {
+ const totalTime = props.progress * 1000 - totalRemovedTime;
+ const segmentsJSX = ordered.map((seg, i) =>
+ <div key={`segment-${i}`} id={`segment-${i}`} className={dragged === i ? 'segment-hide' : 'segment'} style={{ width: `${((seg.endTime - seg.startTime) / totalTime) * 100}%` }}>{seg.order + 1}</div>);
+
+ setSegments(segmentsJSX)
+ }, [dragged, ordered]);
+
+ // useEffect for dragged - update the cursor to be grabbing while grabbing
+ useEffect(() => {
+ progressBarRef.current?.classList.toggle('progressbar-dragging', dragged !== -1);
+ }, [dragged]);
+
+ // to improve performance, we only want to update the CSS width, not re-render the whole JSX list
+ useEffect(() => {
+ if (!props.recording) return
+ const totalTime = props.progress * 1000 - totalRemovedTime;
+ let remainingTime = totalTime;
+ segments.forEach((seg, i) => {
+ // for the last segment, we need to set that directly
+ if (i === segments.length - 1) return;
+ // update remaining time
+ remainingTime -= (ordered[i].endTime - ordered[i].startTime);
+
+ // update the width for this segment
+ const htmlId = seg.props.id;
+ const segmentHtml = document.getElementById(htmlId);
+ if (segmentHtml) segmentHtml.style.width = `${((ordered[i].endTime - ordered[i].startTime) / totalTime) * 100}%`;
+ });
+
+ // update the width of the expanding segment using the remaining time
+ const segExapandHtml = document.getElementById('segment-expanding');
+ if (segExapandHtml)
+ segExapandHtml.style.width = ordered.length === 0 ? '100%' : `${(remainingTime / totalTime) * 100}%`;
+ }, [props.progress]);
+
+ // useEffect for props.videos - update the ordered array when a new video is added
+ useEffect(() => {
+ // this useEffect also fires when the videos are being rearranged into order;
+ // in that case, do nothing.
+ if (props.orderVideos) return;
+
+ const order = props.videos.length - 1;
+ // in this case, a new video is added -> push it onto ordered
+ if (order >= ordered.length) {
+ const { endTime, startTime } = props.videos.lastElement();
+ setOrdered(prevOrdered => {
+ return [...prevOrdered, { endTime, startTime, order }];
+ });
+ }
+
+ // in this case, a video is removed
+ else if (order < ordered.length) {
+ console.warn('warning: video removed from parent');
+ }
+ }, [props.videos]);
+
+ // useEffect for props.orderVideos - matches the order array with the videos array before the export
+ useEffect(() => props.setVideos(vids => ordered.map((seg) => vids[seg.order])), [props.orderVideos]);
+
+ // useEffect for removed - handles logic for removing a segment
+ useEffect(() => {
+ if (removed === -1) return;
+ // update total removed time
+ setTotalRemovedTime(prevRemoved => prevRemoved + (ordered[removed].endTime - ordered[removed].startTime));
+
+ // put the element on the undo stack
+ setUndoStack(prevUndo => [...prevUndo, ordered[removed]]);
+ // remove the segment from the array
+ setOrdered(prevOrdered => prevOrdered.filter((seg, i) => i !== removed));
+ // reset to default/nullish state
+ setRemoved(-1);
+ }, [removed]);
+
+ // returns the new currentHover based on the new index
+ const updateCurrentHover = (segId: number): CurrentHover | null => {
+ // get the segId of the segment that will become the new bounding area
+ const rect = progressBarRef.current?.children[segId].getBoundingClientRect()
+ if (rect == null) return null
+ return {
+ index: segId,
+ minX: rect.x,
+ maxX: rect.x + rect.width,
+ }
+ }
+
+ // pointerdown event for the progress bar
+ const onPointerDown = (e: React.PointerEvent<HTMLDivElement>) => {
+ // don't move the videobox element
+ e.stopPropagation();
+
+ // if recording, do nothing
+ if (props.recording) return;
+
+ // get the segment the user clicked on to be dragged
+ const clickedSegment = e.target as HTMLDivElement & EventTarget
+
+ // get the progress bar to add event listeners
+ // don't do anything if null
+ const progressBar = progressBarRef.current
+ if (progressBar == null || clickedSegment.id === progressBar.id) return
+
+ // if holding shift key, let's remove that segment
+ if (e.shiftKey) {
+ const segId = parseInt(clickedSegment.id.split('-')[1]);
+ setRemoved(segId);
+ return
+ }
+
+ // if holding ctrl key and click, let's undo that segment #hiddenfeature lol
+ if (e.ctrlKey) {
+ handleUndo();
+ return;
+ }
+
+ // if we're here, the user is dragging a segment around
+ // let the progress bar capture all the pointer events until the user releases (pointerUp)
+ const ptrId = e.pointerId;
+ progressBar.setPointerCapture(ptrId)
+
+ const rect = clickedSegment.getBoundingClientRect()
+ // id for segment is like 'segment-1' or 'segment-10',
+ // so this works to get the id
+ const segId = parseInt(clickedSegment.id.split('-')[1])
+ // set the selected segment to be the one dragged
+ setDragged(segId)
+
+ // this is the logic for storing the lower X bound and upper X bound to know
+ // whether a swap is needed between two segments
+ let currentHover: CurrentHover = {
+ index: segId,
+ minX: rect.x,
+ maxX: rect.x + rect.width,
+ }
+
+ // create the floating segment that tracks the cursor
+ const detchedSegment = document.createElement("div")
+ initDeatchSegment(detchedSegment, rect);
+
+ const updateSegmentOrder = (event: PointerEvent): void => {
+ event.stopPropagation();
+ event.preventDefault();
+
+ // this fixes a bug where pointerup doesn't fire when the cursor is released while a segment is being dragged
+ if (!progressBar.hasPointerCapture(ptrId)) {
+ placeSegmentandCleanup();
+ return;
+ }
+
+ followCursor(event, detchedSegment);
+
+ const curX = event.clientX;
+ // handle the left bound
+ if (curX < currentHover.minX && currentHover.index > 0) {
+ swapSegments(currentHover.index, currentHover.index - 1)
+ currentHover = updateCurrentHover(currentHover.index - 1) ?? currentHover
+ }
+ // handle the right bound
+ else if (curX > currentHover.maxX && currentHover.index < segments.length - 1) {
+ swapSegments(currentHover.index, currentHover.index + 1)
+ currentHover = updateCurrentHover(currentHover.index + 1) ?? currentHover
+ }
+ }
+
+ // handles when the user is done dragging the segment (pointerUp)
+ const placeSegmentandCleanup = (event?: PointerEvent): void => {
+ event?.stopPropagation();
+ event?.preventDefault();
+ // if they put the segment outside of the bounds, remove it
+ if (event && (event.clientX < 0 || event.clientX > document.body.clientWidth || event.clientY < 0 || event.clientY > document.body.clientHeight))
+ setRemoved(currentHover.index);
+
+ // remove the update event listener for pointermove
+ progressBar.removeEventListener('pointermove', updateSegmentOrder);
+ // remove the floating segment from the DOM
+ detchedSegment.remove();
+ // dragged === -1 is equivalent to nothing being dragged (the normal state),
+ // so this will place the segment in its location and update the segment bar
+ setDragged(-1);
+ }
+
+ // event listeners that allow the user to drag and release the floating segment
+ progressBar.addEventListener('pointermove', updateSegmentOrder);
+ progressBar.addEventListener('pointerup', placeSegmentandCleanup, { once: true });
+ }
+
+ const swapSegments = (oldIndex: number, newIndex: number) => {
+ if (newIndex == null) return;
+ setOrdered(prevOrdered => {
+ const temp = { ...prevOrdered[oldIndex] }
+ prevOrdered[oldIndex] = prevOrdered[newIndex]
+ prevOrdered[newIndex] = temp
+ return prevOrdered
+ });
+ // update visually where the segment is hovering over
+ setDragged(newIndex);
+ }
+
+ // functions for the floating segment that tracks the cursor while grabbing it
+ const initDeatchSegment = (dot: HTMLDivElement, rect: DOMRect) => {
+ dot.classList.add("segment-selected");
+ dot.style.transitionDuration = '0s';
+ dot.style.position = 'absolute';
+ dot.style.zIndex = '999';
+ dot.style.width = `${rect.width}px`;
+ dot.style.height = `${rect.height}px`;
+ dot.style.left = `${rect.x}px`;
+ dot.style.top = `${rect.y}px`;
+ dot.draggable = false;
+ document.body.append(dot);
+ }
+ const followCursor = (event: PointerEvent, dot: HTMLDivElement): void => {
+ // event.stopPropagation()
+ const { width, height } = dot.getBoundingClientRect();
+ dot.style.left = `${event.clientX - width / 2}px`;
+ dot.style.top = `${event.clientY - height / 2}px`;
+ }
+
- // const handleClick = (e: React.MouseEvent) => {
- // let progressbar = document.getElementById('progressbar')!
- // let bounds = progressbar!.getBoundingClientRect();
- // let x = e.clientX - bounds.left;
- // let percent = x / progressbar.clientWidth * 100
-
- // for (let i = 0; i < props.marks.length; i++) {
- // let start = i == 0 ? 0 : props.marks[i-1];
- // if (percent > start && percent < props.marks[i]) {
- // props.playSegment(i)
- // // console.log(i)
- // // console.log(percent)
- // // console.log(props.marks[i])
- // break
- // }
- // }
- // }
-
- return (
- <div className="progressbar" id="progressbar">
- <div
- className="progressbar done"
- style={{ width: `${props.progress}%` }}
- // onClick={handleClick}
- ></div>
- {props.marks.map((mark) => {
- return <div
- className="progressbar mark"
- style={{ width: `${mark}%` }}
- ></div>
- })}
- </div>
- )
+ return (
+ <div className="progressbar" id="progressbar" onPointerDown={onPointerDown} ref={progressBarRef}>
+ {segments}
+ </div>
+ )
} \ No newline at end of file
diff --git a/src/client/views/nodes/RecordingBox/RecordingBox.tsx b/src/client/views/nodes/RecordingBox/RecordingBox.tsx
index 10393624b..0ff7c4292 100644
--- a/src/client/views/nodes/RecordingBox/RecordingBox.tsx
+++ b/src/client/views/nodes/RecordingBox/RecordingBox.tsx
@@ -8,54 +8,51 @@ import { FieldView } from "../FieldView";
import { VideoBox } from "../VideoBox";
import { RecordingView } from './RecordingView';
import { DocumentType } from "../../../documents/DocumentTypes";
-import { RecordingApi } from "../../../util/RecordingApi";
-import { Doc, FieldsSym } from "../../../../fields/Doc";
+import { Presentation } from "../../../util/TrackMovements";
+import { Doc } from "../../../../fields/Doc";
import { Id } from "../../../../fields/FieldSymbols";
@observer
export class RecordingBox extends ViewBoxBaseComponent() {
- public static LayoutString(fieldKey: string) { return FieldView.LayoutString(RecordingBox, fieldKey); }
+ public static LayoutString(fieldKey: string) { return FieldView.LayoutString(RecordingBox, fieldKey); }
- private _ref: React.RefObject<HTMLDivElement> = React.createRef();
+ private _ref: React.RefObject<HTMLDivElement> = React.createRef();
- constructor(props: any) {
+ constructor(props: any) {
super(props);
- }
-
- componentDidMount() {
- console.log("set native width and height")
- Doc.SetNativeWidth(this.dataDoc, 1280);
- Doc.SetNativeHeight(this.dataDoc, 720);
- }
-
- @observable result: Upload.FileInformation | undefined = undefined
- @observable videoDuration: number | undefined = undefined
-
- @action
- setVideoDuration = (duration: number) => {
- this.videoDuration = duration
- }
-
- @action
- setResult = (info: Upload.FileInformation, trackScreen: boolean) => {
- this.result = info
- this.dataDoc.type = DocumentType.VID;
- this.dataDoc[this.fieldKey + "-duration"] = this.videoDuration;
-
- this.dataDoc.layout = VideoBox.LayoutString(this.fieldKey);
- this.dataDoc[this.props.fieldKey] = new VideoField(this.result.accessPaths.agnostic.client);
- this.dataDoc[this.fieldKey + "-recorded"] = true;
- // stringify the presenation and store it
- if (trackScreen) {
- this.dataDoc[this.fieldKey + "-presentation"] = JSON.stringify(RecordingApi.Instance.clear());
- }
- }
-
- render() {
- return <div className="recordingBox" ref={this._ref}>
- {!this.result && <RecordingView setResult={this.setResult} setDuration={this.setVideoDuration} id={Doc.GetProto(this.rootDoc)[Id]} />}
- </div>;
- }
+ }
+
+ componentDidMount() {
+ Doc.SetNativeWidth(this.dataDoc, 1280);
+ Doc.SetNativeHeight(this.dataDoc, 720);
+ }
+
+ @observable result: Upload.AccessPathInfo | undefined = undefined
+ @observable videoDuration: number | undefined = undefined
+
+ @action
+ setVideoDuration = (duration: number) => {
+ this.videoDuration = duration
+ }
+
+ @action
+ setResult = (info: Upload.AccessPathInfo, presentation?: Presentation) => {
+ this.result = info
+ this.dataDoc.type = DocumentType.VID;
+ this.dataDoc[this.fieldKey + "-duration"] = this.videoDuration;
+
+ this.dataDoc.layout = VideoBox.LayoutString(this.fieldKey);
+ this.dataDoc[this.props.fieldKey] = new VideoField(this.result.accessPaths.client);
+ this.dataDoc[this.fieldKey + "-recorded"] = true;
+ // stringify the presentation and store it
+ presentation?.movements && (this.dataDoc[this.fieldKey + "-presentation"] = JSON.stringify(presentation));
+ }
+
+ render() {
+ return <div className="recordingBox" ref={this._ref}>
+ {!this.result && <RecordingView setResult={this.setResult} setDuration={this.setVideoDuration} id={this.rootDoc.proto?.[Id] || ''} />}
+ </div>;
+ }
}
diff --git a/src/client/views/nodes/RecordingBox/RecordingView.scss b/src/client/views/nodes/RecordingBox/RecordingView.scss
index 9b2f6d070..2e6f6bc26 100644
--- a/src/client/views/nodes/RecordingBox/RecordingView.scss
+++ b/src/client/views/nodes/RecordingBox/RecordingView.scss
@@ -18,13 +18,13 @@ button {
width: 100%;
// display: flex;
pointer-events: all;
- background-color: grey;
+ background-color: black;
}
.video-wrapper {
// max-width: 600px;
// max-width: 700px;
- position: relative;
+ // position: relative;
display: flex;
justify-content: center;
// overflow: hidden;
@@ -33,7 +33,7 @@ button {
}
.video-wrapper:hover .controls {
- bottom: 30px;
+ bottom: 34.5px;
transform: translateY(0%);
opacity: 100%;
}
@@ -43,8 +43,8 @@ button {
align-items: center;
justify-content: space-evenly;
position: absolute;
- padding: 14px;
- width: 100%;
+ // padding: 14px;
+ //width: 100%;
max-width: 500px;
// max-height: 20%;
flex-wrap: wrap;
@@ -56,7 +56,14 @@ button {
// transform: translateY(150%);
transition: all 0.3s ease-in-out;
// opacity: 0%;
- bottom: 30px;
+ bottom: 34.5px;
+ height: 60px;
+ right: 2px;
+ // bottom: -150px;
+}
+
+.controls:active {
+ bottom: 40px;
// bottom: -150px;
}
@@ -127,9 +134,8 @@ button {
.controls-inner-container {
display: flex;
flex-direction: row;
- justify-content: center;
- width: 100%;
-
+ align-content: center;
+ position: relative;
}
.record-button-wrapper {
@@ -180,14 +186,14 @@ button {
height: 100%;
display: flex;
flex-direction: row;
- align-items: center;
- position: absolute;
+ align-content: center;
+ position: relative;
top: 0;
bottom: 0;
&.video-edit-wrapper {
- right: 50% - 15;
+ // right: 50% - 15;
.track-screen {
font-weight: 200;
@@ -197,10 +203,11 @@ button {
&.track-screen-wrapper {
- right: 50% - 30;
+ // right: 50% - 30;
.track-screen {
font-weight: 200;
+ color: aqua;
}
}
diff --git a/src/client/views/nodes/RecordingBox/RecordingView.tsx b/src/client/views/nodes/RecordingBox/RecordingView.tsx
index b95335792..83ed6914e 100644
--- a/src/client/views/nodes/RecordingBox/RecordingView.tsx
+++ b/src/client/views/nodes/RecordingBox/RecordingView.tsx
@@ -1,23 +1,24 @@
import * as React from 'react';
import "./RecordingView.scss";
-import { ReactElement, useCallback, useEffect, useRef, useState } from "react";
+import { useEffect, useRef, useState } from "react";
import { ProgressBar } from "./ProgressBar"
import { MdBackspace } from 'react-icons/md';
import { FaCheckCircle } from 'react-icons/fa';
import { IconContext } from "react-icons";
import { Networking } from '../../../Network';
import { Upload } from '../../../../server/SharedMediaTypes';
+import { returnFalse, returnTrue, setupMoveUpEvents } from '../../../../Utils';
+import { Presentation, TrackMovements } from '../../../util/TrackMovements';
-import { RecordingApi } from '../../../util/RecordingApi';
-import { emptyFunction, returnFalse, returnTrue, setupMoveUpEvents } from '../../../../Utils';
-
-interface MediaSegment {
+export interface MediaSegment {
videoChunks: any[],
- endTime: number
+ endTime: number,
+ startTime: number,
+ presentation?: Presentation,
}
interface IRecordingViewProps {
- setResult: (info: Upload.FileInformation, trackScreen: boolean) => void
+ setResult: (info: Upload.AccessPathInfo, presentation?: Presentation) => void
setDuration: (seconds: number) => void
id: string
}
@@ -32,7 +33,13 @@ export function RecordingView(props: IRecordingViewProps) {
const [playing, setPlaying] = useState(false);
const [progress, setProgress] = useState(0);
+ // acts as a "refresh state" to tell progressBar when to undo
+ const [doUndo, setDoUndo] = useState(false);
+ // whether an undo can occur or not
+ const [canUndo, setCanUndo] = useState(false);
+
const [videos, setVideos] = useState<MediaSegment[]>([]);
+ const [orderVideos, setOrderVideos] = useState<boolean>(false);
const videoRecorder = useRef<MediaRecorder | null>(null);
const videoElementRef = useRef<HTMLVideoElement | null>(null);
@@ -45,53 +52,42 @@ export function RecordingView(props: IRecordingViewProps) {
video: {
width: 1280,
height: 720,
+
},
audio: {
echoCancellation: true,
noiseSuppression: true,
sampleRate: 44100
}
- }
+ };
useEffect(() => {
-
if (finished) {
- props.setDuration(recordingTimer * 100)
- let allVideoChunks: any = []
- videos.forEach((vid) => {
- console.log(vid.videoChunks)
- allVideoChunks = allVideoChunks.concat(vid.videoChunks)
- })
-
- const videoFile = new File(allVideoChunks, "video.mkv", { type: allVideoChunks[0].type, lastModified: Date.now() });
-
- Networking.UploadFilesToServer(videoFile)
- .then((data) => {
- const result = data[0].result
- if (!(result instanceof Error)) { // convert this screenshotBox into normal videoBox
- props.setResult(result, trackScreen)
- } else {
- alert("video conversion failed");
- }
- })
-
- }
+ // make the total presentation that'll match the concatenated video
+ let concatPres = trackScreen && TrackMovements.Instance.concatPresentations(videos.map(v => v.presentation as Presentation));
+ // this async function uses the server to create the concatenated video and then sets the result to its accessPaths
+ (async () => {
+ const videoFiles = videos.map((vid, i) => new File(vid.videoChunks, `segvideo${i}.mkv`, { type: vid.videoChunks[0].type, lastModified: Date.now() }));
- }, [finished])
+ // upload the segments to the server and get their server access paths
+ const serverPaths: string[] = (await Networking.UploadFilesToServer(videoFiles))
+ .map(res => (res.result instanceof Error) ? '' : res.result.accessPaths.agnostic.server)
- useEffect(() => {
- // check if the browser supports media devices on first load
- if (!navigator.mediaDevices) {
- console.log('This browser does not support getUserMedia.')
+ // concatenate the segments together using a POST call
+ const result: Upload.AccessPathInfo | Error = await Networking.PostToServer('/concatVideos', serverPaths);
+ !(result instanceof Error) ? props.setResult(result, concatPres || undefined) : console.error("video conversion failed");
+ })();
}
- console.log('This device has the correct media devices.')
- }, [])
+ }, [videos]);
+ // this will call upon the progress bar to edit videos to be in the correct order
useEffect(() => {
- // get access to the video element on every render
- videoElementRef.current = document.getElementById(`video-${props.id}`) as HTMLVideoElement;
- })
+ finished && setOrderVideos(true);
+ }, [finished]);
+
+ // check if the browser supports media devices on first load
+ useEffect(() => { if (!navigator.mediaDevices) alert('This browser does not support getUserMedia.'); }, []);
useEffect(() => {
let interval: any = null;
@@ -103,121 +99,108 @@ export function RecordingView(props: IRecordingViewProps) {
clearInterval(interval);
}
return () => clearInterval(interval);
- }, [recording])
+ }, [recording]);
useEffect(() => {
setVideoProgressHelper(recordingTimer)
recordingTimerRef.current = recordingTimer;
- }, [recordingTimer])
+ }, [recordingTimer]);
const setVideoProgressHelper = (progress: number) => {
const newProgress = (progress / MAXTIME) * 100;
- setProgress(newProgress)
+ setProgress(newProgress);
}
+
const startShowingStream = async (mediaConstraints = DEFAULT_MEDIA_CONSTRAINTS) => {
- const stream = await navigator.mediaDevices.getUserMedia(mediaConstraints)
+ const stream = await navigator.mediaDevices.getUserMedia(mediaConstraints);
- videoElementRef.current!.src = ""
- videoElementRef.current!.srcObject = stream
- videoElementRef.current!.muted = true
+ videoElementRef.current!.src = "";
+ videoElementRef.current!.srcObject = stream;
+ videoElementRef.current!.muted = true;
- return stream
+ return stream;
}
const record = async () => {
- const stream = await startShowingStream();
- videoRecorder.current = new MediaRecorder(stream)
+ // don't need to start a new stream every time we start recording a new segment
+ if (!videoRecorder.current) videoRecorder.current = new MediaRecorder(await startShowingStream());
// temporary chunks of video
- let videoChunks: any = []
+ let videoChunks: any = [];
videoRecorder.current.ondataavailable = (event: any) => {
- if (event.data.size > 0) {
- videoChunks.push(event.data)
- }
- }
+ if (event.data.size > 0) videoChunks.push(event.data);
+ };
videoRecorder.current.onstart = (event: any) => {
setRecording(true);
- trackScreen && RecordingApi.Instance.start();
- }
+ // start tracking view movements when the recorder starts
+ trackScreen && TrackMovements.Instance.start();
+ };
videoRecorder.current.onstop = () => {
// if we have a last portion
if (videoChunks.length > 1) {
// append the current portion to the video pieces
- setVideos(videos => [...videos, { videoChunks: videoChunks, endTime: recordingTimerRef.current }])
+ const nextVideo = {
+ videoChunks,
+ endTime: recordingTimerRef.current,
+ startTime: videos?.lastElement()?.endTime || 0
+ };
+
+ // if screen tracking produced a presentation, attach it to this segment
+ const presentation = TrackMovements.Instance.yieldPresentation();
+ setVideos(videos => [...videos, (presentation != null && trackScreen) ? { ...nextVideo, presentation } : nextVideo]);
}
// reset the temporary chunks
- videoChunks = []
+ videoChunks = [];
setRecording(false);
- setFinished(true);
- trackScreen && RecordingApi.Instance.pause();
}
- // recording paused
- videoRecorder.current.onpause = (event: any) => {
- // append the current portion to the video pieces
- setVideos(videos => [...videos, { videoChunks: videoChunks, endTime: recordingTimerRef.current }])
+ videoRecorder.current.start(200);
+ }
- // reset the temporary chunks
- videoChunks = []
- setRecording(false);
- trackScreen && RecordingApi.Instance.pause();
- }
- videoRecorder.current.onresume = async (event: any) => {
- await startShowingStream();
- setRecording(true);
- trackScreen && RecordingApi.Instance.resume();
- }
+ // called once all segments have been recorded
+ const finish = (e: React.PointerEvent) => {
+ e.stopPropagation();
- videoRecorder.current.start(200)
- }
+ // call stop on the video recorder if active
+ videoRecorder.current?.state !== "inactive" && videoRecorder.current?.stop();
+ // stop the audio/video tracks so the browser's recording indicator disappears
+ const stream = videoElementRef.current!.srcObject;
+ stream instanceof MediaStream && stream.getTracks().forEach(track => track.stop());
- const stop = () => {
- if (videoRecorder.current) {
- if (videoRecorder.current.state !== "inactive") {
- videoRecorder.current.stop();
- // recorder.current.stream.getTracks().forEach((track: any) => track.stop())
- }
- }
+ // finish and clear the movement tracker
+ TrackMovements.Instance.finish();
+
+ // signal the progress bar to put the videos in the correct order
+ setFinished(true);
}
- const pause = () => {
- if (videoRecorder.current) {
- if (videoRecorder.current.state === "recording") {
- videoRecorder.current.pause();
- }
- }
+ const pause = (e: React.PointerEvent) => {
+ e.stopPropagation();
+ // if recording, stopping here just closes out the current segment
+ videoRecorder.current?.state === "recording" && videoRecorder.current.stop();
}
- const startOrResume = (e: React.PointerEvent) => {
- // the code to start or resume does not get triggered if we start dragging the button
+ const start = (e: React.PointerEvent) => {
setupMoveUpEvents({}, e, returnTrue, returnFalse, e => {
- if (!videoRecorder.current || videoRecorder.current.state === "inactive") {
- record();
- } else if (videoRecorder.current.state === "paused") {
- videoRecorder.current.resume();
- }
+ // start recording if not already recording
+ if (!videoRecorder.current || videoRecorder.current.state === "inactive") record();
+
return true; // cancels propagation to documentView to avoid selecting it.
}, false, false);
}
- const clearPrevious = () => {
- const numVideos = videos.length
- setRecordingTimer(numVideos == 1 ? 0 : videos[numVideos - 2].endTime)
- setVideoProgressHelper(numVideos == 1 ? 0 : videos[numVideos - 2].endTime)
- setVideos(videos.filter((_, idx) => idx !== numVideos - 1));
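+ // toggle the undo flag consumed by the ProgressBar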
+ const undoPrevious = (e: React.PointerEvent) => {
+ e.stopPropagation();
+ setDoUndo(prev => !prev);
}
- const handleOnTimeUpdate = () => {
- if (playing) {
- setVideoProgressHelper(videoElementRef.current!.currentTime)
- }
- };
+ const handleOnTimeUpdate = () => { playing && setVideoProgressHelper(videoElementRef.current!.currentTime); };
const millisecondToMinuteSecond = (milliseconds: number) => {
const toTwoDigit = (digit: number) => {
@@ -234,7 +217,8 @@ export function RecordingView(props: IRecordingViewProps) {
<video id={`video-${props.id}`}
autoPlay
muted
- onTimeUpdate={handleOnTimeUpdate}
+ onTimeUpdate={() => handleOnTimeUpdate()}
+ ref={videoElementRef}
/>
<div className="recording-sign">
<span className="dot" />
@@ -246,18 +230,18 @@ export function RecordingView(props: IRecordingViewProps) {
<div className="record-button-wrapper">
{recording ?
<button className="stop-button" onPointerDown={pause} /> :
- <button className="record-button" onPointerDown={startOrResume} />
+ <button className="record-button" onPointerDown={start} />
}
</div>
{!recording && (videos.length > 0 ?
<div className="options-wrapper video-edit-wrapper">
- {/* <IconContext.Provider value={{ color: "grey", className: "video-edit-buttons" }}>
- <MdBackspace onClick={clearPrevious} />
- </IconContext.Provider> */}
+ <IconContext.Provider value={{ color: "grey", className: "video-edit-buttons", style: { display: canUndo ? 'inherit' : 'none' } }}>
+ <MdBackspace onPointerDown={undoPrevious} />
+ </IconContext.Provider>
<IconContext.Provider value={{ color: "#cc1c08", className: "video-edit-buttons" }}>
- <FaCheckCircle onClick={stop} />
+ <FaCheckCircle onPointerDown={finish} />
</IconContext.Provider>
</div>
@@ -271,12 +255,17 @@ export function RecordingView(props: IRecordingViewProps) {
</div>
- <ProgressBar
- progress={progress}
- marks={videos.map((elt) => elt.endTime / MAXTIME * 100)}
- // playSegment={playSegment}
- />
</div>
+
+ <ProgressBar
+ videos={videos}
+ setVideos={setVideos}
+ orderVideos={orderVideos}
+ progress={progress}
+ recording={recording}
+ doUndo={doUndo}
+ setCanUndo={setCanUndo}
+ />
</div>
</div>)
} \ No newline at end of file
diff --git a/src/client/views/nodes/VideoBox.tsx b/src/client/views/nodes/VideoBox.tsx
index 1b891034f..e833c7e30 100644
--- a/src/client/views/nodes/VideoBox.tsx
+++ b/src/client/views/nodes/VideoBox.tsx
@@ -15,7 +15,6 @@ import { DocumentType } from "../../documents/DocumentTypes";
import { Networking } from "../../Network";
import { CurrentUserUtils } from "../../util/CurrentUserUtils";
import { DocumentManager } from "../../util/DocumentManager";
-import { RecordingApi } from "../../util/RecordingApi";
import { SelectionManager } from "../../util/SelectionManager";
import { SnappingManager } from "../../util/SnappingManager";
import { undoBatch } from "../../util/UndoManager";
@@ -29,8 +28,10 @@ import { MarqueeAnnotator } from "../MarqueeAnnotator";
import { AnchorMenu } from "../pdf/AnchorMenu";
import { StyleProp } from "../StyleProvider";
import { FieldView, FieldViewProps } from './FieldView';
-import { RecordingBox } from "./RecordingBox/RecordingBox";
import "./VideoBox.scss";
+import { Presentation } from "../../util/TrackMovements";
+import { RecordingBox } from "./RecordingBox";
+import { ReplayMovements } from "../../util/ReplayMovements";
const path = require('path');
/**
@@ -150,6 +151,10 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
this.player && this.setPlayheadTime(0);
document.addEventListener("keydown", this.keyEvents, true);
+
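+ // register this VideoBox with ReplayMovements so recorded view movements replay alongside the video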
+ if (this.presentation) {
+ ReplayMovements.Instance.setVideoBox(this);
+ }
}
componentWillUnmount() {
@@ -157,6 +162,10 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
this.Pause();
Object.keys(this._disposers).forEach(d => this._disposers[d]?.());
document.removeEventListener("keydown", this.keyEvents, true);
+
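+ // unregister this VideoBox from ReplayMovements on unmount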
+ if (this.presentation) {
+ ReplayMovements.Instance.removeVideoBox();
+ }
}
// handles key events, when timeline scrubs fade controls
@@ -183,20 +192,6 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
@action public Play = (update: boolean = true) => {
if (this._playRegionTimer) return;
- // if (Doc.UserDoc().presentationMode === 'watching' && !this._playing) {
- // console.log('VideoBox : Play : presentation mode', this._playing);
- // return;
- // }
-
- // if presentation isn't null, call followmovements on the recording api
- if (this.presentation) {
- // console.log("presentation isn't null")
- const err = RecordingApi.Instance.playMovements(this.presentation, this.player?.currentTime || 0, this);
- err && console.log(err)
- } else {
- // console.log("presentation is null")
- }
-
this._playing = true;
const eleTime = this.player?.currentTime || 0;
if (this.timeline) {
@@ -238,12 +233,6 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
// pauses video
@action public Pause = (update: boolean = true) => {
- if (this.presentation) {
- console.log('VideoBox : Pause');
- const err = RecordingApi.Instance.pauseMovements();
- err && console.log(err);
- }
-
this._playing = false;
this.removeCurrentlyPlaying();
try {
diff --git a/src/client/views/nodes/trails/PresElementBox.tsx b/src/client/views/nodes/trails/PresElementBox.tsx
index 1a2f4b93f..a4c69f66b 100644
--- a/src/client/views/nodes/trails/PresElementBox.tsx
+++ b/src/client/views/nodes/trails/PresElementBox.tsx
@@ -361,8 +361,8 @@ export class PresElementBox extends ViewBoxBaseComponent<FieldViewProps>() {
} else {
// if we dont have any recording
const recording = Docs.Create.WebCamDocument("", {
- _width: 400, _height: 200,
- // hideDocumentButtonBar: true,
+ _width: 384, _height: 216,
+ hideDocumentButtonBar: true,
hideDecorationTitle: true,
hideOpenButton: true,
// hideDeleteButton: true,