From bafc47fe100002e37c7abcada3dc44f3bfb66f62 Mon Sep 17 00:00:00 2001 From: bobzel Date: Thu, 23 Sep 2021 11:16:53 -0400 Subject: a bunch of fixes to audio timelines to support undoing clips, simplifying audioWaveform and having it recompute when a clip is made. --- src/client/views/AudioWaveform.tsx | 117 ++++++++++++++----------------------- 1 file changed, 45 insertions(+), 72 deletions(-) (limited to 'src/client/views/AudioWaveform.tsx') diff --git a/src/client/views/AudioWaveform.tsx b/src/client/views/AudioWaveform.tsx index 8f3b7c2cd..1e676e1f0 100644 --- a/src/client/views/AudioWaveform.tsx +++ b/src/client/views/AudioWaveform.tsx @@ -1,6 +1,6 @@ import React = require("react"); import axios from "axios"; -import { action, computed } from "mobx"; +import { action, computed, reaction, IReactionDisposer } from "mobx"; import { observer } from "mobx-react"; import Waveform from "react-audio-waveform"; import { Doc } from "../../fields/Doc"; @@ -12,7 +12,8 @@ import "./AudioWaveform.scss"; import { Colors } from "./global/globalEnums"; export interface AudioWaveformProps { - duration: number; + duration: number; // length of media clip + rawDuration: number; // length of underlying media data mediaPath: string; layoutDoc: Doc; trimming: boolean; @@ -22,19 +23,28 @@ export interface AudioWaveformProps { @observer export class AudioWaveform extends React.Component { public static NUMBER_OF_BUCKETS = 100; + _disposer: IReactionDisposer | undefined; @computed get _waveHeight() { return Math.max(50, this.props.PanelHeight()); } + + @computed get clipStart() { return NumCast(this.props.layoutDoc.clipStart); } + @computed get clipEnd() { return NumCast(this.props.layoutDoc.clipEnd, this.props.rawDuration); } + @computed get audioBuckets() { return Cast(this.props.layoutDoc.audioBuckets, listSpec("number"), []); } + @computed get audioBucketRange() { return Cast(this.props.layoutDoc.audioBucketRange, listSpec("number"), [-1, -1]); } + componentWillUnmount() { + 
this._disposer?.(); + } componentDidMount() { - const audioBuckets = Cast( - this.props.layoutDoc.audioBuckets, - listSpec("number"), - [] - ); - if (!audioBuckets.length) { - this.props.layoutDoc.audioBuckets = new List([0, 0]); /// "lock" to prevent other views from computing the same data - setTimeout(this.createWaveformBuckets); - } + this._disposer = reaction(() => [this.clipStart, this.clipEnd, this.audioBuckets.length, ...this.audioBucketRange], + (range) => { + if (range[2] !== AudioWaveform.NUMBER_OF_BUCKETS || range[3] !== range[0] || range[4] !== range[1]) { + this.props.layoutDoc.audioBucketRange = new List([range[0], range[1]]); // setting these values here serves as a "lock" to prevent multiple attempts to create the waveform at nearly the same time. + this.props.layoutDoc.audioBuckets = new List(numberRange(AudioWaveform.NUMBER_OF_BUCKETS)); + setTimeout(this.createWaveformBuckets); + } + }, { fireImmediately: true }); + } // decodes the audio file into peaks for generating the waveform @@ -45,81 +55,44 @@ export class AudioWaveform extends React.Component { context.decodeAudioData( response.data, action((buffer) => { - const decodedAudioData = buffer.getChannelData(0); + const rawDecodedAudioData = buffer.getChannelData(0); + const startInd = this.clipStart / this.props.rawDuration; + const endInd = this.clipEnd / this.props.rawDuration; + const decodedAudioData = rawDecodedAudioData.slice(Math.floor(startInd * rawDecodedAudioData.length), Math.floor(endInd * rawDecodedAudioData.length)); const bucketDataSize = Math.floor( decodedAudioData.length / AudioWaveform.NUMBER_OF_BUCKETS ); const brange = Array.from(Array(bucketDataSize)); - this.props.layoutDoc.audioBuckets = new List( - numberRange(AudioWaveform.NUMBER_OF_BUCKETS).map( - (i: number) => - brange.reduce( - (p, x, j) => - Math.abs( - Math.max(p, decodedAudioData[i * bucketDataSize + j]) - ), - 0 - ) / 2 - ) + const bucketList = numberRange(AudioWaveform.NUMBER_OF_BUCKETS).map( + (i: number) 
=> + brange.reduce( + (p, x, j) => + Math.abs( + Math.max(p, decodedAudioData[i * bucketDataSize + j]) + ), + 0 + ) / 2 ); + this.props.layoutDoc.audioBucketRange = new List([this.clipStart, this.clipEnd]); + this.props.layoutDoc.audioBuckets = new List(bucketList); }) ); } ); } - - @action - createTrimBuckets = () => { - const audioBuckets = Cast( - this.props.layoutDoc.audioBuckets, - listSpec("number"), - [] - ); - - const start = Math.floor( - (NumCast(this.props.layoutDoc.clipStart) / this.props.duration) * 100 - ); - const end = Math.floor( - (NumCast(this.props.layoutDoc.clipEnd) / this.props.duration) * 100 - ); - return audioBuckets.slice(start, end); - } - render() { - const audioBuckets = Cast( - this.props.layoutDoc.audioBuckets, - listSpec("number"), - [] - ); - return (
- {this.props.trimming || !this.props.layoutDoc.clipEnd ? ( - - ) : ( - - )} +
); } -- cgit v1.2.3-70-g09d2 From 9675e948be8a7ea2d86c8ca68a89c09452ece0e7 Mon Sep 17 00:00:00 2001 From: bobzel Date: Thu, 23 Sep 2021 23:22:39 -0400 Subject: added code for editing the original waveform, not a clip when trimming is activated. --- src/client/views/AudioWaveform.tsx | 25 ++++++++++++---------- .../collections/CollectionStackedTimeline.tsx | 2 ++ src/client/views/nodes/AudioBox.tsx | 13 ++++++++--- 3 files changed, 26 insertions(+), 14 deletions(-) (limited to 'src/client/views/AudioWaveform.tsx') diff --git a/src/client/views/AudioWaveform.tsx b/src/client/views/AudioWaveform.tsx index 1e676e1f0..7d83ea3dc 100644 --- a/src/client/views/AudioWaveform.tsx +++ b/src/client/views/AudioWaveform.tsx @@ -17,6 +17,8 @@ export interface AudioWaveformProps { mediaPath: string; layoutDoc: Doc; trimming: boolean; + clipStart: number; + clipEnd: number; PanelHeight: () => number; } @@ -28,20 +30,22 @@ export class AudioWaveform extends React.Component { return Math.max(50, this.props.PanelHeight()); } - @computed get clipStart() { return NumCast(this.props.layoutDoc.clipStart); } - @computed get clipEnd() { return NumCast(this.props.layoutDoc.clipEnd, this.props.rawDuration); } - @computed get audioBuckets() { return Cast(this.props.layoutDoc.audioBuckets, listSpec("number"), []); } - @computed get audioBucketRange() { return Cast(this.props.layoutDoc.audioBucketRange, listSpec("number"), [-1, -1]); } + @computed get clipStart() { return this.props.clipStart; } + @computed get clipEnd() { return this.props.clipEnd; } + audioBucketField = (start: number, end: number) => { return "audioBuckets-" + start.toFixed(2) + "-" + end.toFixed(2); } + @computed get audioBuckets() { return Cast(this.props.layoutDoc[this.audioBucketField(this.clipStart, this.clipEnd)], listSpec("number"), []); } componentWillUnmount() { this._disposer?.(); } componentDidMount() { - this._disposer = reaction(() => [this.clipStart, this.clipEnd, this.audioBuckets.length, 
...this.audioBucketRange], + this._disposer = reaction(() => [this.clipStart, this.clipEnd, this.audioBuckets.length], (range) => { - if (range[2] !== AudioWaveform.NUMBER_OF_BUCKETS || range[3] !== range[0] || range[4] !== range[1]) { - this.props.layoutDoc.audioBucketRange = new List([range[0], range[1]]); // setting these values here serves as a "lock" to prevent multiple attempts to create the waveform at nearly the same time. - this.props.layoutDoc.audioBuckets = new List(numberRange(AudioWaveform.NUMBER_OF_BUCKETS)); - setTimeout(this.createWaveformBuckets); + if (range[2] !== AudioWaveform.NUMBER_OF_BUCKETS) { + if (!this.props.layoutDoc[this.audioBucketField(range[0], range[1])]) { + // setting these values here serves as a "lock" to prevent multiple attempts to create the waveform at nerly the same time. + this.props.layoutDoc[this.audioBucketField(range[0], range[1])] = new List(numberRange(AudioWaveform.NUMBER_OF_BUCKETS)); + setTimeout(this.createWaveformBuckets); + } } }, { fireImmediately: true }); @@ -74,8 +78,7 @@ export class AudioWaveform extends React.Component { 0 ) / 2 ); - this.props.layoutDoc.audioBucketRange = new List([this.clipStart, this.clipEnd]); - this.props.layoutDoc.audioBuckets = new List(bucketList); + this.props.layoutDoc[this.audioBucketField(this.clipStart, this.clipEnd)] = new List(bucketList); }) ); } diff --git a/src/client/views/collections/CollectionStackedTimeline.tsx b/src/client/views/collections/CollectionStackedTimeline.tsx index 929bfa055..e00e66295 100644 --- a/src/client/views/collections/CollectionStackedTimeline.tsx +++ b/src/client/views/collections/CollectionStackedTimeline.tsx @@ -547,6 +547,8 @@ export class CollectionStackedTimeline extends CollectionSubView< duration={this.clipDuration} mediaPath={this.props.mediaPath} layoutDoc={this.layoutDoc} + clipStart={this.props.clipStart} + clipEnd={this.props.clipEnd} PanelHeight={this.timelineContentHeight} trimming={this.props.trimming} /> diff --git 
a/src/client/views/nodes/AudioBox.tsx b/src/client/views/nodes/AudioBox.tsx index f6d6ff440..6a25ffaeb 100644 --- a/src/client/views/nodes/AudioBox.tsx +++ b/src/client/views/nodes/AudioBox.tsx @@ -577,9 +577,16 @@ export class AudioBox extends ViewBoxAnnotatableComponent< PanelWidth={this.timelineWidth} PanelHeight={this.timelineHeight} rawDuration={this.rawDuration} - clipStart={this.clipStart} - clipEnd={this.clipEnd} - clipDuration={this.duration} + + // this edits the entire waveform when trimming is activated + clipStart={this._trimming ? 0 : this.clipStart} + clipEnd={this._trimming ? this.rawDuration : this.clipEnd} + clipDuration={this._trimming ? this.rawDuration : this.duration} + // this edits just the current waveform clip when trimming is activated + // clipStart={this.clipStart} + // clipEnd={this.clipEnd} + // clipDuration={this.duration} + trimming={this._trimming} trimStart={this.trimStartFunc} trimEnd={this.trimEndFunc} -- cgit v1.2.3-70-g09d2 From a4b3114f3792c80e20a3f40053ec4724729c1100 Mon Sep 17 00:00:00 2001 From: bobzel Date: Fri, 24 Sep 2021 00:40:30 -0400 Subject: added doubleclick on clip button to switch to editing entire timeline vs. single click to edit just the clip. fixed some audio timeline bugs with printing out current time and time span when trimming. cleaned up audioWaveform bucket fields. 
--- src/client/views/AudioWaveform.tsx | 24 +++---- .../collections/CollectionStackedTimeline.tsx | 13 ++-- src/client/views/nodes/AudioBox.tsx | 78 ++++++++++++---------- 3 files changed, 56 insertions(+), 59 deletions(-) (limited to 'src/client/views/AudioWaveform.tsx') diff --git a/src/client/views/AudioWaveform.tsx b/src/client/views/AudioWaveform.tsx index 7d83ea3dc..f7b117130 100644 --- a/src/client/views/AudioWaveform.tsx +++ b/src/client/views/AudioWaveform.tsx @@ -32,27 +32,25 @@ export class AudioWaveform extends React.Component { @computed get clipStart() { return this.props.clipStart; } @computed get clipEnd() { return this.props.clipEnd; } - audioBucketField = (start: number, end: number) => { return "audioBuckets-" + start.toFixed(2) + "-" + end.toFixed(2); } + audioBucketField = (start: number, end: number) => { return "audioBuckets/" + start.toFixed(2).replace(".", "_") + "/" + end.toFixed(2).replace(".", "_"); } @computed get audioBuckets() { return Cast(this.props.layoutDoc[this.audioBucketField(this.clipStart, this.clipEnd)], listSpec("number"), []); } componentWillUnmount() { this._disposer?.(); } componentDidMount() { - this._disposer = reaction(() => [this.clipStart, this.clipEnd, this.audioBuckets.length], - (range) => { - if (range[2] !== AudioWaveform.NUMBER_OF_BUCKETS) { - if (!this.props.layoutDoc[this.audioBucketField(range[0], range[1])]) { - // setting these values here serves as a "lock" to prevent multiple attempts to create the waveform at nerly the same time. 
- this.props.layoutDoc[this.audioBucketField(range[0], range[1])] = new List(numberRange(AudioWaveform.NUMBER_OF_BUCKETS)); - setTimeout(this.createWaveformBuckets); - } + this._disposer = reaction(() => ({ clipStart: this.clipStart, clipEnd: this.clipEnd, fieldKey: this.audioBucketField(this.clipStart, this.clipEnd) }), + ({ clipStart, clipEnd, fieldKey }) => { + if (!this.props.layoutDoc[fieldKey]) { + // setting these values here serves as a "lock" to prevent multiple attempts to create the waveform at nerly the same time. + this.props.layoutDoc[fieldKey] = new List(numberRange(AudioWaveform.NUMBER_OF_BUCKETS)); + setTimeout(() => this.createWaveformBuckets(fieldKey, clipStart, clipEnd)); } }, { fireImmediately: true }); } // decodes the audio file into peaks for generating the waveform - createWaveformBuckets = async () => { + createWaveformBuckets = async (fieldKey: string, clipStart: number, clipEnd: number) => { axios({ url: this.props.mediaPath, responseType: "arraybuffer" }).then( (response) => { const context = new window.AudioContext(); @@ -60,8 +58,8 @@ export class AudioWaveform extends React.Component { response.data, action((buffer) => { const rawDecodedAudioData = buffer.getChannelData(0); - const startInd = this.clipStart / this.props.rawDuration; - const endInd = this.clipEnd / this.props.rawDuration; + const startInd = clipStart / this.props.rawDuration; + const endInd = clipEnd / this.props.rawDuration; const decodedAudioData = rawDecodedAudioData.slice(Math.floor(startInd * rawDecodedAudioData.length), Math.floor(endInd * rawDecodedAudioData.length)); const bucketDataSize = Math.floor( @@ -78,7 +76,7 @@ export class AudioWaveform extends React.Component { 0 ) / 2 ); - this.props.layoutDoc[this.audioBucketField(this.clipStart, this.clipEnd)] = new List(bucketList); + this.props.layoutDoc[fieldKey] = new List(bucketList); }) ); } diff --git a/src/client/views/collections/CollectionStackedTimeline.tsx 
b/src/client/views/collections/CollectionStackedTimeline.tsx index e00e66295..7859d3c3f 100644 --- a/src/client/views/collections/CollectionStackedTimeline.tsx +++ b/src/client/views/collections/CollectionStackedTimeline.tsx @@ -50,7 +50,6 @@ import { DragManager } from "../../util/DragManager"; type PanZoomDocument = makeInterface<[]>; const PanZoomDocument = makeInterface(); export type CollectionStackedTimelineProps = { - clipDuration: number; Play: () => void; Pause: () => void; playLink: (linkDoc: Doc) => void; @@ -65,6 +64,7 @@ export type CollectionStackedTimelineProps = { trimming: boolean; clipStart: number; clipEnd: number; + clipDuration: number; trimStart: () => number; trimEnd: () => number; trimDuration: () => number; @@ -165,8 +165,8 @@ export class CollectionStackedTimeline extends CollectionSubView< } toTimeline = (screen_delta: number, width: number) => { return Math.max( - this.trimStart, - Math.min(this.trimEnd, (screen_delta / width) * this.props.trimDuration() + this.trimStart)); + this.props.clipStart, + Math.min(this.props.clipEnd, (screen_delta / width) * this.props.clipDuration + this.props.clipStart)); } rangeClickScript = () => CollectionStackedTimeline.RangeScript; @@ -282,12 +282,7 @@ export class CollectionStackedTimeline extends CollectionSubView< this.props.isSelected(true) || this.props.isContentActive(), undefined, () => { - !wasPlaying && - (this.props.trimming && this.clipDuration ? 
- this.props.setTime(((clientX - rect.x) / rect.width) * this.clipDuration) - : - this.props.setTime(((clientX - rect.x) / rect.width) * this.props.trimDuration() + this.trimStart) - ); + !wasPlaying && this.props.setTime(((clientX - rect.x) / rect.width) * this.clipDuration + this.props.clipStart); } ); } diff --git a/src/client/views/nodes/AudioBox.tsx b/src/client/views/nodes/AudioBox.tsx index 6a25ffaeb..bfc15cea8 100644 --- a/src/client/views/nodes/AudioBox.tsx +++ b/src/client/views/nodes/AudioBox.tsx @@ -16,7 +16,7 @@ import { makeInterface } from "../../../fields/Schema"; import { ComputedField } from "../../../fields/ScriptField"; import { Cast, NumCast } from "../../../fields/Types"; import { AudioField, nullAudio } from "../../../fields/URLField"; -import { emptyFunction, formatTime, OmitKeys } from "../../../Utils"; +import { emptyFunction, formatTime, OmitKeys, setupMoveUpEvents, returnFalse } from "../../../Utils"; import { DocUtils } from "../../documents/Documents"; import { Networking } from "../../Network"; import { CurrentUserUtils } from "../../util/CurrentUserUtils"; @@ -48,13 +48,14 @@ export class AudioBox extends ViewBoxAnnotatableComponent< ViewBoxAnnotatableProps & FieldViewProps, AudioDocument >(AudioDocument) { - public static LayoutString(fieldKey: string) { - return FieldView.LayoutString(AudioBox, fieldKey); - } + public static LayoutString(fieldKey: string) { return FieldView.LayoutString(AudioBox, fieldKey); } public static Enabled = false; static playheadWidth = 40; // width of playhead static heightPercent = 75; // height of timeline in percent of height of audioBox. 
static Instance: AudioBox; + static ScopeAll = 2; + static ScopeClip = 1; + static ScopeNone = 0; _disposers: { [name: string]: IReactionDisposer } = {}; _ele: HTMLAudioElement | null = null; @@ -74,14 +75,19 @@ export class AudioBox extends ViewBoxAnnotatableComponent< @observable _position: number = 0; @observable _waveHeight: Opt = this.layoutDoc._height; @observable _paused: boolean = false; - @observable _trimming: boolean = false; + @observable _trimming: number = AudioBox.ScopeNone; @observable _trimStart: number = NumCast(this.layoutDoc.clipStart); @observable _trimEnd: number | undefined = Cast(this.layoutDoc.clipEnd, "number"); - @computed get clipStart() { return NumCast(this.layoutDoc.clipStart); } - @computed get clipEnd() { return NumCast(this.layoutDoc.clipEnd, this.duration); } - @computed get trimStart() { return this._trimming ? this._trimStart : NumCast(this.layoutDoc.clipStart); } + @computed get clipStart() { return this._trimming === AudioBox.ScopeAll ? 0 : NumCast(this.layoutDoc.clipStart); } + @computed get clipDuration() { + return this._trimming === AudioBox.ScopeAll ? NumCast(this.dataDoc[`${this.fieldKey}-duration`]) : + NumCast(this.layoutDoc.clipEnd, this.clipStart + NumCast(this.dataDoc[`${this.fieldKey}-duration`])) - this.clipStart; + } + @computed get clipEnd() { return this.clipStart + this.clipDuration; } + @computed get trimStart() { return this._trimming !== AudioBox.ScopeNone ? this._trimStart : NumCast(this.layoutDoc.clipStart); } + @computed get trimDuration() { return this.trimEnd - this.trimStart; } @computed get trimEnd() { - return this._trimming && this._trimEnd !== undefined ? this._trimEnd : NumCast(this.layoutDoc.clipEnd, this.duration); + return this._trimming !== AudioBox.ScopeNone && this._trimEnd !== undefined ? 
this._trimEnd : NumCast(this.layoutDoc.clipEnd, this.clipDuration); } @computed get mediaState(): @@ -113,13 +119,6 @@ export class AudioBox extends ViewBoxAnnotatableComponent< @computed get rawDuration() { return NumCast(this.dataDoc[`${this.fieldKey}-duration`]); } - @computed get duration() { - return NumCast(this.layoutDoc.clipEnd, NumCast(this.layoutDoc.clipStart) + NumCast(this.dataDoc[`${this.fieldKey}-duration`])) - NumCast(this.layoutDoc.clipStart); - // NumCast(this.dataDoc[`${this.fieldKey}-duration`]); - } - @computed get trimDuration() { - return this.trimEnd - this.trimStart; - } @computed get anchorDocs() { return DocListCast(this.dataDoc[this.annotationKey]); } @@ -269,7 +268,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent< this._ele.currentTime = start; this._ele.play(); runInAction(() => (this.mediaState = "playing")); - if (endTime !== this.duration) { + if (endTime !== this.clipDuration) { this._play = setTimeout( () => { this._ended = fullPlay ? true : this._ended; @@ -309,7 +308,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent< const [{ result }] = await Networking.UploadFilesToServer(e.data); if (!(result instanceof Error)) { this.props.Document[this.props.fieldKey] = new AudioField(result.accessPaths.agnostic.client); - if (this._trimEnd === undefined) this._trimEnd = this.duration; + if (this._trimEnd === undefined) this._trimEnd = this.clipDuration; } }; this._recordStart = new Date().getTime(); @@ -359,9 +358,9 @@ export class AudioBox extends ViewBoxAnnotatableComponent< this.dataDoc[this.fieldKey + "-duration"] = (new Date().getTime() - this._recordStart - this.pauseTime) / 1000; this.mediaState = "paused"; - this._trimEnd = this.duration; + this._trimEnd = this.clipDuration; this.layoutDoc.clipStart = 0; - this.layoutDoc.clipEnd = this.duration; + this.layoutDoc.clipEnd = this.clipDuration; this._stream?.getAudioTracks()[0].stop(); const ind = DocUtils.ActiveRecordings.indexOf(this); ind !== -1 && 
DocUtils.ActiveRecordings.splice(ind, 1); @@ -378,7 +377,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent< // for play button Play = (e?: any) => { let start; - if (this._ended || this._ele!.currentTime === this.duration) { + if (this._ended || this._ele!.currentTime === this.clipDuration) { start = NumCast(this.layoutDoc.clipStart); this._ended = false; } @@ -494,11 +493,11 @@ export class AudioBox extends ViewBoxAnnotatableComponent< // shows trim controls @action - startTrim = () => { + startTrim = (scope: number) => { if (this.mediaState === "playing") { this.Pause(); } - this._trimming = true; + this._trimming = scope; } // hides trim controls and displays new clip @@ -510,7 +509,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent< this.layoutDoc.clipStart = this.trimStart; this.layoutDoc.clipEnd = this.trimEnd; this.setAnchorTime(Math.max(Math.min(this.trimEnd, this._ele!.currentTime), this.trimStart)); - this._trimming = false; + this._trimming = AudioBox.ScopeNone; }); @action @@ -579,15 +578,15 @@ export class AudioBox extends ViewBoxAnnotatableComponent< rawDuration={this.rawDuration} // this edits the entire waveform when trimming is activated - clipStart={this._trimming ? 0 : this.clipStart} - clipEnd={this._trimming ? this.rawDuration : this.clipEnd} - clipDuration={this._trimming ? this.rawDuration : this.duration} + clipStart={this._trimming === AudioBox.ScopeAll ? 0 : this.clipStart} + clipEnd={this._trimming === AudioBox.ScopeAll ? this.rawDuration : this.clipEnd} + clipDuration={this._trimming === AudioBox.ScopeAll ? 
this.rawDuration : this.clipDuration} // this edits just the current waveform clip when trimming is activated // clipStart={this.clipStart} // clipEnd={this.clipEnd} // clipDuration={this.duration} - trimming={this._trimming} + trimming={this._trimming !== AudioBox.ScopeNone} trimStart={this.trimStartFunc} trimEnd={this.trimEndFunc} trimDuration={this.trimDurationFunc} @@ -596,6 +595,15 @@ export class AudioBox extends ViewBoxAnnotatableComponent< /> ); } + onClipPointerDown = (e: React.PointerEvent) => { + setupMoveUpEvents(this, e, returnFalse, returnFalse, action((e: PointerEvent, doubleTap?: boolean) => { + if (doubleTap) { + this.startTrim(AudioBox.ScopeAll); + } else { + this._trimming !== AudioBox.ScopeNone ? this.finishTrim() : this.startTrim(AudioBox.ScopeClip); + } + })); + } render() { const interactive = @@ -697,11 +705,11 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
@@ -719,14 +727,10 @@ export class AudioBox extends ViewBoxAnnotatableComponent< {this.audio}
- {this._trimming ? - formatTime(Math.round(NumCast(this.layoutDoc._currentTimecode))) - : formatTime(Math.round(NumCast(this.layoutDoc._currentTimecode) - NumCast(this.trimStart)))} + {formatTime(Math.round(NumCast(this.layoutDoc._currentTimecode) - NumCast(this.clipStart)))}
- {this._trimming || !this._trimEnd ? - formatTime(Math.round(NumCast(this.duration))) - : formatTime(Math.round(NumCast(this.trimDuration)))} + {formatTime(Math.round(NumCast(this.clipDuration)))}
-- cgit v1.2.3-70-g09d2 From cbf22b4ccaab14a7c8cb62137ea09b58a001e13a Mon Sep 17 00:00:00 2001 From: bobzel Date: Fri, 24 Sep 2021 18:45:33 -0400 Subject: improved initial waveform display when making a clipping. --- src/client/views/AudioWaveform.tsx | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) (limited to 'src/client/views/AudioWaveform.tsx') diff --git a/src/client/views/AudioWaveform.tsx b/src/client/views/AudioWaveform.tsx index f7b117130..0a441552e 100644 --- a/src/client/views/AudioWaveform.tsx +++ b/src/client/views/AudioWaveform.tsx @@ -42,7 +42,8 @@ export class AudioWaveform extends React.Component { ({ clipStart, clipEnd, fieldKey }) => { if (!this.props.layoutDoc[fieldKey]) { // setting these values here serves as a "lock" to prevent multiple attempts to create the waveform at nerly the same time. - this.props.layoutDoc[fieldKey] = new List(numberRange(AudioWaveform.NUMBER_OF_BUCKETS)); + const waveform = Cast(this.props.layoutDoc[this.audioBucketField(0, this.props.rawDuration)], listSpec("number"), []); + this.props.layoutDoc[fieldKey] = new List(waveform.slice(clipStart / this.props.rawDuration * waveform.length, clipEnd / this.props.rawDuration * waveform.length)); setTimeout(() => this.createWaveformBuckets(fieldKey, clipStart, clipEnd)); } }, { fireImmediately: true }); -- cgit v1.2.3-70-g09d2 From 6bcf4ae5f3953ba10ba1fba6c7d2246514a90eed Mon Sep 17 00:00:00 2001 From: bobzel Date: Fri, 24 Sep 2021 22:31:22 -0400 Subject: refactored trim stuff out of audiobox into collectionstackedtimeline so that videobox can reuse trimming --- src/client/views/AudioWaveform.tsx | 5 +- src/client/views/ContextMenuItem.tsx | 4 +- .../collections/CollectionStackedTimeline.tsx | 193 +++++------- src/client/views/nodes/AudioBox.tsx | 350 ++++++++------------- src/client/views/nodes/VideoBox.tsx | 59 ++-- 5 files changed, 248 insertions(+), 363 deletions(-) (limited to 'src/client/views/AudioWaveform.tsx') diff --git a/src/client/views/AudioWaveform.tsx 
b/src/client/views/AudioWaveform.tsx index 0a441552e..0e9c00656 100644 --- a/src/client/views/AudioWaveform.tsx +++ b/src/client/views/AudioWaveform.tsx @@ -16,7 +16,6 @@ export interface AudioWaveformProps { rawDuration: number; // length of underlying media data mediaPath: string; layoutDoc: Doc; - trimming: boolean; clipStart: number; clipEnd: number; PanelHeight: () => number; @@ -42,8 +41,8 @@ export class AudioWaveform extends React.Component { ({ clipStart, clipEnd, fieldKey }) => { if (!this.props.layoutDoc[fieldKey]) { // setting these values here serves as a "lock" to prevent multiple attempts to create the waveform at nerly the same time. - const waveform = Cast(this.props.layoutDoc[this.audioBucketField(0, this.props.rawDuration)], listSpec("number"), []); - this.props.layoutDoc[fieldKey] = new List(waveform.slice(clipStart / this.props.rawDuration * waveform.length, clipEnd / this.props.rawDuration * waveform.length)); + const waveform = Cast(this.props.layoutDoc[this.audioBucketField(0, this.props.rawDuration)], listSpec("number")); + this.props.layoutDoc[fieldKey] = waveform && new List(waveform.slice(clipStart / this.props.rawDuration * waveform.length, clipEnd / this.props.rawDuration * waveform.length)); setTimeout(() => this.createWaveformBuckets(fieldKey, clipStart, clipEnd)); } }, { fireImmediately: true }); diff --git a/src/client/views/ContextMenuItem.tsx b/src/client/views/ContextMenuItem.tsx index c3921d846..25d00f701 100644 --- a/src/client/views/ContextMenuItem.tsx +++ b/src/client/views/ContextMenuItem.tsx @@ -39,7 +39,7 @@ export class ContextMenuItem extends React.Component) => { if ("event" in this.props) { - this.props.closeMenu && this.props.closeMenu(); + this.props.closeMenu?.(); let batch: UndoManager.Batch | undefined; if (this.props.undoable !== false) { batch = UndoManager.StartBatch(`Context menu event: ${this.props.description}`); @@ -90,7 +90,7 @@ export class ContextMenuItem extends React.Component ) : null}
- {this.props.description.replace(":","")} + {this.props.description.replace(":", "")}
); diff --git a/src/client/views/collections/CollectionStackedTimeline.tsx b/src/client/views/collections/CollectionStackedTimeline.tsx index 43f78cf78..48014921a 100644 --- a/src/client/views/collections/CollectionStackedTimeline.tsx +++ b/src/client/views/collections/CollectionStackedTimeline.tsx @@ -61,24 +61,21 @@ export type CollectionStackedTimelineProps = { mediaPath: string; dictationKey: string; rawDuration: number; - trimming: boolean; - clipStart: number; - clipEnd: number; - clipDuration: number; - trimStart: () => number; - trimEnd: () => number; - trimDuration: () => number; - setStartTrim: (newStart: number) => void; - setEndTrim: (newEnd: number) => void; + fieldKey: string; }; +export enum TrimScope { + All = 2, + Clip = 1, + None = 0, +} + @observer export class CollectionStackedTimeline extends CollectionSubView< PanZoomDocument, CollectionStackedTimelineProps >(PanZoomDocument) { - @observable static SelectingRegion: CollectionStackedTimeline | undefined = - undefined; + @observable static SelectingRegion: CollectionStackedTimeline | undefined; static RangeScript: ScriptField; static LabelScript: ScriptField; static RangePlayScript: ScriptField; @@ -87,37 +84,43 @@ export class CollectionStackedTimeline extends CollectionSubView< private _timeline: HTMLDivElement | null = null; private _markerStart: number = 0; @observable _markerEnd: number = 0; + @observable _trimming: number = TrimScope.None; + @observable _trimStart: number = 0; + @observable _trimEnd: number = 0; - get minLength() { - const rect = this._timeline?.getBoundingClientRect(); - if (rect) { - return 0.05 * this.clipDuration; - } - return 0; - } + get minTrimLength() { return this._timeline?.getBoundingClientRect() ? 0.05 * this.clipDuration : 0; } + @computed get trimStart() { return this.IsTrimming !== TrimScope.None ? 
this._trimStart : this.clipStart; } + @computed get trimDuration() { return this.trimEnd - this.trimStart; } + @computed get trimEnd() { return this.IsTrimming !== TrimScope.None ? this._trimEnd : this.clipEnd; } - get trimStart() { - return this.props.trimStart(); - } + @computed get clipStart() { return this.IsTrimming === TrimScope.All ? 0 : NumCast(this.layoutDoc.clipStart); } + @computed get clipDuration() { return this.clipEnd - this.clipStart; } + @computed get clipEnd() { return this.IsTrimming === TrimScope.All ? this.props.rawDuration : NumCast(this.layoutDoc.clipEnd, this.props.rawDuration); } - get trimEnd() { - return this.props.trimEnd(); - } + @computed get currentTime() { return NumCast(this.layoutDoc._currentTimecode); } - get clipDuration() { - return this.props.clipDuration; - } + public get IsTrimming() { return this._trimming; } - @computed get currentTime() { - return NumCast(this.layoutDoc._currentTimecode); + @action + public StartTrimming(scope: TrimScope) { + this._trimStart = this.clipStart; + this._trimEnd = this.clipEnd; + this._trimming = scope; + } + @action + public StopTrimming() { + this.layoutDoc.clipStart = this.trimStart; + this.layoutDoc.clipEnd = this.trimEnd; + this._trimming = TrimScope.None; } + @computed get selectionContainer() { return CollectionStackedTimeline.SelectingRegion !== this ? null : (
); @@ -145,28 +148,21 @@ export class CollectionStackedTimeline extends CollectionSubView< componentDidMount() { document.addEventListener("keydown", this.keyEvents, true); } + + @action componentWillUnmount() { document.removeEventListener("keydown", this.keyEvents, true); if (CollectionStackedTimeline.SelectingRegion === this) { - runInAction( - () => (CollectionStackedTimeline.SelectingRegion = undefined) - ); + CollectionStackedTimeline.SelectingRegion = undefined; } } - anchorStart = (anchor: Doc) => - NumCast(anchor._timecodeToShow, NumCast(anchor[this.props.startTag])) - anchorEnd = (anchor: Doc, val: any = null) => { - const endVal = NumCast(anchor[this.props.endTag], val); - return NumCast( - anchor._timecodeToHide, - endVal === undefined ? null : endVal - ); - } + anchorStart = (anchor: Doc) => NumCast(anchor._timecodeToShow, NumCast(anchor[this.props.startTag])) + anchorEnd = (anchor: Doc, val: any = null) => NumCast(anchor._timecodeToHide, NumCast(anchor[this.props.endTag], val)); toTimeline = (screen_delta: number, width: number) => { return Math.max( - this.props.clipStart, - Math.min(this.props.clipEnd, (screen_delta / width) * this.props.clipDuration + this.props.clipStart)); + this.clipStart, + Math.min(this.clipEnd, (screen_delta / width) * this.clipDuration + this.clipStart)); } rangeClickScript = () => CollectionStackedTimeline.RangeScript; @@ -233,10 +229,7 @@ export class CollectionStackedTimeline extends CollectionSubView< !wasSelecting && CollectionStackedTimeline.SelectingRegion !== this ) { - this._markerStart = this._markerEnd = this.toTimeline( - clientX - rect.x, - rect.width - ); + this._markerStart = this._markerEnd = this.toTimeline(clientX - rect.x, rect.width); CollectionStackedTimeline.SelectingRegion = this; wasSelecting = true; } @@ -254,7 +247,7 @@ export class CollectionStackedTimeline extends CollectionSubView< !isClick && CollectionStackedTimeline.SelectingRegion === this && Math.abs(movement[0]) > 15 && - 
!this.props.trimming + !this.IsTrimming ) { const anchor = CollectionStackedTimeline.createAnchor( this.rootDoc, @@ -287,7 +280,7 @@ export class CollectionStackedTimeline extends CollectionSubView< this.currentTime ); } else { - !wasPlaying && this.props.setTime(((clientX - rect.x) / rect.width) * this.clipDuration + this.props.clipStart); + !wasPlaying && this.props.setTime(this.toTimeline(clientX - rect.x, rect.width)); } } ); @@ -304,21 +297,19 @@ export class CollectionStackedTimeline extends CollectionSubView< e, action((e, [], []) => { if (rect && this.props.isContentActive()) { - this.props.setStartTrim(Math.min( + this._trimStart = Math.min( Math.max( this.trimStart + (e.movementX / rect.width) * this.clipDuration, 0 ), - this.trimEnd - this.minLength - )); + this.trimEnd - this.minTrimLength + ); } return false; }), emptyFunction, action((e, doubleTap) => { - if (doubleTap) { - this.props.setStartTrim(this.props.clipStart); - } + doubleTap && (this._trimStart = this.clipStart); }) ); } @@ -332,21 +323,19 @@ export class CollectionStackedTimeline extends CollectionSubView< e, action((e, [], []) => { if (rect && this.props.isContentActive()) { - this.props.setEndTrim(Math.max( + this._trimEnd = Math.max( Math.min( this.trimEnd + (e.movementX / rect.width) * this.clipDuration, - this.props.clipStart + this.clipDuration + this.clipStart + this.clipDuration ), - this.trimStart + this.minLength - )); + this.trimStart + this.minTrimLength + ); } return false; }), emptyFunction, action((e, doubleTap) => { - if (doubleTap) { - this.props.setEndTrim(this.props.clipEnd); - } + doubleTap && (this._trimEnd = this.clipEnd); }) ); } @@ -356,17 +345,16 @@ export class CollectionStackedTimeline extends CollectionSubView< if (!de.embedKey && this.props.layerProvider?.(this.props.Document) !== false && this.props.Document._isGroup) return false; if (!super.onInternalDrop(e, de)) return false; - // determine x coordinate of drop and assign it to the documents being dragged 
--- see internalDocDrop of collectionFreeFormView.tsx for how it's done when dropping onto a 2D freeform view const localPt = this.props.ScreenToLocalTransform().transformPoint(de.x, de.y); const x = localPt[0] - docDragData.offset[0]; const timelinePt = this.toTimeline(x, this.props.PanelWidth()); for (let i = 0; i < docDragData.droppedDocuments.length; i++) { const d = Doc.GetProto(docDragData.droppedDocuments[i]); - if (d._timecodeToHide !== undefined) { - d._timecodeToHide = timelinePt + NumCast(d._timecodeToHide) - NumCast(d._timecodeToShow); + if (this.anchorEnd(d) !== undefined) { + d[d._timecodeToHide === undefined ? this.props.endTag : "_timecodeToHide"] = timelinePt + this.anchorEnd(d) - this.anchorStart(d); } - d._timecodeToShow = timelinePt; + d[d._timecodeToShow === undefined ? this.props.startTag : "_timecodeToShow"] = timelinePt; } return true; @@ -403,7 +391,7 @@ export class CollectionStackedTimeline extends CollectionSubView< }); Doc.GetProto(anchor)[startTag] = anchorStartTime; Doc.GetProto(anchor)[endTag] = anchorEndTime; - if (Cast(dataDoc[fieldKey], listSpec(Doc), null) !== undefined) { + if (Cast(dataDoc[fieldKey], listSpec(Doc), null)) { Cast(dataDoc[fieldKey], listSpec(Doc), []).push(anchor); } else { dataDoc[fieldKey] = new List([anchor]); } @@ -546,10 +534,9 @@ export class CollectionStackedTimeline extends CollectionSubView< duration={this.clipDuration} mediaPath={this.props.mediaPath} layoutDoc={this.layoutDoc} - clipStart={this.props.clipStart} - clipEnd={this.props.clipEnd} + clipStart={this.clipStart} + clipEnd={this.clipEnd} PanelHeight={this.timelineContentHeight} - trimming={this.props.trimming} />
); @@ -582,12 +569,12 @@ export class CollectionStackedTimeline extends CollectionSubView< d.anchor, start + (10 / timelineContentWidth) * this.clipDuration ); - if (end < this.props.clipStart || start > this.props.clipEnd) return (null); - const left = Math.max((start - this.props.clipStart) / this.clipDuration * timelineContentWidth, 0); + if (end < this.clipStart || start > this.clipEnd) return (null); + const left = Math.max((start - this.clipStart) / this.clipDuration * timelineContentWidth, 0); const top = (d.level / maxLevel) * this.timelineContentHeight(); const timespan = end - start; const width = (timespan / this.clipDuration) * timelineContentWidth; - const height = (this.timelineContentHeight()) / maxLevel; + const height = this.timelineContentHeight() / maxLevel; return this.props.Document.hideAnchors ? null : (
); })} - {!this.props.trimming && this.selectionContainer} + {!this.IsTrimming && this.selectionContainer} {this.renderAudioWaveform} {this.renderDictation}
- {this.props.trimming && ( + {this.IsTrimming && ( <>
@@ -662,8 +649,8 @@ export class CollectionStackedTimeline extends CollectionSubView<
@@ -754,8 +741,7 @@ class StackedTimelineAnchor extends React.Component return this.props.toTimeline(e.clientX - rect.x, rect.width); }; const changeAnchor = (anchor: Doc, left: boolean, time: number | undefined) => { - const timelineOnly = - Cast(anchor[this.props.startTag], "number", null) !== undefined; + const timelineOnly = Cast(anchor[this.props.startTag], "number", null) !== undefined; if (timelineOnly) { if (!left && time !== undefined && time <= NumCast(anchor[this.props.startTag])) time = undefined; Doc.SetInPlace( @@ -767,9 +753,7 @@ class StackedTimelineAnchor extends React.Component if (!left) Doc.SetInPlace(anchor, "borderRounding", time !== undefined ? undefined : "100%", true); } else { - left - ? (anchor._timecodeToShow = time) - : (anchor._timecodeToHide = time); + anchor[left ? "_timecodeToShow" : "_timecodeToHide"] = time; } return false; }; @@ -803,10 +787,9 @@ class StackedTimelineAnchor extends React.Component mark: Doc, script: undefined | (() => ScriptField), doublescript: undefined | (() => ScriptField), - x: number, - y: number, - width: number, - height: number + screenXf: () => Transform, + width: () => number, + height: () => number ) { const anchor = observable({ view: undefined as any }); const focusFunc = ( @@ -825,24 +808,20 @@ class StackedTimelineAnchor extends React.Component (anchor.view = r))} + ref={action((r: DocumentView | null) => anchor.view = r)} Document={mark} DataDoc={undefined} renderDepth={this.props.renderDepth + 1} LayoutTemplate={undefined} LayoutTemplateString={LabelBox.LayoutStringWithTitle(LabelBox, "data", this.computeTitle())} isDocumentActive={this.props.isDocumentActive} - PanelWidth={() => width} - PanelHeight={() => height} - ScreenToLocalTransform={() => - this.props.ScreenToLocalTransform().translate(-x, -y) - } + PanelWidth={width} + PanelHeight={height} + ScreenToLocalTransform={screenXf} focus={focusFunc} rootSelected={returnFalse} onClick={script} - onDoubleClick={ - 
this.props.layoutDoc.autoPlayAnchors ? undefined : doublescript - } + onDoubleClick={this.props.layoutDoc.autoPlayAnchors ? undefined : doublescript} ignoreAutoHeight={false} hideResizeHandles={true} bringToFront={emptyFunction} @@ -852,15 +831,17 @@ class StackedTimelineAnchor extends React.Component }; }); + anchorScreenToLocalXf = () => this.props.ScreenToLocalTransform().translate(-this.props.left, -this.props.top); + width = () => this.props.width; + height = () => this.props.height; render() { const inner = this.renderInner( this.props.mark, this.props.rangeClickScript, this.props.rangePlayScript, - this.props.left, - this.props.top, - this.props.width, - this.props.height + this.anchorScreenToLocalXf, + this.width, + this.height ); return ( <> @@ -876,9 +857,7 @@ class StackedTimelineAnchor extends React.Component
- this.onAnchorDown(e, this.props.mark, false) - } + onPointerDown={(e) => this.onAnchorDown(e, this.props.mark, false)} /> )} diff --git a/src/client/views/nodes/AudioBox.tsx b/src/client/views/nodes/AudioBox.tsx index bfc15cea8..81367ed19 100644 --- a/src/client/views/nodes/AudioBox.tsx +++ b/src/client/views/nodes/AudioBox.tsx @@ -14,15 +14,15 @@ import { Doc, DocListCast, Opt } from "../../../fields/Doc"; import { documentSchema } from "../../../fields/documentSchemas"; import { makeInterface } from "../../../fields/Schema"; import { ComputedField } from "../../../fields/ScriptField"; -import { Cast, NumCast } from "../../../fields/Types"; +import { Cast, NumCast, DateCast } from "../../../fields/Types"; import { AudioField, nullAudio } from "../../../fields/URLField"; -import { emptyFunction, formatTime, OmitKeys, setupMoveUpEvents, returnFalse } from "../../../Utils"; +import { emptyFunction, formatTime, OmitKeys, returnFalse, setupMoveUpEvents } from "../../../Utils"; import { DocUtils } from "../../documents/Documents"; import { Networking } from "../../Network"; import { CurrentUserUtils } from "../../util/CurrentUserUtils"; import { DragManager } from "../../util/DragManager"; import { SnappingManager } from "../../util/SnappingManager"; -import { CollectionStackedTimeline } from "../collections/CollectionStackedTimeline"; +import { CollectionStackedTimeline, TrimScope } from "../collections/CollectionStackedTimeline"; import { ContextMenu } from "../ContextMenu"; import { ContextMenuProps } from "../ContextMenuItem"; import { @@ -43,20 +43,21 @@ declare class MediaRecorder { type AudioDocument = makeInterface<[typeof documentSchema]>; const AudioDocument = makeInterface(documentSchema); +enum media_state { + PendingRecording = "pendingRecording", + Recording = "recording", + Paused = "paused", + Playing = "playing" +}; @observer -export class AudioBox extends ViewBoxAnnotatableComponent< - ViewBoxAnnotatableProps & FieldViewProps, - AudioDocument 
->(AudioDocument) { +export class AudioBox extends ViewBoxAnnotatableComponent(AudioDocument) { public static LayoutString(fieldKey: string) { return FieldView.LayoutString(AudioBox, fieldKey); } public static Enabled = false; static playheadWidth = 40; // width of playhead static heightPercent = 75; // height of timeline in percent of height of audioBox. static Instance: AudioBox; - static ScopeAll = 2; - static ScopeClip = 1; - static ScopeNone = 0; + _dropDisposer?: DragManager.DragDropDisposer; _disposers: { [name: string]: IReactionDisposer } = {}; _ele: HTMLAudioElement | null = null; _stackedTimeline = React.createRef(); @@ -68,81 +69,39 @@ export class AudioBox extends ViewBoxAnnotatableComponent< _stream: MediaStream | undefined; _start: number = 0; _play: any = null; - _ended: boolean = false; @observable static _scrubTime = 0; @observable _markerEnd: number = 0; @observable _position: number = 0; @observable _waveHeight: Opt = this.layoutDoc._height; @observable _paused: boolean = false; - @observable _trimming: number = AudioBox.ScopeNone; - @observable _trimStart: number = NumCast(this.layoutDoc.clipStart); - @observable _trimEnd: number | undefined = Cast(this.layoutDoc.clipEnd, "number"); - @computed get clipStart() { return this._trimming === AudioBox.ScopeAll ? 0 : NumCast(this.layoutDoc.clipStart); } - @computed get clipDuration() { - return this._trimming === AudioBox.ScopeAll ? NumCast(this.dataDoc[`${this.fieldKey}-duration`]) : - NumCast(this.layoutDoc.clipEnd, this.clipStart + NumCast(this.dataDoc[`${this.fieldKey}-duration`])) - this.clipStart; - } - @computed get clipEnd() { return this.clipStart + this.clipDuration; } - @computed get trimStart() { return this._trimming !== AudioBox.ScopeNone ? this._trimStart : NumCast(this.layoutDoc.clipStart); } - @computed get trimDuration() { return this.trimEnd - this.trimStart; } - @computed get trimEnd() { - return this._trimming !== AudioBox.ScopeNone && this._trimEnd !== undefined ? 
this._trimEnd : NumCast(this.layoutDoc.clipEnd, this.clipDuration); - } + @computed get recordingStart() { return DateCast(this.dataDoc[this.fieldKey + "-recordingStart"])?.date.getTime(); } + @computed get rawDuration() { return NumCast(this.dataDoc[`${this.fieldKey}-duration`]); } + @computed get anchorDocs() { return DocListCast(this.dataDoc[this.annotationKey]); } + @computed get links() { return DocListCast(this.dataDoc.links); } + @computed get pauseTime() { return this._pauseEnd - this._pauseStart; } // total time paused to update the correct time + @computed get heightPercent() { return AudioBox.heightPercent; } + @computed get mediaState() { return this.layoutDoc.mediaState as media_state; } + set mediaState(value) { this.layoutDoc.mediaState = value; } - @computed get mediaState(): - | undefined - | "pendingRecording" - | "recording" - | "paused" - | "playing" { - return this.layoutDoc.mediaState as - | undefined - | "pendingRecording" - | "recording" - | "paused" - | "playing"; - } - set mediaState(value) { - this.layoutDoc.mediaState = value; - } - public static SetScrubTime = action((timeInMillisFrom1970: number) => { - AudioBox._scrubTime = 0; - AudioBox._scrubTime = timeInMillisFrom1970; - }); - @computed get recordingStart() { - return Cast( - this.dataDoc[this.props.fieldKey + "-recordingStart"], - DateField - )?.date.getTime(); - } - @computed get rawDuration() { - return NumCast(this.dataDoc[`${this.fieldKey}-duration`]); - } - @computed get anchorDocs() { - return DocListCast(this.dataDoc[this.annotationKey]); - } - @computed get links() { - return DocListCast(this.dataDoc.links); - } - @computed get pauseTime() { - return this._pauseEnd - this._pauseStart; - } // total time paused to update the correct time - @computed get heightPercent() { - return AudioBox.heightPercent; - } + get timeline() { return this._stackedTimeline.current; } constructor(props: Readonly) { super(props); AudioBox.Instance = this; } + public static SetScrubTime = 
action((timeInMillisFrom1970: number) => { + AudioBox._scrubTime = 0; + AudioBox._scrubTime = timeInMillisFrom1970; + }); + getLinkData(l: Doc) { let la1 = l.anchor1 as Doc; let la2 = l.anchor2 as Doc; const linkTime = - this._stackedTimeline.current?.anchorStart(la2) || - this._stackedTimeline.current?.anchorStart(la1) || + this.timeline?.anchorStart(la2) || + this.timeline?.anchorStart(la1) || 0; if (Doc.AreProtosEqual(la1, this.dataDoc)) { la1 = l.anchor2 as Doc; @@ -152,47 +111,42 @@ export class AudioBox extends ViewBoxAnnotatableComponent< } getAnchor = () => { - return ( - CollectionStackedTimeline.createAnchor( - this.rootDoc, - this.dataDoc, - this.annotationKey, - "_timecodeToShow" /* audioStart */, - "_timecodeToHide" /* audioEnd */, - this._ele?.currentTime || - Cast(this.props.Document._currentTimecode, "number", null) || - (this.mediaState === "recording" - ? (Date.now() - (this.recordingStart || 0)) / 1000 - : undefined) - ) || this.rootDoc - ); + return CollectionStackedTimeline.createAnchor( + this.rootDoc, + this.dataDoc, + this.annotationKey, + "_timecodeToShow" /* audioStart */, + "_timecodeToHide" /* audioEnd */, + this._ele?.currentTime || + Cast(this.props.Document._currentTimecode, "number", null) || + (this.mediaState === media_state.Recording + ? (Date.now() - (this.recordingStart || 0)) / 1000 + : undefined) + ) || this.rootDoc; } componentWillUnmount() { - this.dropDisposer?.(); + this._dropDisposer?.(); Object.values(this._disposers).forEach((disposer) => disposer?.()); const ind = DocUtils.ActiveRecordings.indexOf(this); ind !== -1 && DocUtils.ActiveRecordings.splice(ind, 1); } - private dropDisposer?: DragManager.DragDropDisposer; @action componentDidMount() { this.props.setContentView?.(this); // this tells the DocumentView that this AudioBox is the "content" of the document. this allows the DocumentView to indirectly call getAnchor() on the AudioBox when making a link. - this.mediaState = this.path ? 
"paused" : undefined; + this.mediaState = this.path ? media_state.Paused : undefined as any as media_state; this.path && this.setAnchorTime(NumCast(this.layoutDoc.clipStart)); this.path && this.timecodeChanged(); this._disposers.triggerAudio = reaction( - () => - !LinkDocPreview.LinkInfo && this.props.renderDepth !== -1 - ? NumCast(this.Document._triggerAudio, null) - : undefined, + () => !LinkDocPreview.LinkInfo && this.props.renderDepth !== -1 + ? NumCast(this.Document._triggerAudio, null) + : undefined, (start) => - start !== undefined && - setTimeout(() => { + start !== undefined && setTimeout(() => { this.playFrom(start); setTimeout(() => { this.Document._currentTimecode = start; @@ -203,13 +157,11 @@ export class AudioBox extends ViewBoxAnnotatableComponent< ); this._disposers.audioStop = reaction( - () => - this.props.renderDepth !== -1 && !LinkDocPreview.LinkInfo - ? Cast(this.Document._audioStop, "number", null) - : undefined, + () => this.props.renderDepth !== -1 && !LinkDocPreview.LinkInfo + ? 
Cast(this.Document._audioStop, "number", null) + : undefined, (audioStop) => - audioStop !== undefined && - setTimeout(() => { + audioStop !== undefined && setTimeout(() => { this.Pause(); setTimeout(() => (this.Document._audioStop = undefined), 10); }), // wait for mainCont and try again to play @@ -220,27 +172,25 @@ export class AudioBox extends ViewBoxAnnotatableComponent< // for updating the timecode @action timecodeChanged = () => { - const htmlEle = this._ele; - if (this.mediaState !== "recording" && htmlEle) { + if (this.mediaState !== media_state.Recording && this._ele) { this.links .map((l) => this.getLinkData(l)) .forEach(({ la1, la2, linkTime }) => { if ( linkTime > NumCast(this.layoutDoc._currentTimecode) && - linkTime < htmlEle.currentTime + linkTime < this._ele!.currentTime ) { Doc.linkFollowHighlight(la1); } }); - this.layoutDoc._currentTimecode = htmlEle.currentTime; - + this.layoutDoc._currentTimecode = this._ele.currentTime; } } // pause play back Pause = action(() => { this._ele!.pause(); - this.mediaState = "paused"; + this.mediaState = media_state.Paused; }); // play audio for documents created during recording @@ -251,32 +201,30 @@ export class AudioBox extends ViewBoxAnnotatableComponent< // play back the audio from time @action - playFrom = (seekTimeInSeconds: number, endTime: number = this.trimEnd, fullPlay: boolean = false) => { + playFrom = (seekTimeInSeconds: number, endTime?: number, fullPlay: boolean = false): any => { clearTimeout(this._play); if (Number.isNaN(this._ele?.duration)) { setTimeout(() => this.playFrom(seekTimeInSeconds, endTime), 500); - } else if (this._ele && AudioBox.Enabled) { - if (seekTimeInSeconds < 0) { - if (seekTimeInSeconds > -1) { - setTimeout(() => this.playFrom(0), -seekTimeInSeconds * 1000); - } else { - this.Pause(); - } - } else if (this.trimStart <= endTime && seekTimeInSeconds <= this.trimEnd) { - const start = Math.max(this.trimStart, seekTimeInSeconds); - const end = Math.min(this.trimEnd, endTime); + 
} + else if (this.timeline && this._ele && AudioBox.Enabled) { + const end = Math.min(this.timeline.trimEnd, endTime ?? this.timeline.trimEnd); + const start = Math.max(this.timeline.trimStart, seekTimeInSeconds); + if (seekTimeInSeconds >= 0 && this.timeline.trimStart <= end && seekTimeInSeconds <= this.timeline.trimEnd) { this._ele.currentTime = start; this._ele.play(); - runInAction(() => (this.mediaState = "playing")); - if (endTime !== this.clipDuration) { - this._play = setTimeout( + runInAction(() => this.mediaState = media_state.Playing); + if (end !== this.timeline.clipDuration) { + return this._play = setTimeout( () => { - this._ended = fullPlay ? true : this._ended; + if (fullPlay) this.setAnchorTime(this.timeline!.trimStart); this.Pause(); }, (end - start) * 1000 ); // use setTimeout to play a specific duration } + } + if (seekTimeInSeconds < 0 && seekTimeInSeconds > -1) { + setTimeout(() => this.playFrom(0), -seekTimeInSeconds * 1000); } else { this.Pause(); } @@ -285,7 +233,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent< // update the recording time updateRecordTime = () => { - if (this.mediaState === "recording") { + if (this.mediaState === media_state.Recording) { setTimeout(this.updateRecordTime, 30); if (this._paused) { this._pausedTime += (new Date().getTime() - this._recordStart) / 1000; @@ -300,22 +248,19 @@ export class AudioBox extends ViewBoxAnnotatableComponent< recordAudioAnnotation = async () => { this._stream = await navigator.mediaDevices.getUserMedia({ audio: true }); this._recorder = new MediaRecorder(this._stream); - this.dataDoc[this.props.fieldKey + "-recordingStart"] = new DateField( - new Date() - ); + this.dataDoc[this.fieldKey + "-recordingStart"] = new DateField(); DocUtils.ActiveRecordings.push(this); this._recorder.ondataavailable = async (e: any) => { const [{ result }] = await Networking.UploadFilesToServer(e.data); if (!(result instanceof Error)) { - this.props.Document[this.props.fieldKey] = new 
AudioField(result.accessPaths.agnostic.client); - if (this._trimEnd === undefined) this._trimEnd = this.clipDuration; + this.props.Document[this.fieldKey] = new AudioField(result.accessPaths.agnostic.client); } }; this._recordStart = new Date().getTime(); - runInAction(() => (this.mediaState = "recording")); + runInAction(() => this.mediaState = media_state.Recording); setTimeout(this.updateRecordTime, 0); this._recorder.start(); - setTimeout(() => this._recorder && this.stopRecording(), 60 * 60 * 1000); // stop after an hour + setTimeout(() => this.stopRecording(), 60 * 60 * 1000); // stop after an hour } // context menu @@ -353,17 +298,16 @@ export class AudioBox extends ViewBoxAnnotatableComponent< // stops the recording stopRecording = action(() => { - this._recorder.stop(); - this._recorder = undefined; - this.dataDoc[this.fieldKey + "-duration"] = - (new Date().getTime() - this._recordStart - this.pauseTime) / 1000; - this.mediaState = "paused"; - this._trimEnd = this.clipDuration; - this.layoutDoc.clipStart = 0; - this.layoutDoc.clipEnd = this.clipDuration; - this._stream?.getAudioTracks()[0].stop(); - const ind = DocUtils.ActiveRecordings.indexOf(this); - ind !== -1 && DocUtils.ActiveRecordings.splice(ind, 1); + if (this._recorder) { + this._recorder.stop(); + this._recorder = undefined; + this.dataDoc[this.fieldKey + "-duration"] = + (new Date().getTime() - this._recordStart - this.pauseTime) / 1000; + this.mediaState = media_state.Paused; + this._stream?.getAudioTracks()[0].stop(); + const ind = DocUtils.ActiveRecordings.indexOf(this); + ind !== -1 && DocUtils.ActiveRecordings.splice(ind, 1); + } }); // button for starting and stopping the recording @@ -376,16 +320,9 @@ export class AudioBox extends ViewBoxAnnotatableComponent< // for play button Play = (e?: any) => { - let start; - if (this._ended || this._ele!.currentTime === this.clipDuration) { - start = NumCast(this.layoutDoc.clipStart); - this._ended = false; - } - else { - start = 
this._ele!.currentTime; - } - - this.playFrom(start, this.trimEnd, true); + const eleTime = this._ele!.currentTime; + const start = eleTime === this.timeline?.trimDuration ? NumCast(this.layoutDoc.trimStart) : eleTime; + this.playFrom(start, undefined, true); e?.stopPropagation?.(); } @@ -402,7 +339,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent< ); Doc.GetProto(newDoc).recordingSource = this.dataDoc; Doc.GetProto(newDoc).recordingStart = ComputedField.MakeFunction( - `self.recordingSource["${this.props.fieldKey}-recordingStart"]` + `self.recordingSource["${this.fieldKey}-recordingStart"]` ); Doc.GetProto(newDoc).mediaState = ComputedField.MakeFunction( "self.recordingSource.mediaState" @@ -420,7 +357,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent< // returns the path of the audio file @computed get path() { - const field = Cast(this.props.Document[this.props.fieldKey], AudioField); + const field = Cast(this.props.Document[this.fieldKey], AudioField); const path = field instanceof AudioField ? field.url.href : ""; return path === nullAudio ? 
"" : path; } @@ -460,68 +397,33 @@ export class AudioBox extends ViewBoxAnnotatableComponent< e.stopPropagation(); } - playing = () => this.mediaState === "playing"; + playing = () => this.mediaState === media_state.Playing; playLink = (link: Doc) => { - const stack = this._stackedTimeline.current; if (link.annotationOn === this.rootDoc) { if (!this.layoutDoc.dontAutoPlayFollowedLinks) { - this.playFrom(stack?.anchorStart(link) || 0, stack?.anchorEnd(link)); + this.playFrom(this.timeline?.anchorStart(link) || 0, this.timeline?.anchorEnd(link)); } else { this._ele!.currentTime = this.layoutDoc._currentTimecode = - stack?.anchorStart(link) || 0; + this.timeline?.anchorStart(link) || 0; } } else { this.links .filter((l) => l.anchor1 === link || l.anchor2 === link) .forEach((l) => { const { la1, la2 } = this.getLinkData(l); - const startTime = stack?.anchorStart(la1) || stack?.anchorStart(la2); - const endTime = stack?.anchorEnd(la1) || stack?.anchorEnd(la2); + const startTime = this.timeline?.anchorStart(la1) || this.timeline?.anchorStart(la2); + const endTime = this.timeline?.anchorEnd(la1) || this.timeline?.anchorEnd(la2); if (startTime !== undefined) { if (!this.layoutDoc.dontAutoPlayFollowedLinks) { - endTime - ? 
this.playFrom(startTime, endTime) - : this.playFrom(startTime); + this.playFrom(startTime, endTime); } else { - this._ele!.currentTime = this.layoutDoc._currentTimecode = - startTime; + this._ele!.currentTime = this.layoutDoc._currentTimecode = startTime; } } }); } } - // shows trim controls - @action - startTrim = (scope: number) => { - if (this.mediaState === "playing") { - this.Pause(); - } - this._trimming = scope; - } - - // hides trim controls and displays new clip - @undoBatch - finishTrim = action(() => { - if (this.mediaState === "playing") { - this.Pause(); - } - this.layoutDoc.clipStart = this.trimStart; - this.layoutDoc.clipEnd = this.trimEnd; - this.setAnchorTime(Math.max(Math.min(this.trimEnd, this._ele!.currentTime), this.trimStart)); - this._trimming = AudioBox.ScopeNone; - }); - - @action - setStartTrim = (newStart: number) => { - this._trimStart = newStart; - } - - @action - setEndTrim = (newEnd: number) => { - this._trimEnd = newEnd; - } - isActiveChild = () => this._isAnyChildContentActive; timelineWhenChildContentsActiveChanged = (isActive: boolean) => this.props.whenChildContentsActiveChanged( @@ -543,9 +445,6 @@ export class AudioBox extends ViewBoxAnnotatableComponent< this.heightPercent) / 100 // panelHeight * heightPercent is player height. 
* heightPercent is timeline height (as per css inline) timelineWidth = () => this.props.PanelWidth() - AudioBox.playheadWidth; - trimEndFunc = () => this.trimEnd; - trimStartFunc = () => this.trimStart; - trimDurationFunc = () => this.trimDuration; @computed get renderTimeline() { return ( ); } + // hides trim controls and displays new clip + @undoBatch + finishTrim = action(() => { + this.Pause(); + this.setAnchorTime(Math.max(Math.min(this.timeline?.trimEnd || 0, this._ele!.currentTime), this.timeline?.trimStart || 0)); + this.timeline?.StopTrimming(); + }); + startTrim = (scope: TrimScope) => { + this.Pause(); + this.timeline?.StartTrimming(scope); + } + onClipPointerDown = (e: React.PointerEvent) => { - setupMoveUpEvents(this, e, returnFalse, returnFalse, action((e: PointerEvent, doubleTap?: boolean) => { + this.timeline && setupMoveUpEvents(this, e, returnFalse, returnFalse, action((e: PointerEvent, doubleTap?: boolean) => { if (doubleTap) { - this.startTrim(AudioBox.ScopeAll); - } else { - this._trimming !== AudioBox.ScopeNone ? this.finishTrim() : this.startTrim(AudioBox.ScopeClip); + this.startTrim(TrimScope.All); + } else if (this.timeline) { + this.Pause(); + this.timeline.IsTrimming !== TrimScope.None ? this.finishTrim() : this.startTrim(TrimScope.Clip); } })); } @@ -613,12 +509,12 @@ export class AudioBox extends ViewBoxAnnotatableComponent< return (
{ - if (r && this._stackedTimeline.current) { - this.dropDisposer?.(); - this.dropDisposer = DragManager.MakeDropTarget(r, + if (r && this.timeline) { + this._dropDisposer?.(); + this._dropDisposer = DragManager.MakeDropTarget(r, (e, de) => { const [xp, yp] = this.props.ScreenToLocalTransform().transformPoint(de.x, de.y); - de.complete.docDragData && this._stackedTimeline.current!.internalDocDrop(e, de, de.complete.docDragData, xp); + de.complete.docDragData && this.timeline!.internalDocDrop(e, de, de.complete.docDragData, xp); } , this.layoutDoc, undefined); } @@ -644,7 +540,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent< size={this.props.PanelHeight() < 36 ? "1x" : "2x"} />
- {this.mediaState === "recording" || this.mediaState === "paused" ? ( + {this.mediaState === media_state.Recording || this.mediaState === media_state.Playing ? (
e.stopPropagation()}>
{" "}
@@ -727,10 +623,10 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
{this.audio}
- {formatTime(Math.round(NumCast(this.layoutDoc._currentTimecode) - NumCast(this.clipStart)))} + {this.timeline && formatTime(Math.round(NumCast(this.layoutDoc._currentTimecode) - NumCast(this.timeline.clipStart)))}
- {formatTime(Math.round(NumCast(this.clipDuration)))} + {this.timeline && formatTime(Math.round(NumCast(this.timeline?.clipDuration)))}
@@ -738,4 +634,4 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
); } -} +} \ No newline at end of file diff --git a/src/client/views/nodes/VideoBox.tsx b/src/client/views/nodes/VideoBox.tsx index 8b33842ff..af65cce9f 100644 --- a/src/client/views/nodes/VideoBox.tsx +++ b/src/client/views/nodes/VideoBox.tsx @@ -16,7 +16,7 @@ import { CurrentUserUtils } from "../../util/CurrentUserUtils"; import { SelectionManager } from "../../util/SelectionManager"; import { SnappingManager } from "../../util/SnappingManager"; import { CollectionFreeFormView } from "../collections/collectionFreeForm/CollectionFreeFormView"; -import { CollectionStackedTimeline } from "../collections/CollectionStackedTimeline"; +import { CollectionStackedTimeline, TrimScope } from "../collections/CollectionStackedTimeline"; import { ContextMenu } from "../ContextMenu"; import { ContextMenuProps } from "../ContextMenuItem"; import { ViewBoxAnnotatableComponent, ViewBoxAnnotatableProps } from "../DocComponent"; @@ -31,6 +31,7 @@ import { DocumentManager } from "../../util/DocumentManager"; import { DocumentType } from "../../documents/DocumentTypes"; import { Tooltip } from "@material-ui/core"; import { AnchorMenu } from "../pdf/AnchorMenu"; +import { undoBatch } from "../../util/UndoManager"; const path = require('path'); type VideoDocument = makeInterface<[typeof documentSchema]>; @@ -101,7 +102,7 @@ export class VideoBox extends ViewBoxAnnotatableComponent this.layoutDoc.autoPlayAnchors = !this.layoutDoc.autoPlayAnchors, icon: "expand-arrows-alt" }); subitems.push({ description: "Toggle Native Controls", event: action(() => VideoBox._nativeControls = !VideoBox._nativeControls), icon: "expand-arrows-alt" }); subitems.push({ description: "Copy path", event: () => { Utils.CopyText(url); }, icon: "expand-arrows-alt" }); + subitems.push({ description: "Start Trim All", event: () => this.startTrim(TrimScope.All), icon: "expand-arrows-alt" }); + subitems.push({ description: "Start Trim Clip", event: () => this.startTrim(TrimScope.Clip), icon: "expand-arrows-alt" }); + 
subitems.push({ description: "Stop Trim", event: () => this.finishTrim(), icon: "expand-arrows-alt" }); + subitems.push({ description: "Copy path", event: () => { Utils.CopyText(url); }, icon: "expand-arrows-alt" }); ContextMenu.Instance.addItem({ description: "Options...", subitems: subitems, icon: "video" }); } } @@ -483,34 +488,47 @@ export class VideoBox extends ViewBoxAnnotatableComponent { + playFrom = (seekTimeInSeconds: number, endTime?: number) => { clearTimeout(this._playRegionTimer); - this._playRegionDuration = endTime - seekTimeInSeconds; if (Number.isNaN(this.player?.duration)) { setTimeout(() => this.playFrom(seekTimeInSeconds, endTime), 500); - } else if (this.player) { - if (seekTimeInSeconds < 0) { - if (seekTimeInSeconds > -1) { - setTimeout(() => this.playFrom(0), -seekTimeInSeconds * 1000); - } else { - this.Pause(); - } - } else if (seekTimeInSeconds <= this.player.duration) { - this.player.currentTime = seekTimeInSeconds; + } + else if (this.player) { + const end = Math.min(this.timeline?.trimEnd ?? this.duration, endTime ?? this.timeline?.trimEnd ?? this.duration); + const start = Math.max(this.timeline?.trimStart ?? 
0, seekTimeInSeconds); + this._playRegionDuration = end - seekTimeInSeconds; + if (seekTimeInSeconds >= 0 && (this.timeline?.trimStart || 0) <= end && seekTimeInSeconds <= (this.timeline?.trimEnd || this.duration)) { + this.player.currentTime = start; this._audioPlayer && (this._audioPlayer.currentTime = seekTimeInSeconds); this.player.play(); this._audioPlayer?.play(); runInAction(() => this._playing = true); if (endTime !== this.duration) { - this._playRegionTimer = setTimeout(() => this.Pause(), (this._playRegionDuration) * 1000); // use setTimeout to play a specific duration + return this._playRegionTimer = + setTimeout(() => this.Pause(), (this._playRegionDuration) * 1000); // use setTimeout to play a specific duration } + } + if (seekTimeInSeconds < 0 && seekTimeInSeconds > -1) { + setTimeout(() => this.playFrom(0), -seekTimeInSeconds * 1000); } else { this.Pause(); } } } + // hides trim controls and displays new clip + @undoBatch + finishTrim = action(() => { + this.Pause(); + this._stackedTimeline.current?.StopTrimming(); + }); + startTrim = (scope: TrimScope) => { + this.Pause(); + this._stackedTimeline.current?.StartTrimming(scope); + } + playLink = (doc: Doc) => { const startTime = Math.max(0, (this._stackedTimeline.current?.anchorStart(doc) || 0)); @@ -524,7 +542,9 @@ export class VideoBox extends ViewBoxAnnotatableComponent this._playing; timelineWhenChildContentsActiveChanged = action((isActive: boolean) => this.props.whenChildContentsActiveChanged(this._isAnyChildContentActive = isActive)); timelineScreenToLocal = () => this.props.ScreenToLocalTransform().scale(this.scaling()).translate(0, -this.heightPercent / 100 * this.props.PanelHeight()); - setAnchorTime = (time: number) => this.player!.currentTime = this.layoutDoc._currentTimecode = time; + setAnchorTime = (time: number) => { + this.player!.currentTime = this.layoutDoc._currentTimecode = time; + } timelineHeight = () => this.props.PanelHeight() * (100 - this.heightPercent) / 100; trimEndFunc = 
() => this.duration; @computed get renderTimeline() { @@ -550,15 +570,6 @@ export class VideoBox extends ViewBoxAnnotatableComponent
; } -- cgit v1.2.3-70-g09d2 From a793b7e981e46b7c98bee6cce3faaf3a5b05f6ae Mon Sep 17 00:00:00 2001 From: bobzel Date: Sun, 26 Sep 2021 02:55:23 -0400 Subject: fixed warnings, fixed bug following link w/auto play. plus refactorings. --- src/client/views/AudioWaveform.tsx | 11 +- .../collections/CollectionStackedTimeline.tsx | 78 ++-- src/client/views/nodes/AudioBox.tsx | 291 +++++++-------- src/client/views/nodes/LabelBox.tsx | 2 +- src/client/views/nodes/VideoBox.tsx | 405 ++++++++++----------- 5 files changed, 373 insertions(+), 414 deletions(-) (limited to 'src/client/views/AudioWaveform.tsx') diff --git a/src/client/views/AudioWaveform.tsx b/src/client/views/AudioWaveform.tsx index 0e9c00656..270b3869c 100644 --- a/src/client/views/AudioWaveform.tsx +++ b/src/client/views/AudioWaveform.tsx @@ -25,14 +25,13 @@ export interface AudioWaveformProps { export class AudioWaveform extends React.Component { public static NUMBER_OF_BUCKETS = 100; _disposer: IReactionDisposer | undefined; - @computed get _waveHeight() { - return Math.max(50, this.props.PanelHeight()); - } - + @computed get waveHeight() { return Math.max(50, this.props.PanelHeight()); } @computed get clipStart() { return this.props.clipStart; } @computed get clipEnd() { return this.props.clipEnd; } - audioBucketField = (start: number, end: number) => { return "audioBuckets/" + start.toFixed(2).replace(".", "_") + "/" + end.toFixed(2).replace(".", "_"); } @computed get audioBuckets() { return Cast(this.props.layoutDoc[this.audioBucketField(this.clipStart, this.clipEnd)], listSpec("number"), []); } + + audioBucketField = (start: number, end: number) => "audioBuckets/" + start.toFixed(2).replace(".", "_") + "/" + end.toFixed(2).replace(".", "_"); + componentWillUnmount() { this._disposer?.(); } @@ -87,7 +86,7 @@ export class AudioWaveform extends React.Component {
- ); - } - constructor(props: any) { super(props); // onClick play scripts @@ -157,8 +129,23 @@ export class CollectionStackedTimeline extends CollectionSubView< } } - anchorStart = (anchor: Doc) => NumCast(anchor._timecodeToShow, NumCast(anchor[this.props.startTag])) - anchorEnd = (anchor: Doc, val: any = null) => NumCast(anchor._timecodeToHide, NumCast(anchor[this.props.endTag], val)); + public get IsTrimming() { return this._trimming; } + + @action + public StartTrimming(scope: TrimScope) { + this._trimStart = this.clipStart; + this._trimEnd = this.clipEnd; + this._trimming = scope; + } + @action + public StopTrimming() { + this.layoutDoc.clipStart = this.trimStart; + this.layoutDoc.clipEnd = this.trimEnd; + this._trimming = TrimScope.None; + } + + anchorStart = (anchor: Doc) => NumCast(anchor._timecodeToShow, NumCast(anchor[this.props.startTag])); + anchorEnd = (anchor: Doc, val: any = null) => NumCast(anchor._timecodeToHide, NumCast(anchor[this.props.endTag], val) ?? null); toTimeline = (screen_delta: number, width: number) => { return Math.max( this.clipStart, @@ -345,13 +332,13 @@ export class CollectionStackedTimeline extends CollectionSubView< const localPt = this.props.ScreenToLocalTransform().transformPoint(de.x, de.y); const x = localPt[0] - docDragData.offset[0]; const timelinePt = this.toTimeline(x, this.props.PanelWidth()); - for (let i = 0; i < docDragData.droppedDocuments.length; i++) { - const d = Doc.GetProto(docDragData.droppedDocuments[i]); + docDragData.droppedDocuments.forEach(drop => { + const d = Doc.GetProto(drop); if (this.anchorEnd(d) !== undefined) { d[d._timecodeToHide === undefined ? this.props.endTag : "_timecodeToHide"] = timelinePt + this.anchorEnd(d) - this.anchorStart(d); } d[d._timecodToShow === undefined ? 
this.props.startTag : "_timecodToShow"] = timelinePt; - } + }); return true; } @@ -483,9 +470,12 @@ export class CollectionStackedTimeline extends CollectionSubView< } dictationHeightPercent = 50; - dictationHeight = () => (this.props.PanelHeight() * (100 - this.dictationHeightPercent)) / 100 - timelineContentHeight = () => (this.props.PanelHeight() * this.dictationHeightPercent) / 100 - dictationScreenToLocalTransform = () => this.props.ScreenToLocalTransform().translate(0, -this.timelineContentHeight()) + dictationHeight = () => (this.props.PanelHeight() * (100 - this.dictationHeightPercent)) / 100; + timelineContentHeight = () => (this.props.PanelHeight() * this.dictationHeightPercent) / 100; + dictationScreenToLocalTransform = () => this.props.ScreenToLocalTransform().translate(0, -this.timelineContentHeight()); + isContentActive = () => this.props.isSelected() || this.props.isContentActive(); + currentTimecode = () => this.currentTime; + @computed get renderDictation() { const dictation = Cast(this.dataDoc[this.props.dictationKey], Doc, null); return !dictation ? null : ( @@ -537,9 +527,19 @@ export class CollectionStackedTimeline extends CollectionSubView<
); } + @computed get selectionContainer() { + const markerEnd = CollectionStackedTimeline.SelectingRegion === this ? this.currentTime : this._markerEnd; + return markerEnd === undefined ? null : ( +
+ ); + } - isContentActive = () => this.props.isSelected() || this.props.isContentActive(); - currentTimecode = () => this.currentTime; render() { const timelineContentWidth = this.props.PanelWidth(); const overlaps: { @@ -760,7 +760,7 @@ class StackedTimelineAnchor extends React.Component e, (e) => { if (!undo) undo = UndoManager.StartBatch("drag anchor"); - return changeAnchor(anchor, left, newTime(e)) + return changeAnchor(anchor, left, newTime(e)); }, (e) => { this.props.setTime(newTime(e)); diff --git a/src/client/views/nodes/AudioBox.tsx b/src/client/views/nodes/AudioBox.tsx index 6e6558030..fa78d2301 100644 --- a/src/client/views/nodes/AudioBox.tsx +++ b/src/client/views/nodes/AudioBox.tsx @@ -1,6 +1,6 @@ import React = require("react"); import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; -import { action, computed, IReactionDisposer, observable, reaction, runInAction } from "mobx"; +import { action, computed, IReactionDisposer, observable, runInAction } from "mobx"; import { observer } from "mobx-react"; import { DateField } from "../../../fields/DateField"; import { Doc, DocListCast } from "../../../fields/Doc"; @@ -19,10 +19,8 @@ import { CollectionStackedTimeline, TrimScope } from "../collections/CollectionS import { ContextMenu } from "../ContextMenu"; import { ContextMenuProps } from "../ContextMenuItem"; import { ViewBoxAnnotatableComponent, ViewBoxAnnotatableProps } from "../DocComponent"; -import { Colors } from "../global/globalEnums"; import "./AudioBox.scss"; import { FieldView, FieldViewProps } from "./FieldView"; -import { LinkDocPreview } from "./LinkDocPreview"; declare class MediaRecorder { constructor(e: any); // whatever MediaRecorder has @@ -36,10 +34,14 @@ enum media_state { Recording = "recording", Paused = "paused", Playing = "playing" -}; +} @observer export class AudioBox extends ViewBoxAnnotatableComponent(AudioDocument) { public static LayoutString(fieldKey: string) { return FieldView.LayoutString(AudioBox, fieldKey); 
} + public static SetScrubTime = action((timeInMillisFrom1970: number) => { + AudioBox._scrubTime = 0; + AudioBox._scrubTime = timeInMillisFrom1970; + }); public static Enabled = false; static playheadWidth = 40; // width of playhead static heightPercent = 75; // height of timeline in percent of height of audioBox. @@ -63,13 +65,30 @@ export class AudioBox extends ViewBoxAnnotatableComponent { - AudioBox._scrubTime = 0; - AudioBox._scrubTime = timeInMillisFrom1970; - }); + get timeline() { return this._stackedTimeline.current; } // can't be computed since it's not observable + + componentWillUnmount() { + this._dropDisposer?.(); + Object.values(this._disposers).forEach((disposer) => disposer?.()); + const ind = DocUtils.ActiveRecordings.indexOf(this); + ind !== -1 && DocUtils.ActiveRecordings.splice(ind, 1); + } + + @action + componentDidMount() { + this.props.setContentView?.(this); // this tells the DocumentView that this AudioBox is the "content" of the document. this allows the DocumentView to indirectly call getAnchor() on the AudioBox when making a link. + + this.mediaState = this.path ? media_state.Paused : undefined as any as media_state; + + this.path && this.setAnchorTime(NumCast(this.layoutDoc.clipStart)); + this.path && this.timecodeChanged(); + } getLinkData(l: Doc) { let la1 = l.anchor1 as Doc; @@ -100,34 +119,15 @@ export class AudioBox extends ViewBoxAnnotatableComponent disposer?.()); - const ind = DocUtils.ActiveRecordings.indexOf(this); - ind !== -1 && DocUtils.ActiveRecordings.splice(ind, 1); - } - - @action - componentDidMount() { - this.props.setContentView?.(this); // this tells the DocumentView that this AudioBox is the "content" of the document. this allows the DocumentView to indirectly call getAnchor() on the AudioBox when making a link. - - this.mediaState = this.path ? 
media_state.Paused : undefined as any as media_state; - - this.path && this.setAnchorTime(NumCast(this.layoutDoc.clipStart)); - this.path && this.timecodeChanged(); - } - // for updating the timecode @action timecodeChanged = () => { if (this.mediaState !== media_state.Recording && this._ele) { this.links - .map((l) => this.getLinkData(l)) + .map(l => this.getLinkData(l)) .forEach(({ la1, la2, linkTime }) => { - if ( - linkTime > NumCast(this.layoutDoc._currentTimecode) && - linkTime < this._ele!.currentTime - ) { + if (linkTime > NumCast(this.layoutDoc._currentTimecode) && + linkTime < this._ele!.currentTime) { Doc.linkFollowHighlight(la1); } }); @@ -135,23 +135,11 @@ export class AudioBox extends ViewBoxAnnotatableComponent { - this._ele!.pause(); - this.mediaState = media_state.Paused; - }); - - // play audio for documents created during recording - playFromTime = (absoluteTime: number) => { - this.recordingStart && - this.playFrom((absoluteTime - this.recordingStart) / 1000); - } - // play back the audio from time @action - playFrom = (seekTimeInSeconds: number, endTime?: number, fullPlay: boolean = false): any => { - clearTimeout(this._play); - if (Number.isNaN(this._ele?.duration)) { + playFrom = (seekTimeInSeconds: number, endTime?: number, fullPlay: boolean = false) => { + clearTimeout(this._play); // abort any previous clip ending + if (Number.isNaN(this._ele?.duration)) { // audio element isn't loaded yet... 
wait 1/2 second and try again setTimeout(() => this.playFrom(seekTimeInSeconds, endTime), 500); } else if (this.timeline && this._ele && AudioBox.Enabled) { @@ -160,18 +148,13 @@ export class AudioBox extends ViewBoxAnnotatableComponent= 0 && this.timeline.trimStart <= end && seekTimeInSeconds <= this.timeline.trimEnd) { this._ele.currentTime = start; this._ele.play(); - runInAction(() => this.mediaState = media_state.Playing); - if (end !== this.timeline.clipDuration) { - return this._play = setTimeout( - () => { - if (fullPlay) this.setAnchorTime(this.timeline!.trimStart); - this.Pause(); - }, - (end - start) * 1000 - ); // use setTimeout to play a specific duration - } - } else if (seekTimeInSeconds < 0 && seekTimeInSeconds > -1) { - setTimeout(() => this.playFrom(0), -seekTimeInSeconds * 1000); + this.mediaState = media_state.Playing; + this._play = setTimeout( + () => { + if (fullPlay) this.setAnchorTime(this.timeline!.trimStart); + this.Pause(); + }, + (end - start) * 1000); } else { this.Pause(); } @@ -185,8 +168,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent this.mediaState = media_state.Recording); - setTimeout(this.updateRecordTime, 0); + setTimeout(this.updateRecordTime); this._recorder.start(); - setTimeout(() => this.stopRecording(), 60 * 60 * 1000); // stop after an hour + setTimeout(this.stopRecording, 60 * 60 * 1000); // stop after an hour + } + + @action + stopRecording = () => { + if (this._recorder) { + this._recorder.stop(); + this._recorder = undefined; + this.dataDoc[this.fieldKey + "-duration"] = (new Date().getTime() - this._recordStart - this.pauseTime) / 1000; + this.mediaState = media_state.Paused; + this._stream?.getAudioTracks()[0].stop(); + const ind = DocUtils.ActiveRecordings.indexOf(this); + ind !== -1 && DocUtils.ActiveRecordings.splice(ind, 1); + } } // context menu @@ -243,22 +238,8 @@ export class AudioBox extends ViewBoxAnnotatableComponent { - if (this._recorder) { - this._recorder.stop(); - this._recorder = 
undefined; - this.dataDoc[this.fieldKey + "-duration"] = - (new Date().getTime() - this._recordStart - this.pauseTime) / 1000; - this.mediaState = media_state.Paused; - this._stream?.getAudioTracks()[0].stop(); - const ind = DocUtils.ActiveRecordings.indexOf(this); - ind !== -1 && DocUtils.ActiveRecordings.splice(ind, 1); - } - }); - // button for starting and stopping the recording - recordClick = (e: React.MouseEvent) => { + Record = (e: React.MouseEvent) => { if (e.button === 0 && !e.ctrlKey) { this._recorder ? this.stopRecording() : this.recordAudioAnnotation(); e.stopPropagation(); @@ -267,12 +248,19 @@ export class AudioBox extends ViewBoxAnnotatableComponent { - const eleTime = this._ele!.currentTime; - const start = eleTime === this.timeline?.trimDuration ? this.timeline.trimStart : eleTime; + const eleTime = this._ele?.currentTime || 0; + const start = eleTime === this.timeline?.trimEnd ? this.timeline.trimStart : eleTime; this.playFrom(start, undefined, true); e?.stopPropagation?.(); } + // pause play back + @action + Pause = () => { + this._ele?.pause(); + this.mediaState = media_state.Paused; + } + // creates a text document for dictation onFile = (e: any) => { const newDoc = CurrentUserUtils.GetNewTextDoc( @@ -302,27 +290,6 @@ export class AudioBox extends ViewBoxAnnotatableComponent { - const duration = this._ele?.duration; - if (duration && duration !== Infinity) { - this.dataDoc[this.fieldKey + "-duration"] = duration; - } - })} - className={`audiobox-control${this.props.isContentActive() ? "-interactive" : ""}`}> - - Not supported. 
- ; - } - // pause the time during recording phase @action recordPause = (e: React.MouseEvent) => { @@ -341,14 +308,12 @@ export class AudioBox extends ViewBoxAnnotatableComponent this.mediaState === media_state.Playing; playLink = (link: Doc) => { if (link.annotationOn === this.rootDoc) { if (!this.layoutDoc.dontAutoPlayFollowedLinks) { this.playFrom(this.timeline?.anchorStart(link) || 0, this.timeline?.anchorEnd(link)); } else { - this._ele!.currentTime = this.layoutDoc._currentTimecode = - this.timeline?.anchorStart(link) || 0; + this._ele!.currentTime = this.layoutDoc._currentTimecode = this.timeline?.anchorStart(link) || 0; } } else { this.links @@ -368,6 +333,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent this.mediaState === media_state.Playing; isActiveChild = () => this._isAnyChildContentActive; timelineWhenChildContentsActiveChanged = (isActive: boolean) => this.props.whenChildContentsActiveChanged( @@ -380,54 +346,21 @@ export class AudioBox extends ViewBoxAnnotatableComponent { - (this._ele!.currentTime = this.layoutDoc._currentTimecode = time); - } + setAnchorTime = (time: number) => this._ele!.currentTime = this.layoutDoc._currentTimecode = time; + timelineWidth = () => this.props.PanelWidth() - AudioBox.playheadWidth; timelineHeight = () => (((this.props.PanelHeight() * AudioBox.heightPercent) / 100) * AudioBox.heightPercent) / 100 // panelHeight * heightPercent is player height. 
* heightPercent is timeline height (as per css inline) - timelineWidth = () => this.props.PanelWidth() - AudioBox.playheadWidth; - @computed get renderTimeline() { - return ( - - ); - } - // hides trim controls and displays new clip + + @undoBatch - finishTrim = action(() => { + finishTrim = () => { // hides trim controls and displays new clip this.Pause(); this.setAnchorTime(Math.max(Math.min(this.timeline?.trimEnd || 0, this._ele!.currentTime), this.timeline?.trimStart || 0)); this.timeline?.StopTrimming(); - }); + } + startTrim = (scope: TrimScope) => { this.Pause(); this.timeline?.StartTrimming(scope); @@ -444,6 +377,18 @@ export class AudioBox extends ViewBoxAnnotatableComponent { + if (r && this.timeline) { + this._dropDisposer?.(); + this._dropDisposer = DragManager.MakeDropTarget(r, + (e, de) => { + const [xp, yp] = this.props.ScreenToLocalTransform().transformPoint(de.x, de.y); + de.complete.docDragData && this.timeline!.internalDocDrop(e, de, de.complete.docDragData, xp); + }, + this.layoutDoc, undefined); + } + } + @computed get recordingControls() { return
@@ -453,8 +398,8 @@ export class AudioBox extends ViewBoxAnnotatableComponent
{[media_state.Recording, media_state.Playing].includes(this.mediaState) ? -
e.stopPropagation()}> -
+
e.stopPropagation()}> +
@@ -522,16 +467,52 @@ export class AudioBox extends ViewBoxAnnotatableComponent; } - setupTimelineDrop = (r: HTMLDivElement | null) => { - if (r && this.timeline) { - this._dropDisposer?.(); - this._dropDisposer = DragManager.MakeDropTarget(r, - (e, de) => { - const [xp, yp] = this.props.ScreenToLocalTransform().transformPoint(de.x, de.y); - de.complete.docDragData && this.timeline!.internalDocDrop(e, de, de.complete.docDragData, xp); - }, - this.layoutDoc, undefined); - } + @computed get renderTimeline() { + return ( + + ); + } + // returns the html audio element + @computed get audio() { + return ; } render() { diff --git a/src/client/views/nodes/LabelBox.tsx b/src/client/views/nodes/LabelBox.tsx index 935c878ee..97b1aac86 100644 --- a/src/client/views/nodes/LabelBox.tsx +++ b/src/client/views/nodes/LabelBox.tsx @@ -111,7 +111,7 @@ export class LabelBox extends ViewBoxBaseComponent<(FieldViewProps & LabelBoxPro verticalAlign: "center", textAlign: "center", whiteSpace: "nowrap" - }) + }); } }}>{label.startsWith("#") ? (null) : label}
diff --git a/src/client/views/nodes/VideoBox.tsx b/src/client/views/nodes/VideoBox.tsx index 3435c2a24..2befb4128 100644 --- a/src/client/views/nodes/VideoBox.tsx +++ b/src/client/views/nodes/VideoBox.tsx @@ -1,5 +1,6 @@ import React = require("react"); import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; +import { Tooltip } from "@material-ui/core"; import { action, computed, IReactionDisposer, observable, ObservableMap, reaction, runInAction, untracked } from "mobx"; import { observer } from "mobx-react"; import * as rp from 'request-promise'; @@ -8,13 +9,16 @@ import { documentSchema } from "../../../fields/documentSchemas"; import { InkTool } from "../../../fields/InkField"; import { makeInterface } from "../../../fields/Schema"; import { Cast, NumCast, StrCast } from "../../../fields/Types"; -import { AudioField, nullAudio, VideoField } from "../../../fields/URLField"; -import { emptyFunction, formatTime, OmitKeys, returnOne, setupMoveUpEvents, Utils, returnFalse, returnZero } from "../../../Utils"; +import { AudioField, VideoField } from "../../../fields/URLField"; +import { emptyFunction, formatTime, OmitKeys, returnFalse, returnOne, setupMoveUpEvents, Utils } from "../../../Utils"; import { Docs, DocUtils } from "../../documents/Documents"; +import { DocumentType } from "../../documents/DocumentTypes"; import { Networking } from "../../Network"; import { CurrentUserUtils } from "../../util/CurrentUserUtils"; +import { DocumentManager } from "../../util/DocumentManager"; import { SelectionManager } from "../../util/SelectionManager"; import { SnappingManager } from "../../util/SnappingManager"; +import { undoBatch } from "../../util/UndoManager"; import { CollectionFreeFormView } from "../collections/collectionFreeForm/CollectionFreeFormView"; import { CollectionStackedTimeline, TrimScope } from "../collections/CollectionStackedTimeline"; import { ContextMenu } from "../ContextMenu"; @@ -22,16 +26,10 @@ import { ContextMenuProps } from 
"../ContextMenuItem"; import { ViewBoxAnnotatableComponent, ViewBoxAnnotatableProps } from "../DocComponent"; import { DocumentDecorations } from "../DocumentDecorations"; import { MarqueeAnnotator } from "../MarqueeAnnotator"; +import { AnchorMenu } from "../pdf/AnchorMenu"; import { StyleProp } from "../StyleProvider"; import { FieldView, FieldViewProps } from './FieldView'; -import { LinkDocPreview } from "./LinkDocPreview"; import "./VideoBox.scss"; -import { DragManager } from "../../util/DragManager"; -import { DocumentManager } from "../../util/DocumentManager"; -import { DocumentType } from "../../documents/DocumentTypes"; -import { Tooltip } from "@material-ui/core"; -import { AnchorMenu } from "../pdf/AnchorMenu"; -import { undoBatch } from "../../util/UndoManager"; const path = require('path'); type VideoDocument = makeInterface<[typeof documentSchema]>; @@ -40,14 +38,30 @@ const VideoDocument = makeInterface(documentSchema); @observer export class VideoBox extends ViewBoxAnnotatableComponent(VideoDocument) { public static LayoutString(fieldKey: string) { return FieldView.LayoutString(VideoBox, fieldKey); } + static async convertDataUri(imageUri: string, returnedFilename: string) { + try { + const posting = Utils.prepend("/uploadURI"); + const returnedUri = await rp.post(posting, { + body: { + uri: imageUri, + name: returnedFilename + }, + json: true, + }); + return returnedUri; + + } catch (e) { + console.log("VideoBox :" + e); + } + } static _youtubeIframeCounter: number = 0; - static Instance: VideoBox; static heightPercent = 60; // height of timeline in percent of height of videoBox. 
private _disposers: { [name: string]: IReactionDisposer } = {}; private _youtubePlayer: YT.Player | undefined = undefined; private _videoRef: HTMLVideoElement | null = null; private _youtubeIframeId: number = -1; private _youtubeContentCreated = false; + private _audioPlayer: HTMLAudioElement | null = null; private _stackedTimeline = React.createRef(); private _mainCont: React.RefObject = React.createRef(); private _annotationLayer: React.RefObject = React.createRef(); @@ -62,47 +76,52 @@ export class VideoBox extends ViewBoxAnnotatableComponent arr[arr.length - 1])(field.url.href.split("/")) : ""; + } + // returns the path of the audio file + @computed get audiopath() { + const field = Cast(this.props.Document[this.props.fieldKey + '-audio'], AudioField, null); + const vfield = Cast(this.dataDoc[this.fieldKey], VideoField, null); + return field?.url.href ?? vfield?.url.href ?? ""; } + private get timeline() { return this._stackedTimeline.current; } private get transition() { return this._clicking ? "left 0.5s, width 0.5s, height 0.5s" : ""; } public get player(): HTMLVideoElement | null { return this._videoRef; } - constructor(props: Readonly) { - super(props); - VideoBox.Instance = this; - } - - getAnchor = () => { - const timecode = Cast(this.layoutDoc._currentTimecode, "number", null); - const marquee = AnchorMenu.Instance.GetAnchor?.(); - return CollectionStackedTimeline.createAnchor(this.rootDoc, this.dataDoc, this.annotationKey, "_timecodeToShow"/* videoStart */, "_timecodeToHide" /* videoEnd */, timecode ? timecode : undefined, undefined, marquee) || this.rootDoc; + componentDidMount() { + this.props.setContentView?.(this); // this tells the DocumentView that this AudioBox is the "content" of the document. this allows the DocumentView to indirectly call getAnchor() on the AudioBox when making a link. 
+ if (this.youtubeVideoId) { + const youtubeaspect = 400 / 315; + const nativeWidth = Doc.NativeWidth(this.layoutDoc); + const nativeHeight = Doc.NativeHeight(this.layoutDoc); + if (!nativeWidth || !nativeHeight) { + if (!nativeWidth) Doc.SetNativeWidth(this.dataDoc, 600); + Doc.SetNativeHeight(this.dataDoc, (nativeWidth || 600) / youtubeaspect); + this.layoutDoc._height = (this.layoutDoc._width || 0) / youtubeaspect; + } + } } - videoLoad = () => { - const aspect = this.player!.videoWidth / this.player!.videoHeight; - Doc.SetNativeWidth(this.dataDoc, this.player!.videoWidth); - Doc.SetNativeHeight(this.dataDoc, this.player!.videoHeight); - this.layoutDoc._height = (this.layoutDoc._width || 0) / aspect; - if (Number.isFinite(this.player!.duration)) { - this.dataDoc[this.fieldKey + "-duration"] = this.player!.duration; - } + componentWillUnmount() { + this.Pause(); + Object.keys(this._disposers).forEach(d => this._disposers[d]?.()); } @action public Play = (update: boolean = true) => { this._playing = true; + const eleTime = this.player?.currentTime || 0; + const start = eleTime >= (this.timeline?.trimEnd || 0) ? this.timeline?.trimStart || 0 : eleTime; try { this._audioPlayer && this.player && (this._audioPlayer.currentTime = this.player?.currentTime); - update && this.player && this.playFrom(this.player.currentTime); + update && this.player && this.playFrom(start, undefined, true); update && this._audioPlayer?.play(); update && this._youtubePlayer?.playVideo(); this._youtubePlayer && !this._playTimer && (this._playTimer = setInterval(this.updateTimecode, 5)); @@ -190,7 +209,7 @@ export class VideoBox extends ViewBoxAnnotatableComponent { + createRealSummaryLink = (imagePath: string, downX?: number, downY?: number) => { const url = !imagePath.startsWith("/") ? 
Utils.CorsProxy(imagePath) : imagePath; const width = this.layoutDoc._width || 1; const height = this.layoutDoc._height || 0; @@ -208,11 +227,25 @@ export class VideoBox extends ViewBoxAnnotatableComponent { + const timecode = Cast(this.layoutDoc._currentTimecode, "number", null); + const marquee = AnchorMenu.Instance.GetAnchor?.(); + return CollectionStackedTimeline.createAnchor(this.rootDoc, this.dataDoc, this.annotationKey, "_timecodeToShow"/* videoStart */, "_timecodeToHide" /* videoEnd */, timecode ? timecode : undefined, undefined, marquee) || this.rootDoc; + } + + videoLoad = () => { + const aspect = this.player!.videoWidth / this.player!.videoHeight; + Doc.SetNativeWidth(this.dataDoc, this.player!.videoWidth); + Doc.SetNativeHeight(this.dataDoc, this.player!.videoHeight); + this.layoutDoc._height = (this.layoutDoc._width || 0) / aspect; + if (Number.isFinite(this.player!.duration)) { + this.dataDoc[this.fieldKey + "-duration"] = this.player!.duration; + } + } + @action updateTimecode = () => { this.player && (this.layoutDoc._currentTimecode = this.player.currentTime); - this.layoutDoc.clipEnd = this.layoutDoc.clipEnd ? Math.min(this.duration, NumCast(this.layoutDoc.clipEnd)) : this.duration; - this._trimEnd = this._trimEnd ? 
Math.min(this.duration, this._trimEnd) : this.duration; try { this._youtubePlayer && (this.layoutDoc._currentTimecode = this._youtubePlayer.getCurrentTime?.()); } catch (e) { @@ -220,25 +253,6 @@ export class VideoBox extends ViewBoxAnnotatableComponent this._disposers[d]?.()); - } - @action setVideoRef = (vref: HTMLVideoElement | null) => { this._videoRef = vref; @@ -252,23 +266,6 @@ export class VideoBox extends ViewBoxAnnotatableComponent { const field = Cast(this.dataDoc[this.props.fieldKey], VideoField); if (field) { @@ -294,48 +291,8 @@ export class VideoBox extends ViewBoxAnnotatableComponent this._audioPlayer = e; - @computed get content() { - const field = Cast(this.dataDoc[this.fieldKey], VideoField); - const interactive = CurrentUserUtils.SelectedTool !== InkTool.None || !this.props.isSelected() ? "" : "-interactive"; - const style = "videoBox-content" + (this._fullScreen ? "-fullScreen" : "") + interactive; - return !field ?
Loading
: -
-
- - {!this.audiopath || this.audiopath === field.url.href ? (null) : - } -
-
; - } - - @computed get youtubeVideoId() { - const field = Cast(this.dataDoc[this.props.fieldKey], VideoField); - return field && field.url.href.indexOf("youtube") !== -1 ? ((arr: string[]) => arr[arr.length - 1])(field.url.href.split("/")) : ""; - } - - @action youtubeIframeLoaded = (e: any) => { + @action + youtubeIframeLoaded = (e: any) => { if (!this._youtubeContentCreated) { this._forceCreateYouTubeIFrame = !this._forceCreateYouTubeIFrame; return; @@ -344,7 +301,7 @@ export class VideoBox extends ViewBoxAnnotatableComponent { + loadYouTube = (iframe: any) => { let started = true; const onYoutubePlayerStateChange = (event: any) => runInAction(() => { if (started && event.data === YT.PlayerState.PLAYING) { @@ -376,39 +333,6 @@ export class VideoBox extends ViewBoxAnnotatableComponent{"playback"}
} key="play" placement="bottom"> -
- -
- , - {"timecode"}
} key="time" placement="bottom"> -
- {formatTime(curTime)} - {" " + Math.floor((curTime - Math.trunc(curTime)) * 100).toString().padStart(2, "0")} -
- , - {"view full screen"}
} key="full" placement="bottom"> -
- -
- ]; - return
- {[...(VideoBox._nativeControls ? [] : nonNativeControls), - {"snapshot current frame"}
} key="snap" placement="bottom"> -
- -
- , - {"show annotation timeline"}
} key="timeline" placement="bottom"> -
- -
- ,]} -
; - } onPlayDown = () => this._playing ? this.Pause() : this.Play(); @@ -425,10 +349,11 @@ export class VideoBox extends ViewBoxAnnotatableComponent this.Snapshot()); } - onTimelineHdlDown = action((e: React.PointerEvent) => { + @action + onTimelineHdlDown = (e: React.PointerEvent) => { this._clicking = true; setupMoveUpEvents(this, e, - action((e: PointerEvent) => { + action(encodeURIComponent => { this._clicking = false; if (this.props.isContentActive()) { const local = this.props.ScreenToLocalTransform().scale(this.props.scaling?.() || 1).transformPoint(e.clientX, e.clientY); @@ -440,28 +365,17 @@ export class VideoBox extends ViewBoxAnnotatableComponent this._clicking = false), 500); }, this.props.isContentActive(), this.props.isContentActive()); - }); + } onResetDown = (e: React.PointerEvent) => { setupMoveUpEvents(this, e, - (e: PointerEvent) => { + e => { this.Seek(Math.max(0, (this.layoutDoc._currentTimecode || 0) + Math.sign(e.movementX) * 0.0333)); e.stopImmediatePropagation(); return false; }, emptyFunction, - (e: PointerEvent) => this.layoutDoc._currentTimecode = 0); - } - - @computed get youtubeContent() { - this._youtubeIframeId = VideoBox._youtubeIframeCounter++; - this._youtubeContentCreated = this._forceCreateYouTubeIFrame ? true : true; - const style = "videoBox-content-YouTube" + (this._fullScreen ? "-fullScreen" : ""); - const start = untracked(() => Math.round((this.layoutDoc._currentTimecode || 0))); - return