From 7e0a60dc39ce2ad3fc8f90197f5ade7fe5e5b112 Mon Sep 17 00:00:00 2001 From: mehekj Date: Sat, 11 Sep 2021 11:47:22 -0400 Subject: fixed record buttons and trimmed marker width appearance --- src/client/views/collections/CollectionStackedTimeline.tsx | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) (limited to 'src/client/views/collections') diff --git a/src/client/views/collections/CollectionStackedTimeline.tsx b/src/client/views/collections/CollectionStackedTimeline.tsx index 51e05e278..56621d6d5 100644 --- a/src/client/views/collections/CollectionStackedTimeline.tsx +++ b/src/client/views/collections/CollectionStackedTimeline.tsx @@ -548,10 +548,14 @@ export class CollectionStackedTimeline extends CollectionSubView< ); const left = this.props.trimming ? (start / this.duration) * timelineContentWidth - : (start - this.trimStart) / this.props.trimDuration * timelineContentWidth; + : Math.max((start - this.trimStart) / this.props.trimDuration * timelineContentWidth, 0); const top = (d.level / maxLevel) * this.timelineContentHeight(); const timespan = end - start; - const width = (timespan / this.props.trimDuration) * timelineContentWidth; + let width = (timespan / this.props.trimDuration) * timelineContentWidth; + width = (!this.props.trimming && left == 0) ? + width - ((this.trimStart - start) / this.props.trimDuration * timelineContentWidth) : width; + width = (!this.props.trimming && this.trimEnd < end) ? + width - ((end - this.trimEnd) / this.props.trimDuration * timelineContentWidth) : width; const height = this.timelineContentHeight() / maxLevel return this.props.Document.hideAnchors ? null : (
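// Illustrative sketch (not part of the patch): the hunk above clamps an anchor's left edge to 0 and
// shortens its width when the anchor extends past the trimmed clip. The same idea, factored into a
// standalone helper with hypothetical names; clipStart/clipEnd/timelineWidth are assumed inputs.
interface AnchorBar { left: number; width: number; }
function anchorToPixels(start: number, end: number, clipStart: number, clipEnd: number, timelineWidth: number): AnchorBar | undefined {
    const clipDuration = clipEnd - clipStart;
    if (clipDuration <= 0 || end <= clipStart || start >= clipEnd) return undefined; // anchor lies outside the visible clip
    const visibleStart = Math.max(start, clipStart);   // clamp the left edge, as the Math.max(..., 0) above does
    const visibleEnd = Math.min(end, clipEnd);         // trim the right edge, as the trimEnd adjustment above does
    return {
        left: ((visibleStart - clipStart) / clipDuration) * timelineWidth,
        width: ((visibleEnd - visibleStart) / clipDuration) * timelineWidth,
    };
}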
Date: Mon, 20 Sep 2021 22:25:03 -0400 Subject: partially added dragging timeline markers --- .../collections/CollectionStackedTimeline.tsx | 11 +- src/client/views/nodes/AudioBox.scss | 61 ++++------ src/client/views/nodes/AudioBox.tsx | 129 +++++++++++---------- src/client/views/nodes/VideoBox.tsx | 10 ++ 4 files changed, 108 insertions(+), 103 deletions(-) (limited to 'src/client/views/collections') diff --git a/src/client/views/collections/CollectionStackedTimeline.tsx b/src/client/views/collections/CollectionStackedTimeline.tsx index 8608700aa..0ef6f852a 100644 --- a/src/client/views/collections/CollectionStackedTimeline.tsx +++ b/src/client/views/collections/CollectionStackedTimeline.tsx @@ -354,6 +354,13 @@ export class CollectionStackedTimeline extends CollectionSubView< // determine x coordinate of drop and assign it to the documents being dragged --- see internalDocDrop of collectionFreeFormView.tsx for how it's done when dropping onto a 2D freeform view + const x = docDragData.offset[0]; + const timelineContentWidth = this.props.PanelWidth(); + for (let i = 0; i < docDragData.droppedDocuments.length; i++) { + const d = docDragData.droppedDocuments[i]; + d._timecodeToShow = x / timelineContentWidth * this.props.trimDuration + NumCast(d._timecodeToShow); + d._timecodeToHide = x / timelineContentWidth * this.props.trimDuration + NumCast(d._timecodeToHide); + } return true; } @@ -573,14 +580,14 @@ export class CollectionStackedTimeline extends CollectionSubView< const left = this.props.trimming ? (start / this.duration) * timelineContentWidth : Math.max((start - this.trimStart) / this.props.trimDuration * timelineContentWidth, 0); - const top = (d.level / maxLevel) * this.timelineContentHeight(); + const top = (d.level / maxLevel) * this.timelineContentHeight() + 15; const timespan = end - start; let width = (timespan / this.props.trimDuration) * timelineContentWidth; width = (!this.props.trimming && left == 0) ? width - ((this.trimStart - start) / this.props.trimDuration * timelineContentWidth) : width; width = (!this.props.trimming && this.trimEnd < end) ? width - ((end - this.trimEnd) / this.props.trimDuration * timelineContentWidth) : width; - const height = this.timelineContentHeight() / maxLevel + const height = (this.timelineContentHeight()) / maxLevel; return this.props.Document.hideAnchors ? null : (
- (this.layoutDoc.dontAutoPlayFollowedLinks = - !this.layoutDoc.dontAutoPlayFollowedLinks), + (this.layoutDoc.dontAutoPlayFollowedLinks = + !this.layoutDoc.dontAutoPlayFollowedLinks), icon: "expand-arrows-alt", }); funcs.push({ @@ -637,77 +638,77 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
[diff hunk unreadable here: the JSX markup in this AudioBox.tsx render hunk was stripped during extraction, leaving only fragments. The surviving fragments -- the record button ("RECORD"), the playhead controls, {this.renderTimeline}, {this.audio}, and the formatTime(...) current-time / duration readouts -- are identical on the removed and added sides, so this hunk appears to only re-indent that markup.]
); } diff --git a/src/client/views/nodes/VideoBox.tsx b/src/client/views/nodes/VideoBox.tsx index 90de3227f..7dd82ad13 100644 --- a/src/client/views/nodes/VideoBox.tsx +++ b/src/client/views/nodes/VideoBox.tsx @@ -60,9 +60,17 @@ export class VideoBox extends ViewBoxAnnotatableComponent { this.player && (this.layoutDoc._currentTimecode = this.player.currentTime); + this.layoutDoc.clipEnd = this.layoutDoc.clipEnd ? Math.min(this.duration, NumCast(this.layoutDoc.clipEnd)) : this.duration; + this._trimEnd = this._trimEnd ? Math.min(this.duration, this._trimEnd) : this.duration; try { this._youtubePlayer && (this.layoutDoc._currentTimecode = this._youtubePlayer.getCurrentTime?.()); } catch (e) { -- cgit v1.2.3-70-g09d2 From ad921f6ee14735be136784e90c824632086aa43a Mon Sep 17 00:00:00 2001 From: bobzel Date: Wed, 22 Sep 2021 13:33:34 -0400 Subject: fixed dragging anchor on audio timeline to position correctly. fixed being able to drop anywhere on audio box. fixed undoing drag anchor and drag handles of audio anchors. --- src/client/views/DocumentDecorations.tsx | 4 +- .../collections/CollectionStackedTimeline.tsx | 49 ++++--- src/client/views/nodes/AudioBox.tsx | 153 +++++++++++---------- 3 files changed, 112 insertions(+), 94 deletions(-) (limited to 'src/client/views/collections') diff --git a/src/client/views/DocumentDecorations.tsx b/src/client/views/DocumentDecorations.tsx index d785d5419..29fcee822 100644 --- a/src/client/views/DocumentDecorations.tsx +++ b/src/client/views/DocumentDecorations.tsx @@ -426,8 +426,8 @@ export class DocumentDecorations extends React.Component<{ PanelWidth: number, P const canOpen = SelectionManager.Views().some(docView => !docView.props.Document._stayInCollection && !docView.props.Document.isGroup && !docView.props.Document.hideOpenButton); const canDelete = SelectionManager.Views().some(docView => { const collectionAcl = docView.props.ContainingCollectionView ? GetEffectiveAcl(docView.props.ContainingCollectionDoc?.[DataSym]) : AclEdit; - return (!docView.rootDoc._stayInCollection || docView.rootDoc.isInkMask) && - (collectionAcl === AclAdmin || collectionAcl === AclEdit || GetEffectiveAcl(docView.rootDoc) === AclAdmin); + //return (!docView.rootDoc._stayInCollection || docView.rootDoc.isInkMask) && + return (collectionAcl === AclAdmin || collectionAcl === AclEdit || GetEffectiveAcl(docView.rootDoc) === AclAdmin); }); const topBtn = (key: string, icon: string, pointerDown: undefined | ((e: React.PointerEvent) => void), click: undefined | ((e: any) => void), title: string) => ( {title}
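// Illustrative sketch (hypothetical names): the VideoBox.tsx hunk above re-clamps any persisted clip
// bounds once the real media duration is known from the loaded player, so a stale clipEnd can never
// exceed the underlying file's length.
function clampClipBounds(duration: number, clipStart?: number, clipEnd?: number): [number, number] {
    const start = Math.min(Math.max(clipStart ?? 0, 0), duration);       // keep the start inside the media
    const end = clipEnd === undefined ? duration : Math.min(clipEnd, duration); // default/limit the end to the media length
    return [start, Math.max(start, end)];
}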
} placement="top"> diff --git a/src/client/views/collections/CollectionStackedTimeline.tsx b/src/client/views/collections/CollectionStackedTimeline.tsx index 9577256c9..970947b12 100644 --- a/src/client/views/collections/CollectionStackedTimeline.tsx +++ b/src/client/views/collections/CollectionStackedTimeline.tsx @@ -30,7 +30,7 @@ import { LinkManager } from "../../util/LinkManager"; import { Scripting } from "../../util/Scripting"; import { SelectionManager } from "../../util/SelectionManager"; import { Transform } from "../../util/Transform"; -import { undoBatch } from "../../util/UndoManager"; +import { undoBatch, UndoManager } from "../../util/UndoManager"; import { AudioWaveform } from "../AudioWaveform"; import { CollectionSubView } from "../collections/CollectionSubView"; import { LightboxView } from "../LightboxView"; @@ -354,12 +354,13 @@ export class CollectionStackedTimeline extends CollectionSubView< // determine x coordinate of drop and assign it to the documents being dragged --- see internalDocDrop of collectionFreeFormView.tsx for how it's done when dropping onto a 2D freeform view - const x = docDragData.offset[0]; + const localPt = this.props.ScreenToLocalTransform().transformPoint(de.x, de.y); + const x = localPt[0] - docDragData.offset[0]; const timelineContentWidth = this.props.PanelWidth(); for (let i = 0; i < docDragData.droppedDocuments.length; i++) { - const d = docDragData.droppedDocuments[i]; - d._timecodeToShow = x / timelineContentWidth * this.props.trimDuration + NumCast(d._timecodeToShow); - d._timecodeToHide = x / timelineContentWidth * this.props.trimDuration + NumCast(d._timecodeToHide); + const d = Doc.GetProto(docDragData.droppedDocuments[i]); + d._timecodeToHide = x / timelineContentWidth * this.props.trimDuration + NumCast(d._timecodeToHide) - NumCast(d._timecodeToShow); + d._timecodeToShow = x / timelineContentWidth * this.props.trimDuration; } return true; @@ -774,13 +775,19 @@ class StackedTimelineAnchor extends React.Component } return false; }; + var undo: UndoManager.Batch | undefined; + setupMoveUpEvents( this, e, - (e) => changeAnchor(anchor, left, newTime(e)), + (e) => { + if (!undo) undo = UndoManager.StartBatch("drag anchor"); + return changeAnchor(anchor, left, newTime(e)) + }, (e) => { this.props.setTime(newTime(e)); this.props._timeline?.releasePointerCapture(e.pointerId); + undo?.end(); }, emptyFunction ); @@ -862,21 +869,21 @@ class StackedTimelineAnchor extends React.Component {inner.view} {!inner.anchor.view || !SelectionManager.IsSelected(inner.anchor.view) ? null : ( - <> -
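// Illustrative sketch (hypothetical names): the internalDocDrop fix above converts the drop's screen
// point into timeline-local coordinates before mapping it to a timecode, and preserves each dropped
// anchor's span by recomputing its hide time from its old duration. (The same commit also opens an
// UndoManager batch on the first drag movement and ends it on pointer-up, so the whole drag undoes
// as a single step.)
interface TimecodedDoc { timecodeToShow: number; timecodeToHide: number; }
function dropAtTimeline(doc: TimecodedDoc, localX: number, timelineWidth: number, clipStart: number, clipDuration: number): void {
    const dropTime = clipStart + (localX / timelineWidth) * clipDuration; // pixel offset -> seconds within the clip
    const span = doc.timecodeToHide - doc.timecodeToShow;                 // keep the anchor's original length
    doc.timecodeToShow = dropTime;
    doc.timecodeToHide = dropTime + span;
}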
this.onAnchorDown(e, this.props.mark, true)} - /> -
- this.onAnchorDown(e, this.props.mark, false) - } - /> - - )} + <> +
this.onAnchorDown(e, this.props.mark, true)} + /> +
+ this.onAnchorDown(e, this.props.mark, false) + } + /> + + )} ); } diff --git a/src/client/views/nodes/AudioBox.tsx b/src/client/views/nodes/AudioBox.tsx index 830c73278..9281cac9a 100644 --- a/src/client/views/nodes/AudioBox.tsx +++ b/src/client/views/nodes/AudioBox.tsx @@ -6,7 +6,7 @@ import { IReactionDisposer, observable, reaction, - runInAction, + runInAction } from "mobx"; import { observer } from "mobx-react"; import { DateField } from "../../../fields/DateField"; @@ -20,19 +20,19 @@ import { emptyFunction, formatTime } from "../../../Utils"; import { DocUtils } from "../../documents/Documents"; import { Networking } from "../../Network"; import { CurrentUserUtils } from "../../util/CurrentUserUtils"; +import { DragManager } from "../../util/DragManager"; import { SnappingManager } from "../../util/SnappingManager"; import { CollectionStackedTimeline } from "../collections/CollectionStackedTimeline"; import { ContextMenu } from "../ContextMenu"; import { ContextMenuProps } from "../ContextMenuItem"; import { ViewBoxAnnotatableComponent, - ViewBoxAnnotatableProps, + ViewBoxAnnotatableProps } from "../DocComponent"; +import { Colors } from "../global/globalEnums"; import "./AudioBox.scss"; import { FieldView, FieldViewProps } from "./FieldView"; import { LinkDocPreview } from "./LinkDocPreview"; -import { faLessThan } from "@fortawesome/free-solid-svg-icons"; -import { Colors } from "../global/globalEnums"; import e = require("connect-flash"); declare class MediaRecorder { @@ -167,11 +167,13 @@ export class AudioBox extends ViewBoxAnnotatableComponent< } componentWillUnmount() { + this.dropDisposer?.(); Object.values(this._disposers).forEach((disposer) => disposer?.()); const ind = DocUtils.ActiveRecordings.indexOf(this); ind !== -1 && DocUtils.ActiveRecordings.splice(ind, 1); } + private dropDisposer?: DragManager.DragDropDisposer; @action componentDidMount() { this.props.setContentView?.(this); // this tells the DocumentView that this AudioBox is the "content" of the document. this allows the DocumentView to indirectly call getAnchor() on the AudioBox when making a link. @@ -337,8 +339,8 @@ export class AudioBox extends ViewBoxAnnotatableComponent< (this.layoutDoc.dontAutoPlayFollowedLinks ? "" : "Don't") + " play when link is selected", event: () => - (this.layoutDoc.dontAutoPlayFollowedLinks = - !this.layoutDoc.dontAutoPlayFollowedLinks), + (this.layoutDoc.dontAutoPlayFollowedLinks = + !this.layoutDoc.dontAutoPlayFollowedLinks), icon: "expand-arrows-alt", }); funcs.push({ @@ -591,6 +593,17 @@ export class AudioBox extends ViewBoxAnnotatableComponent< : ""; return (
{ + if (r && this._stackedTimeline.current) { + this.dropDisposer?.(); + this.dropDisposer = DragManager.MakeDropTarget(r, + (e, de) => { + const [xp, yp] = this.props.ScreenToLocalTransform().transformPoint(de.x, de.y); + de.complete.docDragData && this._stackedTimeline.current!.internalDocDrop(e, de, de.complete.docDragData, xp); + } + , this.layoutDoc, undefined); + } + }} className="audiobox-container" onContextMenu={this.specificContextMenu} onClick={ @@ -607,9 +620,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
@@ -638,77 +649,77 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
[diff hunk unreadable here: another AudioBox.tsx render hunk whose JSX markup was stripped during extraction. The surviving fragments (record button, playhead, {this.renderTimeline}, {this.audio}, and the formatTime(...) readouts) again match on the removed and added sides, so this hunk too is essentially a re-indentation of that markup.]
); } -- cgit v1.2.3-70-g09d2 From bafc47fe100002e37c7abcada3dc44f3bfb66f62 Mon Sep 17 00:00:00 2001 From: bobzel Date: Thu, 23 Sep 2021 11:16:53 -0400 Subject: a bunch of fixes to audio timelines to support undoing clips, simplifying audioWaveform and having it recompute when a clip is made. --- src/client/views/AudioWaveform.tsx | 117 ++++++++------------- .../collections/CollectionStackedTimeline.tsx | 16 ++- src/client/views/nodes/AudioBox.tsx | 59 ++++++----- src/client/views/nodes/VideoBox.tsx | 12 ++- 4 files changed, 91 insertions(+), 113 deletions(-) (limited to 'src/client/views/collections') diff --git a/src/client/views/AudioWaveform.tsx b/src/client/views/AudioWaveform.tsx index 8f3b7c2cd..1e676e1f0 100644 --- a/src/client/views/AudioWaveform.tsx +++ b/src/client/views/AudioWaveform.tsx @@ -1,6 +1,6 @@ import React = require("react"); import axios from "axios"; -import { action, computed } from "mobx"; +import { action, computed, reaction, IReactionDisposer } from "mobx"; import { observer } from "mobx-react"; import Waveform from "react-audio-waveform"; import { Doc } from "../../fields/Doc"; @@ -12,7 +12,8 @@ import "./AudioWaveform.scss"; import { Colors } from "./global/globalEnums"; export interface AudioWaveformProps { - duration: number; + duration: number; // length of media clip + rawDuration: number; // length of underlying media data mediaPath: string; layoutDoc: Doc; trimming: boolean; @@ -22,19 +23,28 @@ export interface AudioWaveformProps { @observer export class AudioWaveform extends React.Component { public static NUMBER_OF_BUCKETS = 100; + _disposer: IReactionDisposer | undefined; @computed get _waveHeight() { return Math.max(50, this.props.PanelHeight()); } + + @computed get clipStart() { return NumCast(this.props.layoutDoc.clipStart); } + @computed get clipEnd() { return NumCast(this.props.layoutDoc.clipEnd, this.props.rawDuration); } + @computed get audioBuckets() { return Cast(this.props.layoutDoc.audioBuckets, listSpec("number"), []); } + @computed get audioBucketRange() { return Cast(this.props.layoutDoc.audioBucketRange, listSpec("number"), [-1, -1]); } + componentWillUnmount() { + this._disposer?.(); + } componentDidMount() { - const audioBuckets = Cast( - this.props.layoutDoc.audioBuckets, - listSpec("number"), - [] - ); - if (!audioBuckets.length) { - this.props.layoutDoc.audioBuckets = new List([0, 0]); /// "lock" to prevent other views from computing the same data - setTimeout(this.createWaveformBuckets); - } + this._disposer = reaction(() => [this.clipStart, this.clipEnd, this.audioBuckets.length, ...this.audioBucketRange], + (range) => { + if (range[2] !== AudioWaveform.NUMBER_OF_BUCKETS || range[3] !== range[0] || range[4] !== range[1]) { + this.props.layoutDoc.audioBucketRange = new List([range[0], range[1]]); // setting these values here serves as a "lock" to prevent multiple attempts to create the waveform at nearly the same time. 
+ this.props.layoutDoc.audioBuckets = new List(numberRange(AudioWaveform.NUMBER_OF_BUCKETS)); + setTimeout(this.createWaveformBuckets); + } + }, { fireImmediately: true }); + } // decodes the audio file into peaks for generating the waveform @@ -45,81 +55,44 @@ export class AudioWaveform extends React.Component { context.decodeAudioData( response.data, action((buffer) => { - const decodedAudioData = buffer.getChannelData(0); + const rawDecodedAudioData = buffer.getChannelData(0); + const startInd = this.clipStart / this.props.rawDuration; + const endInd = this.clipEnd / this.props.rawDuration; + const decodedAudioData = rawDecodedAudioData.slice(Math.floor(startInd * rawDecodedAudioData.length), Math.floor(endInd * rawDecodedAudioData.length)); const bucketDataSize = Math.floor( decodedAudioData.length / AudioWaveform.NUMBER_OF_BUCKETS ); const brange = Array.from(Array(bucketDataSize)); - this.props.layoutDoc.audioBuckets = new List( - numberRange(AudioWaveform.NUMBER_OF_BUCKETS).map( - (i: number) => - brange.reduce( - (p, x, j) => - Math.abs( - Math.max(p, decodedAudioData[i * bucketDataSize + j]) - ), - 0 - ) / 2 - ) + const bucketList = numberRange(AudioWaveform.NUMBER_OF_BUCKETS).map( + (i: number) => + brange.reduce( + (p, x, j) => + Math.abs( + Math.max(p, decodedAudioData[i * bucketDataSize + j]) + ), + 0 + ) / 2 ); + this.props.layoutDoc.audioBucketRange = new List([this.clipStart, this.clipEnd]); + this.props.layoutDoc.audioBuckets = new List(bucketList); }) ); } ); } - - @action - createTrimBuckets = () => { - const audioBuckets = Cast( - this.props.layoutDoc.audioBuckets, - listSpec("number"), - [] - ); - - const start = Math.floor( - (NumCast(this.props.layoutDoc.clipStart) / this.props.duration) * 100 - ); - const end = Math.floor( - (NumCast(this.props.layoutDoc.clipEnd) / this.props.duration) * 100 - ); - return audioBuckets.slice(start, end); - } - render() { - const audioBuckets = Cast( - this.props.layoutDoc.audioBuckets, - listSpec("number"), - [] - ); - return (
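// Illustrative sketch (hypothetical helper, not the component above): reduce a decoded audio channel
// to a fixed number of peak buckets for drawing a waveform, restricted to the clip's slice of the full
// recording, mirroring the slicing-and-reducing done in createWaveformBuckets above.
function peakBuckets(channel: Float32Array, rawDuration: number, clipStart: number, clipEnd: number, numBuckets: number): number[] {
    const from = Math.floor((clipStart / rawDuration) * channel.length);
    const to = Math.floor((clipEnd / rawDuration) * channel.length);
    const clip = channel.subarray(from, to);
    const bucketSize = Math.max(1, Math.floor(clip.length / numBuckets));
    const buckets: number[] = [];
    for (let i = 0; i < numBuckets; i++) {
        let peak = 0;
        for (let j = i * bucketSize; j < (i + 1) * bucketSize && j < clip.length; j++) {
            peak = Math.max(peak, Math.abs(clip[j])); // track the loudest sample in this bucket
        }
        buckets.push(peak / 2); // halved, matching the scaling used above
    }
    return buckets;
}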
- {this.props.trimming || !this.props.layoutDoc.clipEnd ? ( - - ) : ( - - )} +
); } diff --git a/src/client/views/collections/CollectionStackedTimeline.tsx b/src/client/views/collections/CollectionStackedTimeline.tsx index 970947b12..0985e5b2e 100644 --- a/src/client/views/collections/CollectionStackedTimeline.tsx +++ b/src/client/views/collections/CollectionStackedTimeline.tsx @@ -61,9 +61,10 @@ export type CollectionStackedTimelineProps = { endTag: string; mediaPath: string; dictationKey: string; + rawDuration: number; trimming: boolean; - trimStart: number; - trimEnd: number; + trimStart: () => number; + trimEnd: () => number; trimDuration: number; setStartTrim: (newStart: number) => void; setEndTrim: (newEnd: number) => void; @@ -94,11 +95,11 @@ export class CollectionStackedTimeline extends CollectionSubView< } get trimStart() { - return this.props.trimStart; + return this.props.trimStart(); } get trimEnd() { - return this.props.trimEnd; + return this.props.trimEnd(); } get duration() { @@ -540,6 +541,7 @@ export class CollectionStackedTimeline extends CollectionSubView< return !this.props.mediaPath ? null : (
= this.layoutDoc._height; @observable _paused: boolean = false; @observable _trimming: boolean = false; - @observable _trimStart: number = NumCast(this.layoutDoc.clipStart) ? NumCast(this.layoutDoc.clipStart) : 0; - @observable _trimEnd: number = NumCast(this.layoutDoc.clipEnd) ? NumCast(this.layoutDoc.clipEnd) - : this.duration; + @observable _trimStart: number = NumCast(this.layoutDoc.clipStart); + @observable _trimEnd: number = NumCast(this.layoutDoc.clipEnd, this.duration); + @computed get trimStart() { return this._trimming ? this._trimStart : NumCast(this.layoutDoc.clipStart); } + @computed get trimEnd() { return this._trimming ? this._trimEnd : NumCast(this.layoutDoc.clipEnd, this.duration); } @computed get mediaState(): | undefined @@ -84,7 +86,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent< | "recording" | "paused" | "playing" { - return this.dataDoc.mediaState as + return this.layoutDoc.mediaState as | undefined | "pendingRecording" | "recording" @@ -92,7 +94,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent< | "playing"; } set mediaState(value) { - this.dataDoc.mediaState = value; + this.layoutDoc.mediaState = value; } public static SetScrubTime = action((timeInMillisFrom1970: number) => { AudioBox._scrubTime = 0; @@ -104,11 +106,15 @@ export class AudioBox extends ViewBoxAnnotatableComponent< DateField )?.date.getTime(); } - @computed get duration() { + @computed get rawDuration() { return NumCast(this.dataDoc[`${this.fieldKey}-duration`]); } + @computed get duration() { + return NumCast(this.layoutDoc.clipEnd, NumCast(this.layoutDoc.clipStart) + NumCast(this.dataDoc[`${this.fieldKey}-duration`])) - NumCast(this.layoutDoc.clipStart); + // NumCast(this.dataDoc[`${this.fieldKey}-duration`]); + } @computed get trimDuration() { - return this._trimming && this._trimEnd ? this.duration : this._trimEnd - this._trimStart; + return this.trimEnd - this.trimStart; } @computed get anchorDocs() { return DocListCast(this.dataDoc[this.annotationKey]); @@ -228,8 +234,6 @@ export class AudioBox extends ViewBoxAnnotatableComponent< runInAction( () => (this.dataDoc[this.fieldKey + "-duration"] = htmlEle.duration) ); - this.layoutDoc.clipEnd = this.layoutDoc.clipEnd ? Math.min(this.duration, NumCast(this.layoutDoc.clipEnd)) : this.duration; - this._trimEnd = this._trimEnd ? 
Math.min(this.duration, this._trimEnd) : this.duration; this.links .map((l) => this.getLinkData(l)) .forEach(({ la1, la2, linkTime }) => { @@ -259,7 +263,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent< // play back the audio from time @action - playFrom = (seekTimeInSeconds: number, endTime: number = this._trimEnd, fullPlay: boolean = false) => { + playFrom = (seekTimeInSeconds: number, endTime: number = this.trimEnd, fullPlay: boolean = false) => { clearTimeout(this._play); if (Number.isNaN(this._ele?.duration)) { setTimeout(() => this.playFrom(seekTimeInSeconds, endTime), 500); @@ -270,9 +274,9 @@ export class AudioBox extends ViewBoxAnnotatableComponent< } else { this.Pause(); } - } else if (this._trimStart <= endTime && seekTimeInSeconds <= this._trimEnd) { - const start = Math.max(this._trimStart, seekTimeInSeconds); - const end = Math.min(this._trimEnd, endTime); + } else if (this.trimStart <= endTime && seekTimeInSeconds <= this.trimEnd) { + const start = Math.max(this.trimStart, seekTimeInSeconds); + const end = Math.min(this.trimEnd, endTime); this._ele.currentTime = start; this._ele.play(); runInAction(() => (this.mediaState = "playing")); @@ -385,14 +389,14 @@ export class AudioBox extends ViewBoxAnnotatableComponent< Play = (e?: any) => { let start; if (this._ended || this._ele!.currentTime === this.duration) { - start = this._trimStart; + start = NumCast(this.layoutDoc.clipStart); this._ended = false; } else { start = this._ele!.currentTime; } - this.playFrom(start, this._trimEnd, true); + this.playFrom(start, this.trimEnd, true); e?.stopPropagation?.(); } @@ -502,16 +506,16 @@ export class AudioBox extends ViewBoxAnnotatableComponent< } // hides trim controls and displays new clip - @action - finishTrim = () => { + @undoBatch + finishTrim = action(() => { if (this.mediaState === "playing") { this.Pause(); } - this.layoutDoc.clipStart = this._trimStart; - this.layoutDoc.clipEnd = this._trimEnd; + this.layoutDoc.clipStart = this.trimStart; + this.layoutDoc.clipEnd = this.trimEnd; + this.setAnchorTime(Math.max(Math.min(this.trimEnd, this._ele!.currentTime), this.trimStart)); this._trimming = false; - this.setAnchorTime(Math.max(Math.min(this._trimEnd, this._ele!.currentTime), this._trimStart)); - } + }); @action setStartTrim = (newStart: number) => { @@ -544,6 +548,8 @@ export class AudioBox extends ViewBoxAnnotatableComponent< this.heightPercent) / 100 // panelHeight * heightPercent is player height. * heightPercent is timeline height (as per css inline) timelineWidth = () => this.props.PanelWidth() - AudioBox.playheadWidth; + trimEndFunc = () => this.trimEnd; + trimStartFunc = () => this.trimStart; @computed get renderTimeline() { return ( {this._trimming ? formatTime(Math.round(NumCast(this.layoutDoc._currentTimecode))) - : formatTime(Math.round(NumCast(this.layoutDoc._currentTimecode) - NumCast(this._trimStart)))} + : formatTime(Math.round(NumCast(this.layoutDoc._currentTimecode) - NumCast(this.trimStart)))}
{this._trimming || !this._trimEnd ? diff --git a/src/client/views/nodes/VideoBox.tsx b/src/client/views/nodes/VideoBox.tsx index 3fc460102..84eeacc29 100644 --- a/src/client/views/nodes/VideoBox.tsx +++ b/src/client/views/nodes/VideoBox.tsx @@ -9,7 +9,7 @@ import { InkTool } from "../../../fields/InkField"; import { makeInterface } from "../../../fields/Schema"; import { Cast, NumCast, StrCast } from "../../../fields/Types"; import { AudioField, nullAudio, VideoField } from "../../../fields/URLField"; -import { emptyFunction, formatTime, OmitKeys, returnOne, setupMoveUpEvents, Utils, returnFalse } from "../../../Utils"; +import { emptyFunction, formatTime, OmitKeys, returnOne, setupMoveUpEvents, Utils, returnFalse, returnZero } from "../../../Utils"; import { Docs, DocUtils } from "../../documents/Documents"; import { Networking } from "../../Network"; import { CurrentUserUtils } from "../../util/CurrentUserUtils"; @@ -526,6 +526,7 @@ export class VideoBox extends ViewBoxAnnotatableComponent this.props.ScreenToLocalTransform().scale(this.scaling()).translate(0, -this.heightPercent / 100 * this.props.PanelHeight()); setAnchorTime = (time: number) => this.player!.currentTime = this.layoutDoc._currentTimecode = time; timelineHeight = () => this.props.PanelHeight() * (100 - this.heightPercent) / 100; + trimEndFunc = () => this.duration; @computed get renderTimeline() { return
{ }} - setEndTrim={() => { }} + setStartTrim={emptyFunction} + setEndTrim={emptyFunction} />
; } -- cgit v1.2.3-70-g09d2 From 676cc1ef15653590eecf7f588fe02dd7d75863cc Mon Sep 17 00:00:00 2001 From: bobzel Date: Thu, 23 Sep 2021 21:52:48 -0400 Subject: fixed trimming already trimmed clips. --- src/client/views/collections/CollectionStackedTimeline.tsx | 14 ++++++++------ src/client/views/nodes/AudioBox.tsx | 10 ++++------ src/client/views/nodes/VideoBox.tsx | 2 ++ 3 files changed, 14 insertions(+), 12 deletions(-) (limited to 'src/client/views/collections') diff --git a/src/client/views/collections/CollectionStackedTimeline.tsx b/src/client/views/collections/CollectionStackedTimeline.tsx index 0985e5b2e..12d70c05d 100644 --- a/src/client/views/collections/CollectionStackedTimeline.tsx +++ b/src/client/views/collections/CollectionStackedTimeline.tsx @@ -63,6 +63,8 @@ export type CollectionStackedTimelineProps = { dictationKey: string; rawDuration: number; trimming: boolean; + clipStart: number; + clipEnd: number; trimStart: () => number; trimEnd: () => number; trimDuration: number; @@ -332,7 +334,7 @@ export class CollectionStackedTimeline extends CollectionSubView< this.props.setEndTrim(Math.max( Math.min( this.trimEnd + (e.movementX / rect.width) * this.duration, - this.duration + this.props.clipStart + this.duration ), this.trimStart + this.minLength )); @@ -582,7 +584,7 @@ export class CollectionStackedTimeline extends CollectionSubView< start + (10 / timelineContentWidth) * this.duration ); const left = this.props.trimming ? - (start / this.duration) * timelineContentWidth + ((start - this.props.clipStart) / this.duration) * timelineContentWidth : Math.max((start - this.trimStart) / this.props.trimDuration * timelineContentWidth, 0); const top = (d.level / maxLevel) * this.timelineContentHeight() + 15; const timespan = end - start; @@ -631,7 +633,7 @@ export class CollectionStackedTimeline extends CollectionSubView< className="collectionStackedTimeline-current" style={{ left: this.props.trimming - ? `${(this.currentTime / this.duration) * 100}%` + ? `${((this.currentTime - this.props.clipStart) / this.duration) * 100}%` : `${(this.currentTime - this.trimStart) / (this.trimEnd - this.trimStart) * 100}%`, }} /> @@ -640,13 +642,13 @@ export class CollectionStackedTimeline extends CollectionSubView< <>
{ - if (!this.duration) { - this.timecodeChanged(); - } if (this.mediaState === "playing") { this.Pause(); } @@ -581,6 +577,8 @@ export class AudioBox extends ViewBoxAnnotatableComponent< playLink={this.playLink} PanelWidth={this.timelineWidth} PanelHeight={this.timelineHeight} + clipStart={this.clipStart} + clipEnd={this.clipEnd} trimming={this._trimming} trimStart={this.trimStartFunc} trimEnd={this.trimEndFunc} diff --git a/src/client/views/nodes/VideoBox.tsx b/src/client/views/nodes/VideoBox.tsx index 84eeacc29..2485e7658 100644 --- a/src/client/views/nodes/VideoBox.tsx +++ b/src/client/views/nodes/VideoBox.tsx @@ -551,6 +551,8 @@ export class VideoBox extends ViewBoxAnnotatableComponent Date: Thu, 23 Sep 2021 22:01:00 -0400 Subject: renamed props for stackedTimeline --- .../collections/CollectionStackedTimeline.tsx | 58 +++++++++++----------- src/client/views/nodes/AudioBox.tsx | 7 +-- src/client/views/nodes/VideoBox.tsx | 6 +-- 3 files changed, 36 insertions(+), 35 deletions(-) (limited to 'src/client/views/collections') diff --git a/src/client/views/collections/CollectionStackedTimeline.tsx b/src/client/views/collections/CollectionStackedTimeline.tsx index 12d70c05d..8b937c278 100644 --- a/src/client/views/collections/CollectionStackedTimeline.tsx +++ b/src/client/views/collections/CollectionStackedTimeline.tsx @@ -50,7 +50,7 @@ import { DragManager } from "../../util/DragManager"; type PanZoomDocument = makeInterface<[]>; const PanZoomDocument = makeInterface(); export type CollectionStackedTimelineProps = { - duration: number; + clipDuration: number; Play: () => void; Pause: () => void; playLink: (linkDoc: Doc) => void; @@ -67,7 +67,7 @@ export type CollectionStackedTimelineProps = { clipEnd: number; trimStart: () => number; trimEnd: () => number; - trimDuration: number; + trimDuration: () => number; setStartTrim: (newStart: number) => void; setEndTrim: (newEnd: number) => void; }; @@ -91,7 +91,7 @@ export class CollectionStackedTimeline extends CollectionSubView< get minLength() { const rect = this._timeline?.getBoundingClientRect(); if (rect) { - return 0.05 * this.duration; + return 0.05 * this.clipDuration; } return 0; } @@ -104,8 +104,8 @@ export class CollectionStackedTimeline extends CollectionSubView< return this.props.trimEnd(); } - get duration() { - return this.props.duration; + get clipDuration() { + return this.props.clipDuration; } @computed get currentTime() { @@ -116,8 +116,8 @@ export class CollectionStackedTimeline extends CollectionSubView<
); @@ -166,7 +166,7 @@ export class CollectionStackedTimeline extends CollectionSubView< toTimeline = (screen_delta: number, width: number) => { return Math.max( this.trimStart, - Math.min(this.trimEnd, (screen_delta / width) * this.props.trimDuration + this.trimStart)); + Math.min(this.trimEnd, (screen_delta / width) * this.props.trimDuration() + this.trimStart)); } rangeClickScript = () => CollectionStackedTimeline.RangeScript; @@ -283,10 +283,10 @@ export class CollectionStackedTimeline extends CollectionSubView< undefined, () => { !wasPlaying && - (this.props.trimming && this.duration ? - this.props.setTime(((clientX - rect.x) / rect.width) * this.duration) + (this.props.trimming && this.clipDuration ? + this.props.setTime(((clientX - rect.x) / rect.width) * this.clipDuration) : - this.props.setTime(((clientX - rect.x) / rect.width) * this.props.trimDuration + this.trimStart) + this.props.setTime(((clientX - rect.x) / rect.width) * this.props.trimDuration() + this.trimStart) ); } ); @@ -305,7 +305,7 @@ export class CollectionStackedTimeline extends CollectionSubView< if (rect && this.props.isContentActive()) { this.props.setStartTrim(Math.min( Math.max( - this.trimStart + (e.movementX / rect.width) * this.duration, + this.trimStart + (e.movementX / rect.width) * this.clipDuration, 0 ), this.trimEnd - this.minLength @@ -333,8 +333,8 @@ export class CollectionStackedTimeline extends CollectionSubView< if (rect && this.props.isContentActive()) { this.props.setEndTrim(Math.max( Math.min( - this.trimEnd + (e.movementX / rect.width) * this.duration, - this.props.clipStart + this.duration + this.trimEnd + (e.movementX / rect.width) * this.clipDuration, + this.props.clipStart + this.clipDuration ), this.trimStart + this.minLength )); @@ -344,7 +344,7 @@ export class CollectionStackedTimeline extends CollectionSubView< emptyFunction, action((e, doubleTap) => { if (doubleTap) { - this.props.setEndTrim(this.duration); + this.props.setEndTrim(this.clipDuration); } }) ); @@ -362,8 +362,8 @@ export class CollectionStackedTimeline extends CollectionSubView< const timelineContentWidth = this.props.PanelWidth(); for (let i = 0; i < docDragData.droppedDocuments.length; i++) { const d = Doc.GetProto(docDragData.droppedDocuments[i]); - d._timecodeToHide = x / timelineContentWidth * this.props.trimDuration + NumCast(d._timecodeToHide) - NumCast(d._timecodeToShow); - d._timecodeToShow = x / timelineContentWidth * this.props.trimDuration; + d._timecodeToHide = x / timelineContentWidth * this.props.trimDuration() + NumCast(d._timecodeToHide) - NumCast(d._timecodeToShow); + d._timecodeToShow = x / timelineContentWidth * this.props.trimDuration(); } return true; @@ -469,7 +469,7 @@ export class CollectionStackedTimeline extends CollectionSubView< const x1 = this.anchorStart(m); const x2 = this.anchorEnd( m, - x1 + (10 / timelineContentWidth) * this.duration + x1 + (10 / timelineContentWidth) * this.clipDuration ); let max = 0; const overlappedLevels = new Set( @@ -544,7 +544,7 @@ export class CollectionStackedTimeline extends CollectionSubView<
@@ -642,14 +642,14 @@ export class CollectionStackedTimeline extends CollectionSubView< <>
@@ -666,8 +666,8 @@ export class CollectionStackedTimeline extends CollectionSubView<
diff --git a/src/client/views/nodes/AudioBox.tsx b/src/client/views/nodes/AudioBox.tsx index 918933194..f6d6ff440 100644 --- a/src/client/views/nodes/AudioBox.tsx +++ b/src/client/views/nodes/AudioBox.tsx @@ -546,6 +546,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent< timelineWidth = () => this.props.PanelWidth() - AudioBox.playheadWidth; trimEndFunc = () => this.trimEnd; trimStartFunc = () => this.trimStart; + trimDurationFunc = () => this.trimDuration; @computed get renderTimeline() { return ( diff --git a/src/client/views/nodes/VideoBox.tsx b/src/client/views/nodes/VideoBox.tsx index 2485e7658..8b33842ff 100644 --- a/src/client/views/nodes/VideoBox.tsx +++ b/src/client/views/nodes/VideoBox.tsx @@ -538,8 +538,6 @@ export class VideoBox extends ViewBoxAnnotatableComponent -- cgit v1.2.3-70-g09d2 From de658e50f58607e6a0bcb2cdaaca28ca81d83b50 Mon Sep 17 00:00:00 2001 From: bobzel Date: Thu, 23 Sep 2021 22:16:52 -0400 Subject: cleanup --- src/client/views/collections/CollectionStackedTimeline.tsx | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) (limited to 'src/client/views/collections') diff --git a/src/client/views/collections/CollectionStackedTimeline.tsx b/src/client/views/collections/CollectionStackedTimeline.tsx index 8b937c278..929bfa055 100644 --- a/src/client/views/collections/CollectionStackedTimeline.tsx +++ b/src/client/views/collections/CollectionStackedTimeline.tsx @@ -583,9 +583,7 @@ export class CollectionStackedTimeline extends CollectionSubView< d.anchor, start + (10 / timelineContentWidth) * this.clipDuration ); - const left = this.props.trimming ? - ((start - this.props.clipStart) / this.clipDuration) * timelineContentWidth - : Math.max((start - this.trimStart) / this.props.trimDuration() * timelineContentWidth, 0); + const left = Math.max((start - this.props.clipStart) / this.clipDuration * timelineContentWidth, 0); const top = (d.level / maxLevel) * this.timelineContentHeight() + 15; const timespan = end - start; const width = (timespan / this.clipDuration) * timelineContentWidth; @@ -632,9 +630,7 @@ export class CollectionStackedTimeline extends CollectionSubView<
@@ -649,8 +645,7 @@ export class CollectionStackedTimeline extends CollectionSubView< className="collectionStackedTimeline-trim-controls" style={{ left: `${((this.trimStart - this.props.clipStart) / this.clipDuration) * 100}%`, - width: `${((this.trimEnd - this.trimStart) / this.clipDuration) * 100 - }%`, + width: `${((this.trimEnd - this.trimStart) / this.clipDuration) * 100}%`, }} >
-- cgit v1.2.3-70-g09d2 From 9675e948be8a7ea2d86c8ca68a89c09452ece0e7 Mon Sep 17 00:00:00 2001 From: bobzel Date: Thu, 23 Sep 2021 23:22:39 -0400 Subject: added code for editing the original waveform, not a clip when trimming is activated. --- src/client/views/AudioWaveform.tsx | 25 ++++++++++++---------- .../collections/CollectionStackedTimeline.tsx | 2 ++ src/client/views/nodes/AudioBox.tsx | 13 ++++++++--- 3 files changed, 26 insertions(+), 14 deletions(-) (limited to 'src/client/views/collections') diff --git a/src/client/views/AudioWaveform.tsx b/src/client/views/AudioWaveform.tsx index 1e676e1f0..7d83ea3dc 100644 --- a/src/client/views/AudioWaveform.tsx +++ b/src/client/views/AudioWaveform.tsx @@ -17,6 +17,8 @@ export interface AudioWaveformProps { mediaPath: string; layoutDoc: Doc; trimming: boolean; + clipStart: number; + clipEnd: number; PanelHeight: () => number; } @@ -28,20 +30,22 @@ export class AudioWaveform extends React.Component { return Math.max(50, this.props.PanelHeight()); } - @computed get clipStart() { return NumCast(this.props.layoutDoc.clipStart); } - @computed get clipEnd() { return NumCast(this.props.layoutDoc.clipEnd, this.props.rawDuration); } - @computed get audioBuckets() { return Cast(this.props.layoutDoc.audioBuckets, listSpec("number"), []); } - @computed get audioBucketRange() { return Cast(this.props.layoutDoc.audioBucketRange, listSpec("number"), [-1, -1]); } + @computed get clipStart() { return this.props.clipStart; } + @computed get clipEnd() { return this.props.clipEnd; } + audioBucketField = (start: number, end: number) => { return "audioBuckets-" + start.toFixed(2) + "-" + end.toFixed(2); } + @computed get audioBuckets() { return Cast(this.props.layoutDoc[this.audioBucketField(this.clipStart, this.clipEnd)], listSpec("number"), []); } componentWillUnmount() { this._disposer?.(); } componentDidMount() { - this._disposer = reaction(() => [this.clipStart, this.clipEnd, this.audioBuckets.length, ...this.audioBucketRange], + this._disposer = reaction(() => [this.clipStart, this.clipEnd, this.audioBuckets.length], (range) => { - if (range[2] !== AudioWaveform.NUMBER_OF_BUCKETS || range[3] !== range[0] || range[4] !== range[1]) { - this.props.layoutDoc.audioBucketRange = new List([range[0], range[1]]); // setting these values here serves as a "lock" to prevent multiple attempts to create the waveform at nearly the same time. - this.props.layoutDoc.audioBuckets = new List(numberRange(AudioWaveform.NUMBER_OF_BUCKETS)); - setTimeout(this.createWaveformBuckets); + if (range[2] !== AudioWaveform.NUMBER_OF_BUCKETS) { + if (!this.props.layoutDoc[this.audioBucketField(range[0], range[1])]) { + // setting these values here serves as a "lock" to prevent multiple attempts to create the waveform at nerly the same time. 
+ this.props.layoutDoc[this.audioBucketField(range[0], range[1])] = new List(numberRange(AudioWaveform.NUMBER_OF_BUCKETS)); + setTimeout(this.createWaveformBuckets); + } } }, { fireImmediately: true }); @@ -74,8 +78,7 @@ export class AudioWaveform extends React.Component { 0 ) / 2 ); - this.props.layoutDoc.audioBucketRange = new List([this.clipStart, this.clipEnd]); - this.props.layoutDoc.audioBuckets = new List(bucketList); + this.props.layoutDoc[this.audioBucketField(this.clipStart, this.clipEnd)] = new List(bucketList); }) ); } diff --git a/src/client/views/collections/CollectionStackedTimeline.tsx b/src/client/views/collections/CollectionStackedTimeline.tsx index 929bfa055..e00e66295 100644 --- a/src/client/views/collections/CollectionStackedTimeline.tsx +++ b/src/client/views/collections/CollectionStackedTimeline.tsx @@ -547,6 +547,8 @@ export class CollectionStackedTimeline extends CollectionSubView< duration={this.clipDuration} mediaPath={this.props.mediaPath} layoutDoc={this.layoutDoc} + clipStart={this.props.clipStart} + clipEnd={this.props.clipEnd} PanelHeight={this.timelineContentHeight} trimming={this.props.trimming} /> diff --git a/src/client/views/nodes/AudioBox.tsx b/src/client/views/nodes/AudioBox.tsx index f6d6ff440..6a25ffaeb 100644 --- a/src/client/views/nodes/AudioBox.tsx +++ b/src/client/views/nodes/AudioBox.tsx @@ -577,9 +577,16 @@ export class AudioBox extends ViewBoxAnnotatableComponent< PanelWidth={this.timelineWidth} PanelHeight={this.timelineHeight} rawDuration={this.rawDuration} - clipStart={this.clipStart} - clipEnd={this.clipEnd} - clipDuration={this.duration} + + // this edits the entire waveform when trimming is activated + clipStart={this._trimming ? 0 : this.clipStart} + clipEnd={this._trimming ? this.rawDuration : this.clipEnd} + clipDuration={this._trimming ? this.rawDuration : this.duration} + // this edits just the current waveform clip when trimming is activated + // clipStart={this.clipStart} + // clipEnd={this.clipEnd} + // clipDuration={this.duration} + trimming={this._trimming} trimStart={this.trimStartFunc} trimEnd={this.trimEndFunc} -- cgit v1.2.3-70-g09d2 From a4b3114f3792c80e20a3f40053ec4724729c1100 Mon Sep 17 00:00:00 2001 From: bobzel Date: Fri, 24 Sep 2021 00:40:30 -0400 Subject: added doubleclick on clip button to switch to editing entire timeline vs. single click to edit just the clip. fixed some audio timeline bugs with printing out current time and time span when trimming. cleaned up audioWaveform bucket fields. 
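// Illustrative sketch (hypothetical names): the diff below keys the cached waveform buckets by the clip
// range itself, so each trim of the same recording gets and keeps its own precomputed peaks instead of
// overwriting one shared field; writing a placeholder list under that key first acts as a cheap lock
// against concurrent recomputation.
function bucketFieldKey(clipStart: number, clipEnd: number): string {
    // e.g. "audioBuckets/0_00/12_50" -- dots replaced so the key stays a single path-like token
    const tag = (t: number) => t.toFixed(2).replace(".", "_");
    return `audioBuckets/${tag(clipStart)}/${tag(clipEnd)}`;
}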
--- src/client/views/AudioWaveform.tsx | 24 +++---- .../collections/CollectionStackedTimeline.tsx | 13 ++-- src/client/views/nodes/AudioBox.tsx | 78 ++++++++++++---------- 3 files changed, 56 insertions(+), 59 deletions(-) (limited to 'src/client/views/collections') diff --git a/src/client/views/AudioWaveform.tsx b/src/client/views/AudioWaveform.tsx index 7d83ea3dc..f7b117130 100644 --- a/src/client/views/AudioWaveform.tsx +++ b/src/client/views/AudioWaveform.tsx @@ -32,27 +32,25 @@ export class AudioWaveform extends React.Component { @computed get clipStart() { return this.props.clipStart; } @computed get clipEnd() { return this.props.clipEnd; } - audioBucketField = (start: number, end: number) => { return "audioBuckets-" + start.toFixed(2) + "-" + end.toFixed(2); } + audioBucketField = (start: number, end: number) => { return "audioBuckets/" + start.toFixed(2).replace(".", "_") + "/" + end.toFixed(2).replace(".", "_"); } @computed get audioBuckets() { return Cast(this.props.layoutDoc[this.audioBucketField(this.clipStart, this.clipEnd)], listSpec("number"), []); } componentWillUnmount() { this._disposer?.(); } componentDidMount() { - this._disposer = reaction(() => [this.clipStart, this.clipEnd, this.audioBuckets.length], - (range) => { - if (range[2] !== AudioWaveform.NUMBER_OF_BUCKETS) { - if (!this.props.layoutDoc[this.audioBucketField(range[0], range[1])]) { - // setting these values here serves as a "lock" to prevent multiple attempts to create the waveform at nerly the same time. - this.props.layoutDoc[this.audioBucketField(range[0], range[1])] = new List(numberRange(AudioWaveform.NUMBER_OF_BUCKETS)); - setTimeout(this.createWaveformBuckets); - } + this._disposer = reaction(() => ({ clipStart: this.clipStart, clipEnd: this.clipEnd, fieldKey: this.audioBucketField(this.clipStart, this.clipEnd) }), + ({ clipStart, clipEnd, fieldKey }) => { + if (!this.props.layoutDoc[fieldKey]) { + // setting these values here serves as a "lock" to prevent multiple attempts to create the waveform at nerly the same time. 
+ this.props.layoutDoc[fieldKey] = new List(numberRange(AudioWaveform.NUMBER_OF_BUCKETS)); + setTimeout(() => this.createWaveformBuckets(fieldKey, clipStart, clipEnd)); } }, { fireImmediately: true }); } // decodes the audio file into peaks for generating the waveform - createWaveformBuckets = async () => { + createWaveformBuckets = async (fieldKey: string, clipStart: number, clipEnd: number) => { axios({ url: this.props.mediaPath, responseType: "arraybuffer" }).then( (response) => { const context = new window.AudioContext(); @@ -60,8 +58,8 @@ export class AudioWaveform extends React.Component { response.data, action((buffer) => { const rawDecodedAudioData = buffer.getChannelData(0); - const startInd = this.clipStart / this.props.rawDuration; - const endInd = this.clipEnd / this.props.rawDuration; + const startInd = clipStart / this.props.rawDuration; + const endInd = clipEnd / this.props.rawDuration; const decodedAudioData = rawDecodedAudioData.slice(Math.floor(startInd * rawDecodedAudioData.length), Math.floor(endInd * rawDecodedAudioData.length)); const bucketDataSize = Math.floor( @@ -78,7 +76,7 @@ export class AudioWaveform extends React.Component { 0 ) / 2 ); - this.props.layoutDoc[this.audioBucketField(this.clipStart, this.clipEnd)] = new List(bucketList); + this.props.layoutDoc[fieldKey] = new List(bucketList); }) ); } diff --git a/src/client/views/collections/CollectionStackedTimeline.tsx b/src/client/views/collections/CollectionStackedTimeline.tsx index e00e66295..7859d3c3f 100644 --- a/src/client/views/collections/CollectionStackedTimeline.tsx +++ b/src/client/views/collections/CollectionStackedTimeline.tsx @@ -50,7 +50,6 @@ import { DragManager } from "../../util/DragManager"; type PanZoomDocument = makeInterface<[]>; const PanZoomDocument = makeInterface(); export type CollectionStackedTimelineProps = { - clipDuration: number; Play: () => void; Pause: () => void; playLink: (linkDoc: Doc) => void; @@ -65,6 +64,7 @@ export type CollectionStackedTimelineProps = { trimming: boolean; clipStart: number; clipEnd: number; + clipDuration: number; trimStart: () => number; trimEnd: () => number; trimDuration: () => number; @@ -165,8 +165,8 @@ export class CollectionStackedTimeline extends CollectionSubView< } toTimeline = (screen_delta: number, width: number) => { return Math.max( - this.trimStart, - Math.min(this.trimEnd, (screen_delta / width) * this.props.trimDuration() + this.trimStart)); + this.props.clipStart, + Math.min(this.props.clipEnd, (screen_delta / width) * this.props.clipDuration + this.props.clipStart)); } rangeClickScript = () => CollectionStackedTimeline.RangeScript; @@ -282,12 +282,7 @@ export class CollectionStackedTimeline extends CollectionSubView< this.props.isSelected(true) || this.props.isContentActive(), undefined, () => { - !wasPlaying && - (this.props.trimming && this.clipDuration ? 
- this.props.setTime(((clientX - rect.x) / rect.width) * this.clipDuration) - : - this.props.setTime(((clientX - rect.x) / rect.width) * this.props.trimDuration() + this.trimStart) - ); + !wasPlaying && this.props.setTime(((clientX - rect.x) / rect.width) * this.clipDuration + this.props.clipStart); } ); } diff --git a/src/client/views/nodes/AudioBox.tsx b/src/client/views/nodes/AudioBox.tsx index 6a25ffaeb..bfc15cea8 100644 --- a/src/client/views/nodes/AudioBox.tsx +++ b/src/client/views/nodes/AudioBox.tsx @@ -16,7 +16,7 @@ import { makeInterface } from "../../../fields/Schema"; import { ComputedField } from "../../../fields/ScriptField"; import { Cast, NumCast } from "../../../fields/Types"; import { AudioField, nullAudio } from "../../../fields/URLField"; -import { emptyFunction, formatTime, OmitKeys } from "../../../Utils"; +import { emptyFunction, formatTime, OmitKeys, setupMoveUpEvents, returnFalse } from "../../../Utils"; import { DocUtils } from "../../documents/Documents"; import { Networking } from "../../Network"; import { CurrentUserUtils } from "../../util/CurrentUserUtils"; @@ -48,13 +48,14 @@ export class AudioBox extends ViewBoxAnnotatableComponent< ViewBoxAnnotatableProps & FieldViewProps, AudioDocument >(AudioDocument) { - public static LayoutString(fieldKey: string) { - return FieldView.LayoutString(AudioBox, fieldKey); - } + public static LayoutString(fieldKey: string) { return FieldView.LayoutString(AudioBox, fieldKey); } public static Enabled = false; static playheadWidth = 40; // width of playhead static heightPercent = 75; // height of timeline in percent of height of audioBox. static Instance: AudioBox; + static ScopeAll = 2; + static ScopeClip = 1; + static ScopeNone = 0; _disposers: { [name: string]: IReactionDisposer } = {}; _ele: HTMLAudioElement | null = null; @@ -74,14 +75,19 @@ export class AudioBox extends ViewBoxAnnotatableComponent< @observable _position: number = 0; @observable _waveHeight: Opt = this.layoutDoc._height; @observable _paused: boolean = false; - @observable _trimming: boolean = false; + @observable _trimming: number = AudioBox.ScopeNone; @observable _trimStart: number = NumCast(this.layoutDoc.clipStart); @observable _trimEnd: number | undefined = Cast(this.layoutDoc.clipEnd, "number"); - @computed get clipStart() { return NumCast(this.layoutDoc.clipStart); } - @computed get clipEnd() { return NumCast(this.layoutDoc.clipEnd, this.duration); } - @computed get trimStart() { return this._trimming ? this._trimStart : NumCast(this.layoutDoc.clipStart); } + @computed get clipStart() { return this._trimming === AudioBox.ScopeAll ? 0 : NumCast(this.layoutDoc.clipStart); } + @computed get clipDuration() { + return this._trimming === AudioBox.ScopeAll ? NumCast(this.dataDoc[`${this.fieldKey}-duration`]) : + NumCast(this.layoutDoc.clipEnd, this.clipStart + NumCast(this.dataDoc[`${this.fieldKey}-duration`])) - this.clipStart; + } + @computed get clipEnd() { return this.clipStart + this.clipDuration; } + @computed get trimStart() { return this._trimming !== AudioBox.ScopeNone ? this._trimStart : NumCast(this.layoutDoc.clipStart); } + @computed get trimDuration() { return this.trimEnd - this.trimStart; } @computed get trimEnd() { - return this._trimming && this._trimEnd !== undefined ? this._trimEnd : NumCast(this.layoutDoc.clipEnd, this.duration); + return this._trimming !== AudioBox.ScopeNone && this._trimEnd !== undefined ? 
this._trimEnd : NumCast(this.layoutDoc.clipEnd, this.clipDuration); } @computed get mediaState(): @@ -113,13 +119,6 @@ export class AudioBox extends ViewBoxAnnotatableComponent< @computed get rawDuration() { return NumCast(this.dataDoc[`${this.fieldKey}-duration`]); } - @computed get duration() { - return NumCast(this.layoutDoc.clipEnd, NumCast(this.layoutDoc.clipStart) + NumCast(this.dataDoc[`${this.fieldKey}-duration`])) - NumCast(this.layoutDoc.clipStart); - // NumCast(this.dataDoc[`${this.fieldKey}-duration`]); - } - @computed get trimDuration() { - return this.trimEnd - this.trimStart; - } @computed get anchorDocs() { return DocListCast(this.dataDoc[this.annotationKey]); } @@ -269,7 +268,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent< this._ele.currentTime = start; this._ele.play(); runInAction(() => (this.mediaState = "playing")); - if (endTime !== this.duration) { + if (endTime !== this.clipDuration) { this._play = setTimeout( () => { this._ended = fullPlay ? true : this._ended; @@ -309,7 +308,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent< const [{ result }] = await Networking.UploadFilesToServer(e.data); if (!(result instanceof Error)) { this.props.Document[this.props.fieldKey] = new AudioField(result.accessPaths.agnostic.client); - if (this._trimEnd === undefined) this._trimEnd = this.duration; + if (this._trimEnd === undefined) this._trimEnd = this.clipDuration; } }; this._recordStart = new Date().getTime(); @@ -359,9 +358,9 @@ export class AudioBox extends ViewBoxAnnotatableComponent< this.dataDoc[this.fieldKey + "-duration"] = (new Date().getTime() - this._recordStart - this.pauseTime) / 1000; this.mediaState = "paused"; - this._trimEnd = this.duration; + this._trimEnd = this.clipDuration; this.layoutDoc.clipStart = 0; - this.layoutDoc.clipEnd = this.duration; + this.layoutDoc.clipEnd = this.clipDuration; this._stream?.getAudioTracks()[0].stop(); const ind = DocUtils.ActiveRecordings.indexOf(this); ind !== -1 && DocUtils.ActiveRecordings.splice(ind, 1); @@ -378,7 +377,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent< // for play button Play = (e?: any) => { let start; - if (this._ended || this._ele!.currentTime === this.duration) { + if (this._ended || this._ele!.currentTime === this.clipDuration) { start = NumCast(this.layoutDoc.clipStart); this._ended = false; } @@ -494,11 +493,11 @@ export class AudioBox extends ViewBoxAnnotatableComponent< // shows trim controls @action - startTrim = () => { + startTrim = (scope: number) => { if (this.mediaState === "playing") { this.Pause(); } - this._trimming = true; + this._trimming = scope; } // hides trim controls and displays new clip @@ -510,7 +509,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent< this.layoutDoc.clipStart = this.trimStart; this.layoutDoc.clipEnd = this.trimEnd; this.setAnchorTime(Math.max(Math.min(this.trimEnd, this._ele!.currentTime), this.trimStart)); - this._trimming = false; + this._trimming = AudioBox.ScopeNone; }); @action @@ -579,15 +578,15 @@ export class AudioBox extends ViewBoxAnnotatableComponent< rawDuration={this.rawDuration} // this edits the entire waveform when trimming is activated - clipStart={this._trimming ? 0 : this.clipStart} - clipEnd={this._trimming ? this.rawDuration : this.clipEnd} - clipDuration={this._trimming ? this.rawDuration : this.duration} + clipStart={this._trimming === AudioBox.ScopeAll ? 0 : this.clipStart} + clipEnd={this._trimming === AudioBox.ScopeAll ? 
this.rawDuration : this.clipEnd} + clipDuration={this._trimming === AudioBox.ScopeAll ? this.rawDuration : this.clipDuration} // this edits just the current waveform clip when trimming is activated // clipStart={this.clipStart} // clipEnd={this.clipEnd} // clipDuration={this.duration} - trimming={this._trimming} + trimming={this._trimming !== AudioBox.ScopeNone} trimStart={this.trimStartFunc} trimEnd={this.trimEndFunc} trimDuration={this.trimDurationFunc} @@ -596,6 +595,15 @@ export class AudioBox extends ViewBoxAnnotatableComponent< /> ); } + onClipPointerDown = (e: React.PointerEvent) => { + setupMoveUpEvents(this, e, returnFalse, returnFalse, action((e: PointerEvent, doubleTap?: boolean) => { + if (doubleTap) { + this.startTrim(AudioBox.ScopeAll); + } else { + this._trimming !== AudioBox.ScopeNone ? this.finishTrim() : this.startTrim(AudioBox.ScopeClip); + } + })); + } render() { const interactive = @@ -697,11 +705,11 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
@@ -719,14 +727,10 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
{this.audio}
- {this._trimming ? - formatTime(Math.round(NumCast(this.layoutDoc._currentTimecode))) - : formatTime(Math.round(NumCast(this.layoutDoc._currentTimecode) - NumCast(this.trimStart)))} + {formatTime(Math.round(NumCast(this.layoutDoc._currentTimecode) - NumCast(this.clipStart)))}
- {this._trimming || !this._trimEnd ? - formatTime(Math.round(NumCast(this.duration))) - : formatTime(Math.round(NumCast(this.trimDuration)))} + {formatTime(Math.round(NumCast(this.clipDuration)))}
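// Illustrative sketch (hypothetical names): the clip button above distinguishes a single click (trim just
// the current clip) from a double click (re-trim the whole recording) inside one pointer handler, the way
// onClipPointerDown routes a doubleTap flag through setupMoveUpEvents.
type TrimScope = "none" | "clip" | "all";
function nextTrimScope(current: TrimScope, doubleTap: boolean): TrimScope {
    if (doubleTap) return "all";                   // double click: edit the full, untrimmed timeline
    return current !== "none" ? "none" : "clip";   // single click: toggle trimming of the current clip
}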
-- cgit v1.2.3-70-g09d2 From 2e51127cd8394d3cf9fb0ce4572c5790f1618d2f Mon Sep 17 00:00:00 2001 From: mehekj Date: Fri, 24 Sep 2021 12:31:18 -0400 Subject: removed marker vertical offset --- .../collections/CollectionStackedTimeline.tsx | 32 +++++++++++----------- 1 file changed, 16 insertions(+), 16 deletions(-) (limited to 'src/client/views/collections') diff --git a/src/client/views/collections/CollectionStackedTimeline.tsx b/src/client/views/collections/CollectionStackedTimeline.tsx index 7859d3c3f..899e22984 100644 --- a/src/client/views/collections/CollectionStackedTimeline.tsx +++ b/src/client/views/collections/CollectionStackedTimeline.tsx @@ -581,7 +581,7 @@ export class CollectionStackedTimeline extends CollectionSubView< start + (10 / timelineContentWidth) * this.clipDuration ); const left = Math.max((start - this.props.clipStart) / this.clipDuration * timelineContentWidth, 0); - const top = (d.level / maxLevel) * this.timelineContentHeight() + 15; + const top = (d.level / maxLevel) * this.timelineContentHeight(); const timespan = end - start; const width = (timespan / this.clipDuration) * timelineContentWidth; const height = (this.timelineContentHeight()) / maxLevel; @@ -860,21 +860,21 @@ class StackedTimelineAnchor extends React.Component {inner.view} {!inner.anchor.view || !SelectionManager.IsSelected(inner.anchor.view) ? null : ( - <> -
this.onAnchorDown(e, this.props.mark, true)} - /> -
- this.onAnchorDown(e, this.props.mark, false) - } - /> - - )} + <> +
this.onAnchorDown(e, this.props.mark, true)} + /> +
+ this.onAnchorDown(e, this.props.mark, false) + } + /> + + )} ); } -- cgit v1.2.3-70-g09d2 From dd715f07c391173bc1c4c57252043fdf4f843ce2 Mon Sep 17 00:00:00 2001 From: bobzel Date: Fri, 24 Sep 2021 12:58:29 -0400 Subject: fixed double-click on trim ends to reset properly. --- .../collections/CollectionStackedTimeline.tsx | 34 +++++++++++----------- 1 file changed, 17 insertions(+), 17 deletions(-) (limited to 'src/client/views/collections') diff --git a/src/client/views/collections/CollectionStackedTimeline.tsx b/src/client/views/collections/CollectionStackedTimeline.tsx index 899e22984..8f8936997 100644 --- a/src/client/views/collections/CollectionStackedTimeline.tsx +++ b/src/client/views/collections/CollectionStackedTimeline.tsx @@ -311,7 +311,7 @@ export class CollectionStackedTimeline extends CollectionSubView< emptyFunction, action((e, doubleTap) => { if (doubleTap) { - this.props.setStartTrim(0); + this.props.setStartTrim(this.props.clipStart); } }) ); @@ -339,7 +339,7 @@ export class CollectionStackedTimeline extends CollectionSubView< emptyFunction, action((e, doubleTap) => { if (doubleTap) { - this.props.setEndTrim(this.clipDuration); + this.props.setEndTrim(this.props.clipEnd); } }) ); @@ -860,21 +860,21 @@ class StackedTimelineAnchor extends React.Component {inner.view} {!inner.anchor.view || !SelectionManager.IsSelected(inner.anchor.view) ? null : ( - <> -
this.onAnchorDown(e, this.props.mark, true)} - /> -
- this.onAnchorDown(e, this.props.mark, false) - } - /> - - )} + <> +
this.onAnchorDown(e, this.props.mark, true)} + /> +
+ this.onAnchorDown(e, this.props.mark, false) + } + /> + + )} ); } -- cgit v1.2.3-70-g09d2 From 8b49822b6a80c19424dd9adda9516b6806757a58 Mon Sep 17 00:00:00 2001 From: bobzel Date: Fri, 24 Sep 2021 14:01:00 -0400 Subject: styled label markers on audio timeline to have border rounding. fixed setting drop location of documents on a clipped timeline. fixed clipping markers on timeline that do not overlap current clip. fixed escape aborting selecting a region on timeline to not restart on movement after esc. --- src/client/views/StyleProvider.tsx | 2 +- .../collections/CollectionStackedTimeline.tsx | 37 ++++++++++++++-------- 2 files changed, 24 insertions(+), 15 deletions(-) (limited to 'src/client/views/collections') diff --git a/src/client/views/StyleProvider.tsx b/src/client/views/StyleProvider.tsx index cd6e11bda..e6490a56c 100644 --- a/src/client/views/StyleProvider.tsx +++ b/src/client/views/StyleProvider.tsx @@ -106,7 +106,7 @@ export function DefaultStyleProvider(doc: Opt, props: Opt (props?.PanelHeight() || 0) ? 5 : 10) : 0; diff --git a/src/client/views/collections/CollectionStackedTimeline.tsx b/src/client/views/collections/CollectionStackedTimeline.tsx index 8f8936997..fb0a0ec1d 100644 --- a/src/client/views/collections/CollectionStackedTimeline.tsx +++ b/src/client/views/collections/CollectionStackedTimeline.tsx @@ -185,13 +185,15 @@ export class CollectionStackedTimeline extends CollectionSubView< this._markerStart = this._markerEnd = this.currentTime; CollectionStackedTimeline.SelectingRegion = this; } else { + this._markerEnd = this.currentTime; CollectionStackedTimeline.createAnchor( this.rootDoc, this.dataDoc, this.props.fieldKey, this.props.startTag, this.props.endTag, - this.currentTime + this._markerStart, + this._markerEnd ); CollectionStackedTimeline.SelectingRegion = undefined; } @@ -218,10 +220,11 @@ export class CollectionStackedTimeline extends CollectionSubView< onPointerDownTimeline = (e: React.PointerEvent): void => { const rect = this._timeline?.getBoundingClientRect(); const clientX = e.clientX; + const shiftKey = e.shiftKey; if (rect && this.props.isContentActive()) { const wasPlaying = this.props.playing(); if (wasPlaying) this.props.Pause(); - const wasSelecting = CollectionStackedTimeline.SelectingRegion === this; + var wasSelecting = CollectionStackedTimeline.SelectingRegion === this; setupMoveUpEvents( this, e, @@ -235,6 +238,7 @@ export class CollectionStackedTimeline extends CollectionSubView< rect.width ); CollectionStackedTimeline.SelectingRegion = this; + wasSelecting = true; } this._markerEnd = this.toTimeline(e.clientX - rect.x, rect.width); return false; @@ -268,7 +272,12 @@ export class CollectionStackedTimeline extends CollectionSubView< }), (e, doubleTap) => { this.props.select(false); - e.shiftKey && + !wasPlaying && doubleTap && this.props.Play(); + }, + this.props.isSelected(true) || this.props.isContentActive(), + undefined, + () => { + if (shiftKey) { CollectionStackedTimeline.createAnchor( this.rootDoc, this.dataDoc, @@ -277,12 +286,9 @@ export class CollectionStackedTimeline extends CollectionSubView< this.props.endTag, this.currentTime ); - !wasPlaying && doubleTap && this.props.Play(); - }, - this.props.isSelected(true) || this.props.isContentActive(), - undefined, - () => { - !wasPlaying && this.props.setTime(((clientX - rect.x) / rect.width) * this.clipDuration + this.props.clipStart); + } else { + !wasPlaying && this.props.setTime(((clientX - rect.x) / rect.width) * this.clipDuration + this.props.clipStart); + } } ); } @@ -354,11 
+360,11 @@ export class CollectionStackedTimeline extends CollectionSubView< // determine x coordinate of drop and assign it to the documents being dragged --- see internalDocDrop of collectionFreeFormView.tsx for how it's done when dropping onto a 2D freeform view const localPt = this.props.ScreenToLocalTransform().transformPoint(de.x, de.y); const x = localPt[0] - docDragData.offset[0]; - const timelineContentWidth = this.props.PanelWidth(); + const timelinePt = this.toTimeline(x, this.props.PanelWidth()); for (let i = 0; i < docDragData.droppedDocuments.length; i++) { const d = Doc.GetProto(docDragData.droppedDocuments[i]); - d._timecodeToHide = x / timelineContentWidth * this.props.trimDuration() + NumCast(d._timecodeToHide) - NumCast(d._timecodeToShow); - d._timecodeToShow = x / timelineContentWidth * this.props.trimDuration(); + d._timecodeToHide = timelinePt + NumCast(d._timecodeToHide) - NumCast(d._timecodeToShow); + d._timecodeToShow = timelinePt; } return true; @@ -391,6 +397,7 @@ export class CollectionStackedTimeline extends CollectionSubView< hideLinkButton: true, annotationOn: rootDoc, _timelineLabel: true, + borderRounding: anchorEndTime === undefined ? "100%" : undefined }); Doc.GetProto(anchor)[startTag] = anchorStartTime; Doc.GetProto(anchor)[endTag] = anchorEndTime; @@ -574,12 +581,12 @@ export class CollectionStackedTimeline extends CollectionSubView< onPointerDown={(e) => isActive && this.onPointerDownTimeline(e)} > {drawAnchors.map((d) => { - const start = this.anchorStart(d.anchor); const end = this.anchorEnd( d.anchor, start + (10 / timelineContentWidth) * this.clipDuration ); + if (end < this.props.clipStart || start > this.props.clipEnd) return (null); const left = Math.max((start - this.props.clipStart) / this.clipDuration * timelineContentWidth, 0); const top = (d.level / maxLevel) * this.timelineContentHeight(); const timespan = end - start; @@ -748,16 +755,18 @@ class StackedTimelineAnchor extends React.Component const rect = (e.target as any).getBoundingClientRect(); return this.props.toTimeline(e.clientX - rect.x, rect.width); }; - const changeAnchor = (anchor: Doc, left: boolean, time: number) => { + const changeAnchor = (anchor: Doc, left: boolean, time: number | undefined) => { const timelineOnly = Cast(anchor[this.props.startTag], "number", null) !== undefined; if (timelineOnly) { + if (!left && time !== undefined && time <= NumCast(anchor[this.props.startTag])) time = undefined; Doc.SetInPlace( anchor, left ? this.props.startTag : this.props.endTag, time, true ); + if (!left) Doc.SetInPlace(anchor, "borderRounding", time !== undefined ? undefined : "100%", true); } else { left -- cgit v1.2.3-70-g09d2 From 1d808673353518a1e8ce0eaaf2d9fe14321a9210 Mon Sep 17 00:00:00 2001 From: bobzel Date: Fri, 24 Sep 2021 14:23:36 -0400 Subject: fixed dropping label markers on timelines to not disappear. made all timeline markers draggable when the timeline is active (without needing to be selected). 
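Both the drop fix and the marker-clipping fix above rest on one mapping between screen x-offsets and media time within the currently visible clip. Condensed into standalone form (all times in seconds, names illustrative; the real methods live on CollectionStackedTimeline):

// Pixel -> time: used when documents are dropped onto the timeline.
const screenToTime = (xOffset: number, panelWidth: number, clipStart: number, clipEnd: number) => {
  const t = (xOffset / panelWidth) * (clipEnd - clipStart) + clipStart;
  return Math.max(clipStart, Math.min(clipEnd, t)); // clamp into the visible clip
};
// Time -> pixels: used to lay out an anchor, skipping anchors outside the clip entirely.
const anchorGeometry = (start: number, end: number, level: number, maxLevel: number,
  panelWidth: number, panelHeight: number, clipStart: number, clipEnd: number) => {
  const clipDuration = clipEnd - clipStart;
  if (end < clipStart || start > clipEnd) return undefined; // anchor does not overlap the clip
  return {
    left: Math.max(((start - clipStart) / clipDuration) * panelWidth, 0),
    top: (level / maxLevel) * panelHeight,      // overlapping anchors are stacked by level
    width: ((end - start) / clipDuration) * panelWidth,
    height: panelHeight / maxLevel,
  };
};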
--- .../collections/CollectionStackedTimeline.tsx | 26 ++++++++++------------ 1 file changed, 12 insertions(+), 14 deletions(-) (limited to 'src/client/views/collections') diff --git a/src/client/views/collections/CollectionStackedTimeline.tsx b/src/client/views/collections/CollectionStackedTimeline.tsx index fb0a0ec1d..43f78cf78 100644 --- a/src/client/views/collections/CollectionStackedTimeline.tsx +++ b/src/client/views/collections/CollectionStackedTimeline.tsx @@ -363,7 +363,9 @@ export class CollectionStackedTimeline extends CollectionSubView< const timelinePt = this.toTimeline(x, this.props.PanelWidth()); for (let i = 0; i < docDragData.droppedDocuments.length; i++) { const d = Doc.GetProto(docDragData.droppedDocuments[i]); - d._timecodeToHide = timelinePt + NumCast(d._timecodeToHide) - NumCast(d._timecodeToShow); + if (d._timecodeToHide !== undefined) { + d._timecodeToHide = timelinePt + NumCast(d._timecodeToHide) - NumCast(d._timecodeToShow); + } d._timecodeToShow = timelinePt; } @@ -497,14 +499,9 @@ export class CollectionStackedTimeline extends CollectionSubView< } dictationHeightPercent = 50; - dictationHeight = () => - (this.props.PanelHeight() * (100 - this.dictationHeightPercent)) / 100 - timelineContentHeight = () => - (this.props.PanelHeight() * this.dictationHeightPercent) / 100 - dictationScreenToLocalTransform = () => - this.props - .ScreenToLocalTransform() - .translate(0, -this.timelineContentHeight()) + dictationHeight = () => (this.props.PanelHeight() * (100 - this.dictationHeightPercent)) / 100 + timelineContentHeight = () => (this.props.PanelHeight() * this.dictationHeightPercent) / 100 + dictationScreenToLocalTransform = () => this.props.ScreenToLocalTransform().translate(0, -this.timelineContentHeight()) @computed get renderDictation() { const dictation = Cast(this.dataDoc[this.props.dictationKey], Doc, null); return !dictation ? null : ( @@ -558,6 +555,7 @@ export class CollectionStackedTimeline extends CollectionSubView< ); } + isContentActive = () => this.props.isSelected() || this.props.isContentActive(); currentTimecode = () => this.currentTime; render() { const timelineContentWidth = this.props.PanelWidth(); @@ -571,14 +569,12 @@ export class CollectionStackedTimeline extends CollectionSubView< anchor, })); const maxLevel = overlaps.reduce((m, o) => Math.max(m, o.level), 0) + 2; - const isActive = - this.props.isContentActive() || this.props.isSelected(false); return (
(this._timeline = timeline)} - onClick={(e) => isActive && StopEvent(e)} - onPointerDown={(e) => isActive && this.onPointerDownTimeline(e)} + onClick={(e) => this.isContentActive() && StopEvent(e)} + onPointerDown={(e) => this.isContentActive() && this.onPointerDownTimeline(e)} > {drawAnchors.map((d) => { const start = this.anchorStart(d.anchor); @@ -618,6 +614,7 @@ export class CollectionStackedTimeline extends CollectionSubView< height={height} toTimeline={this.toTimeline} layoutDoc={this.layoutDoc} + isDocumentActive={this.props.childDocumentsActive ? this.props.isDocumentActive : this.isContentActive} currentTimecode={this.currentTimecode} _timeline={this._timeline} stackedTimeline={this} @@ -691,6 +688,7 @@ interface StackedTimelineAnchorProps { endTag: string; renderDepth: number; layoutDoc: Doc; + isDocumentActive?: () => boolean; ScreenToLocalTransform: () => Transform; _timeline: HTMLDivElement | null; focus: DocFocusFunc; @@ -833,7 +831,7 @@ class StackedTimelineAnchor extends React.Component renderDepth={this.props.renderDepth + 1} LayoutTemplate={undefined} LayoutTemplateString={LabelBox.LayoutStringWithTitle(LabelBox, "data", this.computeTitle())} - isDocumentActive={returnFalse} + isDocumentActive={this.props.isDocumentActive} PanelWidth={() => width} PanelHeight={() => height} ScreenToLocalTransform={() => -- cgit v1.2.3-70-g09d2 From 6bcf4ae5f3953ba10ba1fba6c7d2246514a90eed Mon Sep 17 00:00:00 2001 From: bobzel Date: Fri, 24 Sep 2021 22:31:22 -0400 Subject: refactored trim stuff out of audiobox into collectionstackedtimeline so that videobox can reuse trimming --- src/client/views/AudioWaveform.tsx | 5 +- src/client/views/ContextMenuItem.tsx | 4 +- .../collections/CollectionStackedTimeline.tsx | 193 +++++------- src/client/views/nodes/AudioBox.tsx | 350 ++++++++------------- src/client/views/nodes/VideoBox.tsx | 59 ++-- 5 files changed, 248 insertions(+), 363 deletions(-) (limited to 'src/client/views/collections') diff --git a/src/client/views/AudioWaveform.tsx b/src/client/views/AudioWaveform.tsx index 0a441552e..0e9c00656 100644 --- a/src/client/views/AudioWaveform.tsx +++ b/src/client/views/AudioWaveform.tsx @@ -16,7 +16,6 @@ export interface AudioWaveformProps { rawDuration: number; // length of underlying media data mediaPath: string; layoutDoc: Doc; - trimming: boolean; clipStart: number; clipEnd: number; PanelHeight: () => number; @@ -42,8 +41,8 @@ export class AudioWaveform extends React.Component { ({ clipStart, clipEnd, fieldKey }) => { if (!this.props.layoutDoc[fieldKey]) { // setting these values here serves as a "lock" to prevent multiple attempts to create the waveform at nerly the same time. 
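The comment above describes the waveform cache: buckets are computed once for the raw recording, and a trimmed clip reuses them by taking a proportional slice; writing the (possibly still empty) slice to layoutDoc[fieldKey] immediately is what acts as the lock against a second concurrent generation. The slice itself, sketched with plain arrays in place of Dash's List/Cast field helpers:

// Illustrative only: derive a clip's waveform buckets from the full-recording buckets.
function sliceBuckets(fullBuckets: number[], rawDuration: number, clipStart: number, clipEnd: number): number[] {
  const from = (clipStart / rawDuration) * fullBuckets.length;
  const to = (clipEnd / rawDuration) * fullBuckets.length;
  return fullBuckets.slice(from, to); // slice truncates fractional indices
}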
- const waveform = Cast(this.props.layoutDoc[this.audioBucketField(0, this.props.rawDuration)], listSpec("number"), []); - this.props.layoutDoc[fieldKey] = new List(waveform.slice(clipStart / this.props.rawDuration * waveform.length, clipEnd / this.props.rawDuration * waveform.length)); + const waveform = Cast(this.props.layoutDoc[this.audioBucketField(0, this.props.rawDuration)], listSpec("number")); + this.props.layoutDoc[fieldKey] = waveform && new List(waveform.slice(clipStart / this.props.rawDuration * waveform.length, clipEnd / this.props.rawDuration * waveform.length)); setTimeout(() => this.createWaveformBuckets(fieldKey, clipStart, clipEnd)); } }, { fireImmediately: true }); diff --git a/src/client/views/ContextMenuItem.tsx b/src/client/views/ContextMenuItem.tsx index c3921d846..25d00f701 100644 --- a/src/client/views/ContextMenuItem.tsx +++ b/src/client/views/ContextMenuItem.tsx @@ -39,7 +39,7 @@ export class ContextMenuItem extends React.Component) => { if ("event" in this.props) { - this.props.closeMenu && this.props.closeMenu(); + this.props.closeMenu?.(); let batch: UndoManager.Batch | undefined; if (this.props.undoable !== false) { batch = UndoManager.StartBatch(`Context menu event: ${this.props.description}`); @@ -90,7 +90,7 @@ export class ContextMenuItem extends React.Component ) : null}
- {this.props.description.replace(":","")} + {this.props.description.replace(":", "")}
); diff --git a/src/client/views/collections/CollectionStackedTimeline.tsx b/src/client/views/collections/CollectionStackedTimeline.tsx index 43f78cf78..48014921a 100644 --- a/src/client/views/collections/CollectionStackedTimeline.tsx +++ b/src/client/views/collections/CollectionStackedTimeline.tsx @@ -61,24 +61,21 @@ export type CollectionStackedTimelineProps = { mediaPath: string; dictationKey: string; rawDuration: number; - trimming: boolean; - clipStart: number; - clipEnd: number; - clipDuration: number; - trimStart: () => number; - trimEnd: () => number; - trimDuration: () => number; - setStartTrim: (newStart: number) => void; - setEndTrim: (newEnd: number) => void; + fieldKey: string; }; +export enum TrimScope { + All = 2, + Clip = 1, + None = 0, +} + @observer export class CollectionStackedTimeline extends CollectionSubView< PanZoomDocument, CollectionStackedTimelineProps >(PanZoomDocument) { - @observable static SelectingRegion: CollectionStackedTimeline | undefined = - undefined; + @observable static SelectingRegion: CollectionStackedTimeline | undefined; static RangeScript: ScriptField; static LabelScript: ScriptField; static RangePlayScript: ScriptField; @@ -87,37 +84,43 @@ export class CollectionStackedTimeline extends CollectionSubView< private _timeline: HTMLDivElement | null = null; private _markerStart: number = 0; @observable _markerEnd: number = 0; + @observable _trimming: number = TrimScope.None; + @observable _trimStart: number = 0; + @observable _trimEnd: number = 0; - get minLength() { - const rect = this._timeline?.getBoundingClientRect(); - if (rect) { - return 0.05 * this.clipDuration; - } - return 0; - } + get minTrimLength() { return this._timeline?.getBoundingClientRect() ? 0.05 * this.clipDuration : 0; } + @computed get trimStart() { return this.IsTrimming !== TrimScope.None ? this._trimStart : this.clipStart; } + @computed get trimDuration() { return this.trimEnd - this.trimStart; } + @computed get trimEnd() { return this.IsTrimming !== TrimScope.None ? this._trimEnd : this.clipEnd; } - get trimStart() { - return this.props.trimStart(); - } + @computed get clipStart() { return this.IsTrimming === TrimScope.All ? 0 : NumCast(this.layoutDoc.clipStart); } + @computed get clipDuration() { return this.clipEnd - this.clipStart; } + @computed get clipEnd() { return this.IsTrimming === TrimScope.All ? this.props.rawDuration : NumCast(this.layoutDoc.clipEnd, this.props.rawDuration); } - get trimEnd() { - return this.props.trimEnd(); - } + @computed get currentTime() { return NumCast(this.layoutDoc._currentTimecode); } - get clipDuration() { - return this.props.clipDuration; - } + public get IsTrimming() { return this._trimming; } - @computed get currentTime() { - return NumCast(this.layoutDoc._currentTimecode); + @action + public StartTrimming(scope: TrimScope) { + this._trimStart = this.clipStart; + this._trimEnd = this.clipEnd; + this._trimming = scope; + } + @action + public StopTrimming() { + this.layoutDoc.clipStart = this.trimStart; + this.layoutDoc.clipEnd = this.trimEnd; + this._trimming = TrimScope.None; } + @computed get selectionContainer() { return CollectionStackedTimeline.SelectingRegion !== this ? null : (
); @@ -145,28 +148,21 @@ export class CollectionStackedTimeline extends CollectionSubView< componentDidMount() { document.addEventListener("keydown", this.keyEvents, true); } + + @action componentWillUnmount() { document.removeEventListener("keydown", this.keyEvents, true); if (CollectionStackedTimeline.SelectingRegion === this) { - runInAction( - () => (CollectionStackedTimeline.SelectingRegion = undefined) - ); + CollectionStackedTimeline.SelectingRegion = undefined; } } - anchorStart = (anchor: Doc) => - NumCast(anchor._timecodeToShow, NumCast(anchor[this.props.startTag])) - anchorEnd = (anchor: Doc, val: any = null) => { - const endVal = NumCast(anchor[this.props.endTag], val); - return NumCast( - anchor._timecodeToHide, - endVal === undefined ? null : endVal - ); - } + anchorStart = (anchor: Doc) => NumCast(anchor._timecodeToShow, NumCast(anchor[this.props.startTag])) + anchorEnd = (anchor: Doc, val: any = null) => NumCast(anchor._timecodeToHide, NumCast(anchor[this.props.endTag], val)); toTimeline = (screen_delta: number, width: number) => { return Math.max( - this.props.clipStart, - Math.min(this.props.clipEnd, (screen_delta / width) * this.props.clipDuration + this.props.clipStart)); + this.clipStart, + Math.min(this.clipEnd, (screen_delta / width) * this.clipDuration + this.clipStart)); } rangeClickScript = () => CollectionStackedTimeline.RangeScript; @@ -233,10 +229,7 @@ export class CollectionStackedTimeline extends CollectionSubView< !wasSelecting && CollectionStackedTimeline.SelectingRegion !== this ) { - this._markerStart = this._markerEnd = this.toTimeline( - clientX - rect.x, - rect.width - ); + this._markerStart = this._markerEnd = this.toTimeline(clientX - rect.x, rect.width); CollectionStackedTimeline.SelectingRegion = this; wasSelecting = true; } @@ -254,7 +247,7 @@ export class CollectionStackedTimeline extends CollectionSubView< !isClick && CollectionStackedTimeline.SelectingRegion === this && Math.abs(movement[0]) > 15 && - !this.props.trimming + !this.IsTrimming ) { const anchor = CollectionStackedTimeline.createAnchor( this.rootDoc, @@ -287,7 +280,7 @@ export class CollectionStackedTimeline extends CollectionSubView< this.currentTime ); } else { - !wasPlaying && this.props.setTime(((clientX - rect.x) / rect.width) * this.clipDuration + this.props.clipStart); + !wasPlaying && this.props.setTime(this.toTimeline(clientX - rect.x, rect.width)); } } ); @@ -304,21 +297,19 @@ export class CollectionStackedTimeline extends CollectionSubView< e, action((e, [], []) => { if (rect && this.props.isContentActive()) { - this.props.setStartTrim(Math.min( + this._trimStart = Math.min( Math.max( this.trimStart + (e.movementX / rect.width) * this.clipDuration, 0 ), - this.trimEnd - this.minLength - )); + this.trimEnd - this.minTrimLength + ); } return false; }), emptyFunction, action((e, doubleTap) => { - if (doubleTap) { - this.props.setStartTrim(this.props.clipStart); - } + doubleTap && (this._trimStart = this.clipStart); }) ); } @@ -332,21 +323,19 @@ export class CollectionStackedTimeline extends CollectionSubView< e, action((e, [], []) => { if (rect && this.props.isContentActive()) { - this.props.setEndTrim(Math.max( + this._trimEnd = Math.max( Math.min( this.trimEnd + (e.movementX / rect.width) * this.clipDuration, - this.props.clipStart + this.clipDuration + this.clipStart + this.clipDuration ), - this.trimStart + this.minLength - )); + this.trimStart + this.minTrimLength + ); } return false; }), emptyFunction, action((e, doubleTap) => { - if (doubleTap) { - 
this.props.setEndTrim(this.props.clipEnd); - } + doubleTap && (this._trimEnd = this.clipEnd); }) ); } @@ -356,17 +345,16 @@ export class CollectionStackedTimeline extends CollectionSubView< if (!de.embedKey && this.props.layerProvider?.(this.props.Document) !== false && this.props.Document._isGroup) return false; if (!super.onInternalDrop(e, de)) return false; - // determine x coordinate of drop and assign it to the documents being dragged --- see internalDocDrop of collectionFreeFormView.tsx for how it's done when dropping onto a 2D freeform view const localPt = this.props.ScreenToLocalTransform().transformPoint(de.x, de.y); const x = localPt[0] - docDragData.offset[0]; const timelinePt = this.toTimeline(x, this.props.PanelWidth()); for (let i = 0; i < docDragData.droppedDocuments.length; i++) { const d = Doc.GetProto(docDragData.droppedDocuments[i]); - if (d._timecodeToHide !== undefined) { - d._timecodeToHide = timelinePt + NumCast(d._timecodeToHide) - NumCast(d._timecodeToShow); + if (this.anchorEnd(d) !== undefined) { + d[d._timecodeToHide === undefined ? this.props.endTag : "_timecodeToHide"] = timelinePt + this.anchorEnd(d) - this.anchorStart(d); } - d._timecodeToShow = timelinePt; + d[d._timecodToShow === undefined ? this.props.startTag : "_timecodToShow"] = timelinePt; } return true; @@ -403,7 +391,7 @@ export class CollectionStackedTimeline extends CollectionSubView< }); Doc.GetProto(anchor)[startTag] = anchorStartTime; Doc.GetProto(anchor)[endTag] = anchorEndTime; - if (Cast(dataDoc[fieldKey], listSpec(Doc), null) !== undefined) { + if (Cast(dataDoc[fieldKey], listSpec(Doc), null)) { Cast(dataDoc[fieldKey], listSpec(Doc), []).push(anchor); } else { dataDoc[fieldKey] = new List([anchor]); @@ -546,10 +534,9 @@ export class CollectionStackedTimeline extends CollectionSubView< duration={this.clipDuration} mediaPath={this.props.mediaPath} layoutDoc={this.layoutDoc} - clipStart={this.props.clipStart} - clipEnd={this.props.clipEnd} + clipStart={this.clipStart} + clipEnd={this.clipEnd} PanelHeight={this.timelineContentHeight} - trimming={this.props.trimming} />
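The trim handles above are dragged in pixel space and clamped in time space: pointer movement is scaled by clipDuration / panelWidth, and each handle is kept at least minTrimLength away from its counterpart (double-tap snaps a handle back to the clip boundary). A condensed sketch of that clamping, using the same names as the diff but written as free functions:

// px -> seconds, then clamp so the two handles can never cross or get closer than minTrimLength.
function dragTrimStart(trimStart: number, trimEnd: number, movementX: number,
  panelWidth: number, clipDuration: number, minTrimLength: number): number {
  const delta = (movementX / panelWidth) * clipDuration;
  return Math.min(Math.max(trimStart + delta, 0), trimEnd - minTrimLength);
}
function dragTrimEnd(trimStart: number, trimEnd: number, movementX: number,
  panelWidth: number, clipStart: number, clipDuration: number, minTrimLength: number): number {
  const delta = (movementX / panelWidth) * clipDuration;
  return Math.max(Math.min(trimEnd + delta, clipStart + clipDuration), trimStart + minTrimLength);
}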
); @@ -582,12 +569,12 @@ export class CollectionStackedTimeline extends CollectionSubView< d.anchor, start + (10 / timelineContentWidth) * this.clipDuration ); - if (end < this.props.clipStart || start > this.props.clipEnd) return (null); - const left = Math.max((start - this.props.clipStart) / this.clipDuration * timelineContentWidth, 0); + if (end < this.clipStart || start > this.clipEnd) return (null); + const left = Math.max((start - this.clipStart) / this.clipDuration * timelineContentWidth, 0); const top = (d.level / maxLevel) * this.timelineContentHeight(); const timespan = end - start; const width = (timespan / this.clipDuration) * timelineContentWidth; - const height = (this.timelineContentHeight()) / maxLevel; + const height = this.timelineContentHeight() / maxLevel; return this.props.Document.hideAnchors ? null : (
); })} - {!this.props.trimming && this.selectionContainer} + {!this.IsTrimming && this.selectionContainer} {this.renderAudioWaveform} {this.renderDictation}
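The level/maxLevel values consumed by the anchor layout come from an overlap pass that is not part of this diff; one common greedy scheme that produces equivalent rows is sketched below (illustrative only, the project's overlap computation may differ):

interface TimedAnchor { start: number; end: number; }
// Assign each anchor the first row whose previous occupant has already ended.
function assignLevels(anchors: TimedAnchor[]): { anchor: TimedAnchor; level: number }[] {
  const rowEnds: number[] = []; // end time of the last anchor placed in each row
  return anchors
    .slice()
    .sort((a, b) => a.start - b.start)
    .map(anchor => {
      let level = rowEnds.findIndex(end => end <= anchor.start);
      if (level === -1) { level = rowEnds.length; rowEnds.push(0); }
      rowEnds[level] = anchor.end;
      return { anchor, level };
    });
}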
- {this.props.trimming && ( + {this.IsTrimming && ( <>
@@ -662,8 +649,8 @@ export class CollectionStackedTimeline extends CollectionSubView<
@@ -754,8 +741,7 @@ class StackedTimelineAnchor extends React.Component return this.props.toTimeline(e.clientX - rect.x, rect.width); }; const changeAnchor = (anchor: Doc, left: boolean, time: number | undefined) => { - const timelineOnly = - Cast(anchor[this.props.startTag], "number", null) !== undefined; + const timelineOnly = Cast(anchor[this.props.startTag], "number", null) !== undefined; if (timelineOnly) { if (!left && time !== undefined && time <= NumCast(anchor[this.props.startTag])) time = undefined; Doc.SetInPlace( @@ -767,9 +753,7 @@ class StackedTimelineAnchor extends React.Component if (!left) Doc.SetInPlace(anchor, "borderRounding", time !== undefined ? undefined : "100%", true); } else { - left - ? (anchor._timecodeToShow = time) - : (anchor._timecodeToHide = time); + anchor[left ? "_timecodeToShow" : "_timecodeToHide"] = time; } return false; }; @@ -803,10 +787,9 @@ class StackedTimelineAnchor extends React.Component mark: Doc, script: undefined | (() => ScriptField), doublescript: undefined | (() => ScriptField), - x: number, - y: number, - width: number, - height: number + screenXf: () => Transform, + width: () => number, + height: () => number ) { const anchor = observable({ view: undefined as any }); const focusFunc = ( @@ -825,24 +808,20 @@ class StackedTimelineAnchor extends React.Component (anchor.view = r))} + ref={action((r: DocumentView | null) => anchor.view = r)} Document={mark} DataDoc={undefined} renderDepth={this.props.renderDepth + 1} LayoutTemplate={undefined} LayoutTemplateString={LabelBox.LayoutStringWithTitle(LabelBox, "data", this.computeTitle())} isDocumentActive={this.props.isDocumentActive} - PanelWidth={() => width} - PanelHeight={() => height} - ScreenToLocalTransform={() => - this.props.ScreenToLocalTransform().translate(-x, -y) - } + PanelWidth={width} + PanelHeight={height} + ScreenToLocalTransform={screenXf} focus={focusFunc} rootSelected={returnFalse} onClick={script} - onDoubleClick={ - this.props.layoutDoc.autoPlayAnchors ? undefined : doublescript - } + onDoubleClick={this.props.layoutDoc.autoPlayAnchors ? undefined : doublescript} ignoreAutoHeight={false} hideResizeHandles={true} bringToFront={emptyFunction} @@ -852,15 +831,17 @@ class StackedTimelineAnchor extends React.Component }; }); + anchorScreenToLocalXf = () => this.props.ScreenToLocalTransform().translate(-this.props.left, -this.props.top); + width = () => this.props.width; + height = () => this.props.height; render() { const inner = this.renderInner( this.props.mark, this.props.rangeClickScript, this.props.rangePlayScript, - this.props.left, - this.props.top, - this.props.width, - this.props.height + this.anchorScreenToLocalXf, + this.width, + this.height ); return ( <> @@ -876,9 +857,7 @@ class StackedTimelineAnchor extends React.Component
- this.onAnchorDown(e, this.props.mark, false) - } + onPointerDown={(e) => this.onAnchorDown(e, this.props.mark, false)} /> )} diff --git a/src/client/views/nodes/AudioBox.tsx b/src/client/views/nodes/AudioBox.tsx index bfc15cea8..81367ed19 100644 --- a/src/client/views/nodes/AudioBox.tsx +++ b/src/client/views/nodes/AudioBox.tsx @@ -14,15 +14,15 @@ import { Doc, DocListCast, Opt } from "../../../fields/Doc"; import { documentSchema } from "../../../fields/documentSchemas"; import { makeInterface } from "../../../fields/Schema"; import { ComputedField } from "../../../fields/ScriptField"; -import { Cast, NumCast } from "../../../fields/Types"; +import { Cast, NumCast, DateCast } from "../../../fields/Types"; import { AudioField, nullAudio } from "../../../fields/URLField"; -import { emptyFunction, formatTime, OmitKeys, setupMoveUpEvents, returnFalse } from "../../../Utils"; +import { emptyFunction, formatTime, OmitKeys, returnFalse, setupMoveUpEvents } from "../../../Utils"; import { DocUtils } from "../../documents/Documents"; import { Networking } from "../../Network"; import { CurrentUserUtils } from "../../util/CurrentUserUtils"; import { DragManager } from "../../util/DragManager"; import { SnappingManager } from "../../util/SnappingManager"; -import { CollectionStackedTimeline } from "../collections/CollectionStackedTimeline"; +import { CollectionStackedTimeline, TrimScope } from "../collections/CollectionStackedTimeline"; import { ContextMenu } from "../ContextMenu"; import { ContextMenuProps } from "../ContextMenuItem"; import { @@ -43,20 +43,21 @@ declare class MediaRecorder { type AudioDocument = makeInterface<[typeof documentSchema]>; const AudioDocument = makeInterface(documentSchema); +enum media_state { + PendingRecording = "pendingRecording", + Recording = "recording", + Paused = "paused", + Playing = "playing" +}; @observer -export class AudioBox extends ViewBoxAnnotatableComponent< - ViewBoxAnnotatableProps & FieldViewProps, - AudioDocument ->(AudioDocument) { +export class AudioBox extends ViewBoxAnnotatableComponent(AudioDocument) { public static LayoutString(fieldKey: string) { return FieldView.LayoutString(AudioBox, fieldKey); } public static Enabled = false; static playheadWidth = 40; // width of playhead static heightPercent = 75; // height of timeline in percent of height of audioBox. static Instance: AudioBox; - static ScopeAll = 2; - static ScopeClip = 1; - static ScopeNone = 0; + _dropDisposer?: DragManager.DragDropDisposer; _disposers: { [name: string]: IReactionDisposer } = {}; _ele: HTMLAudioElement | null = null; _stackedTimeline = React.createRef(); @@ -68,81 +69,39 @@ export class AudioBox extends ViewBoxAnnotatableComponent< _stream: MediaStream | undefined; _start: number = 0; _play: any = null; - _ended: boolean = false; @observable static _scrubTime = 0; @observable _markerEnd: number = 0; @observable _position: number = 0; @observable _waveHeight: Opt = this.layoutDoc._height; @observable _paused: boolean = false; - @observable _trimming: number = AudioBox.ScopeNone; - @observable _trimStart: number = NumCast(this.layoutDoc.clipStart); - @observable _trimEnd: number | undefined = Cast(this.layoutDoc.clipEnd, "number"); - @computed get clipStart() { return this._trimming === AudioBox.ScopeAll ? 0 : NumCast(this.layoutDoc.clipStart); } - @computed get clipDuration() { - return this._trimming === AudioBox.ScopeAll ? 
NumCast(this.dataDoc[`${this.fieldKey}-duration`]) : - NumCast(this.layoutDoc.clipEnd, this.clipStart + NumCast(this.dataDoc[`${this.fieldKey}-duration`])) - this.clipStart; - } - @computed get clipEnd() { return this.clipStart + this.clipDuration; } - @computed get trimStart() { return this._trimming !== AudioBox.ScopeNone ? this._trimStart : NumCast(this.layoutDoc.clipStart); } - @computed get trimDuration() { return this.trimEnd - this.trimStart; } - @computed get trimEnd() { - return this._trimming !== AudioBox.ScopeNone && this._trimEnd !== undefined ? this._trimEnd : NumCast(this.layoutDoc.clipEnd, this.clipDuration); - } + @computed get recordingStart() { return DateCast(this.dataDoc[this.fieldKey + "-recordingStart"])?.date.getTime(); } + @computed get rawDuration() { return NumCast(this.dataDoc[`${this.fieldKey}-duration`]); } + @computed get anchorDocs() { return DocListCast(this.dataDoc[this.annotationKey]); } + @computed get links() { return DocListCast(this.dataDoc.links); } + @computed get pauseTime() { return this._pauseEnd - this._pauseStart; } // total time paused to update the correct time + @computed get heightPercent() { return AudioBox.heightPercent; } + @computed get mediaState() { return this.layoutDoc.mediaState as media_state; } + set mediaState(value) { this.layoutDoc.mediaState = value; } - @computed get mediaState(): - | undefined - | "pendingRecording" - | "recording" - | "paused" - | "playing" { - return this.layoutDoc.mediaState as - | undefined - | "pendingRecording" - | "recording" - | "paused" - | "playing"; - } - set mediaState(value) { - this.layoutDoc.mediaState = value; - } - public static SetScrubTime = action((timeInMillisFrom1970: number) => { - AudioBox._scrubTime = 0; - AudioBox._scrubTime = timeInMillisFrom1970; - }); - @computed get recordingStart() { - return Cast( - this.dataDoc[this.props.fieldKey + "-recordingStart"], - DateField - )?.date.getTime(); - } - @computed get rawDuration() { - return NumCast(this.dataDoc[`${this.fieldKey}-duration`]); - } - @computed get anchorDocs() { - return DocListCast(this.dataDoc[this.annotationKey]); - } - @computed get links() { - return DocListCast(this.dataDoc.links); - } - @computed get pauseTime() { - return this._pauseEnd - this._pauseStart; - } // total time paused to update the correct time - @computed get heightPercent() { - return AudioBox.heightPercent; - } + get timeline() { return this._stackedTimeline.current; } constructor(props: Readonly) { super(props); AudioBox.Instance = this; } + public static SetScrubTime = action((timeInMillisFrom1970: number) => { + AudioBox._scrubTime = 0; + AudioBox._scrubTime = timeInMillisFrom1970; + }); + getLinkData(l: Doc) { let la1 = l.anchor1 as Doc; let la2 = l.anchor2 as Doc; const linkTime = - this._stackedTimeline.current?.anchorStart(la2) || - this._stackedTimeline.current?.anchorStart(la1) || + this.timeline?.anchorStart(la2) || + this.timeline?.anchorStart(la1) || 0; if (Doc.AreProtosEqual(la1, this.dataDoc)) { la1 = l.anchor2 as Doc; @@ -152,47 +111,42 @@ export class AudioBox extends ViewBoxAnnotatableComponent< } getAnchor = () => { - return ( - CollectionStackedTimeline.createAnchor( - this.rootDoc, - this.dataDoc, - this.annotationKey, - "_timecodeToShow" /* audioStart */, - "_timecodeToHide" /* audioEnd */, - this._ele?.currentTime || - Cast(this.props.Document._currentTimecode, "number", null) || - (this.mediaState === "recording" - ? 
(Date.now() - (this.recordingStart || 0)) / 1000 - : undefined) - ) || this.rootDoc - ); + return CollectionStackedTimeline.createAnchor( + this.rootDoc, + this.dataDoc, + this.annotationKey, + "_timecodeToShow" /* audioStart */, + "_timecodeToHide" /* audioEnd */, + this._ele?.currentTime || + Cast(this.props.Document._currentTimecode, "number", null) || + (this.mediaState === media_state.Recording + ? (Date.now() - (this.recordingStart || 0)) / 1000 + : undefined) + ) || this.rootDoc; } componentWillUnmount() { - this.dropDisposer?.(); + this._dropDisposer?.(); Object.values(this._disposers).forEach((disposer) => disposer?.()); const ind = DocUtils.ActiveRecordings.indexOf(this); ind !== -1 && DocUtils.ActiveRecordings.splice(ind, 1); } - private dropDisposer?: DragManager.DragDropDisposer; @action componentDidMount() { this.props.setContentView?.(this); // this tells the DocumentView that this AudioBox is the "content" of the document. this allows the DocumentView to indirectly call getAnchor() on the AudioBox when making a link. - this.mediaState = this.path ? "paused" : undefined; + this.mediaState = this.path ? media_state.Paused : undefined as any as media_state; this.path && this.setAnchorTime(NumCast(this.layoutDoc.clipStart)); this.path && this.timecodeChanged(); this._disposers.triggerAudio = reaction( - () => - !LinkDocPreview.LinkInfo && this.props.renderDepth !== -1 - ? NumCast(this.Document._triggerAudio, null) - : undefined, + () => !LinkDocPreview.LinkInfo && this.props.renderDepth !== -1 + ? NumCast(this.Document._triggerAudio, null) + : undefined, (start) => - start !== undefined && - setTimeout(() => { + start !== undefined && setTimeout(() => { this.playFrom(start); setTimeout(() => { this.Document._currentTimecode = start; @@ -203,13 +157,11 @@ export class AudioBox extends ViewBoxAnnotatableComponent< ); this._disposers.audioStop = reaction( - () => - this.props.renderDepth !== -1 && !LinkDocPreview.LinkInfo - ? Cast(this.Document._audioStop, "number", null) - : undefined, + () => this.props.renderDepth !== -1 && !LinkDocPreview.LinkInfo + ? 
Cast(this.Document._audioStop, "number", null) + : undefined, (audioStop) => - audioStop !== undefined && - setTimeout(() => { + audioStop !== undefined && setTimeout(() => { this.Pause(); setTimeout(() => (this.Document._audioStop = undefined), 10); }), // wait for mainCont and try again to play @@ -220,27 +172,25 @@ export class AudioBox extends ViewBoxAnnotatableComponent< // for updating the timecode @action timecodeChanged = () => { - const htmlEle = this._ele; - if (this.mediaState !== "recording" && htmlEle) { + if (this.mediaState !== media_state.Recording && this._ele) { this.links .map((l) => this.getLinkData(l)) .forEach(({ la1, la2, linkTime }) => { if ( linkTime > NumCast(this.layoutDoc._currentTimecode) && - linkTime < htmlEle.currentTime + linkTime < this._ele!.currentTime ) { Doc.linkFollowHighlight(la1); } }); - this.layoutDoc._currentTimecode = htmlEle.currentTime; - + this.layoutDoc._currentTimecode = this._ele.currentTime; } } // pause play back Pause = action(() => { this._ele!.pause(); - this.mediaState = "paused"; + this.mediaState = media_state.Paused; }); // play audio for documents created during recording @@ -251,32 +201,30 @@ export class AudioBox extends ViewBoxAnnotatableComponent< // play back the audio from time @action - playFrom = (seekTimeInSeconds: number, endTime: number = this.trimEnd, fullPlay: boolean = false) => { + playFrom = (seekTimeInSeconds: number, endTime?: number, fullPlay: boolean = false): any => { clearTimeout(this._play); if (Number.isNaN(this._ele?.duration)) { setTimeout(() => this.playFrom(seekTimeInSeconds, endTime), 500); - } else if (this._ele && AudioBox.Enabled) { - if (seekTimeInSeconds < 0) { - if (seekTimeInSeconds > -1) { - setTimeout(() => this.playFrom(0), -seekTimeInSeconds * 1000); - } else { - this.Pause(); - } - } else if (this.trimStart <= endTime && seekTimeInSeconds <= this.trimEnd) { - const start = Math.max(this.trimStart, seekTimeInSeconds); - const end = Math.min(this.trimEnd, endTime); + } + else if (this.timeline && this._ele && AudioBox.Enabled) { + const end = Math.min(this.timeline.trimEnd, endTime ?? this.timeline.trimEnd); + const start = Math.max(this.timeline.trimStart, seekTimeInSeconds); + if (seekTimeInSeconds >= 0 && this.timeline.trimStart <= end && seekTimeInSeconds <= this.timeline.trimEnd) { this._ele.currentTime = start; this._ele.play(); - runInAction(() => (this.mediaState = "playing")); - if (endTime !== this.clipDuration) { - this._play = setTimeout( + runInAction(() => this.mediaState = media_state.Playing); + if (end !== this.timeline.clipDuration) { + return this._play = setTimeout( () => { - this._ended = fullPlay ? 
true : this._ended; + if (fullPlay) this.setAnchorTime(this.timeline!.trimStart); this.Pause(); }, (end - start) * 1000 ); // use setTimeout to play a specific duration } + } + if (seekTimeInSeconds < 0 && seekTimeInSeconds > -1) { + setTimeout(() => this.playFrom(0), -seekTimeInSeconds * 1000); } else { this.Pause(); } @@ -285,7 +233,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent< // update the recording time updateRecordTime = () => { - if (this.mediaState === "recording") { + if (this.mediaState === media_state.Recording) { setTimeout(this.updateRecordTime, 30); if (this._paused) { this._pausedTime += (new Date().getTime() - this._recordStart) / 1000; @@ -300,22 +248,19 @@ export class AudioBox extends ViewBoxAnnotatableComponent< recordAudioAnnotation = async () => { this._stream = await navigator.mediaDevices.getUserMedia({ audio: true }); this._recorder = new MediaRecorder(this._stream); - this.dataDoc[this.props.fieldKey + "-recordingStart"] = new DateField( - new Date() - ); + this.dataDoc[this.fieldKey + "-recordingStart"] = new DateField(); DocUtils.ActiveRecordings.push(this); this._recorder.ondataavailable = async (e: any) => { const [{ result }] = await Networking.UploadFilesToServer(e.data); if (!(result instanceof Error)) { - this.props.Document[this.props.fieldKey] = new AudioField(result.accessPaths.agnostic.client); - if (this._trimEnd === undefined) this._trimEnd = this.clipDuration; + this.props.Document[this.fieldKey] = new AudioField(result.accessPaths.agnostic.client); } }; this._recordStart = new Date().getTime(); - runInAction(() => (this.mediaState = "recording")); + runInAction(() => this.mediaState = media_state.Recording); setTimeout(this.updateRecordTime, 0); this._recorder.start(); - setTimeout(() => this._recorder && this.stopRecording(), 60 * 60 * 1000); // stop after an hour + setTimeout(() => this.stopRecording(), 60 * 60 * 1000); // stop after an hour } // context menu @@ -353,17 +298,16 @@ export class AudioBox extends ViewBoxAnnotatableComponent< // stops the recording stopRecording = action(() => { - this._recorder.stop(); - this._recorder = undefined; - this.dataDoc[this.fieldKey + "-duration"] = - (new Date().getTime() - this._recordStart - this.pauseTime) / 1000; - this.mediaState = "paused"; - this._trimEnd = this.clipDuration; - this.layoutDoc.clipStart = 0; - this.layoutDoc.clipEnd = this.clipDuration; - this._stream?.getAudioTracks()[0].stop(); - const ind = DocUtils.ActiveRecordings.indexOf(this); - ind !== -1 && DocUtils.ActiveRecordings.splice(ind, 1); + if (this._recorder) { + this._recorder.stop(); + this._recorder = undefined; + this.dataDoc[this.fieldKey + "-duration"] = + (new Date().getTime() - this._recordStart - this.pauseTime) / 1000; + this.mediaState = media_state.Paused; + this._stream?.getAudioTracks()[0].stop(); + const ind = DocUtils.ActiveRecordings.indexOf(this); + ind !== -1 && DocUtils.ActiveRecordings.splice(ind, 1); + } }); // button for starting and stopping the recording @@ -376,16 +320,9 @@ export class AudioBox extends ViewBoxAnnotatableComponent< // for play button Play = (e?: any) => { - let start; - if (this._ended || this._ele!.currentTime === this.clipDuration) { - start = NumCast(this.layoutDoc.clipStart); - this._ended = false; - } - else { - start = this._ele!.currentTime; - } - - this.playFrom(start, this.trimEnd, true); + const eleTime = this._ele!.currentTime; + const start = eleTime === this.timeline?.trimDuration ? 
NumCast(this.layoutDoc.trimStart) : eleTime; + this.playFrom(start, undefined, true); e?.stopPropagation?.(); } @@ -402,7 +339,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent< ); Doc.GetProto(newDoc).recordingSource = this.dataDoc; Doc.GetProto(newDoc).recordingStart = ComputedField.MakeFunction( - `self.recordingSource["${this.props.fieldKey}-recordingStart"]` + `self.recordingSource["${this.fieldKey}-recordingStart"]` ); Doc.GetProto(newDoc).mediaState = ComputedField.MakeFunction( "self.recordingSource.mediaState" @@ -420,7 +357,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent< // returns the path of the audio file @computed get path() { - const field = Cast(this.props.Document[this.props.fieldKey], AudioField); + const field = Cast(this.props.Document[this.fieldKey], AudioField); const path = field instanceof AudioField ? field.url.href : ""; return path === nullAudio ? "" : path; } @@ -460,68 +397,33 @@ export class AudioBox extends ViewBoxAnnotatableComponent< e.stopPropagation(); } - playing = () => this.mediaState === "playing"; + playing = () => this.mediaState === media_state.Playing; playLink = (link: Doc) => { - const stack = this._stackedTimeline.current; if (link.annotationOn === this.rootDoc) { if (!this.layoutDoc.dontAutoPlayFollowedLinks) { - this.playFrom(stack?.anchorStart(link) || 0, stack?.anchorEnd(link)); + this.playFrom(this.timeline?.anchorStart(link) || 0, this.timeline?.anchorEnd(link)); } else { this._ele!.currentTime = this.layoutDoc._currentTimecode = - stack?.anchorStart(link) || 0; + this.timeline?.anchorStart(link) || 0; } } else { this.links .filter((l) => l.anchor1 === link || l.anchor2 === link) .forEach((l) => { const { la1, la2 } = this.getLinkData(l); - const startTime = stack?.anchorStart(la1) || stack?.anchorStart(la2); - const endTime = stack?.anchorEnd(la1) || stack?.anchorEnd(la2); + const startTime = this.timeline?.anchorStart(la1) || this.timeline?.anchorStart(la2); + const endTime = this.timeline?.anchorEnd(la1) || this.timeline?.anchorEnd(la2); if (startTime !== undefined) { if (!this.layoutDoc.dontAutoPlayFollowedLinks) { - endTime - ? this.playFrom(startTime, endTime) - : this.playFrom(startTime); + this.playFrom(startTime, endTime); } else { - this._ele!.currentTime = this.layoutDoc._currentTimecode = - startTime; + this._ele!.currentTime = this.layoutDoc._currentTimecode = startTime; } } }); } } - // shows trim controls - @action - startTrim = (scope: number) => { - if (this.mediaState === "playing") { - this.Pause(); - } - this._trimming = scope; - } - - // hides trim controls and displays new clip - @undoBatch - finishTrim = action(() => { - if (this.mediaState === "playing") { - this.Pause(); - } - this.layoutDoc.clipStart = this.trimStart; - this.layoutDoc.clipEnd = this.trimEnd; - this.setAnchorTime(Math.max(Math.min(this.trimEnd, this._ele!.currentTime), this.trimStart)); - this._trimming = AudioBox.ScopeNone; - }); - - @action - setStartTrim = (newStart: number) => { - this._trimStart = newStart; - } - - @action - setEndTrim = (newEnd: number) => { - this._trimEnd = newEnd; - } - isActiveChild = () => this._isAnyChildContentActive; timelineWhenChildContentsActiveChanged = (isActive: boolean) => this.props.whenChildContentsActiveChanged( @@ -543,9 +445,6 @@ export class AudioBox extends ViewBoxAnnotatableComponent< this.heightPercent) / 100 // panelHeight * heightPercent is player height. 
* heightPercent is timeline height (as per css inline) timelineWidth = () => this.props.PanelWidth() - AudioBox.playheadWidth; - trimEndFunc = () => this.trimEnd; - trimStartFunc = () => this.trimStart; - trimDurationFunc = () => this.trimDuration; @computed get renderTimeline() { return ( ); } + // hides trim controls and displays new clip + @undoBatch + finishTrim = action(() => { + this.Pause(); + this.setAnchorTime(Math.max(Math.min(this.timeline?.trimEnd || 0, this._ele!.currentTime), this.timeline?.trimStart || 0)); + this.timeline?.StopTrimming(); + }); + startTrim = (scope: TrimScope) => { + this.Pause(); + this.timeline?.StartTrimming(scope); + } + onClipPointerDown = (e: React.PointerEvent) => { - setupMoveUpEvents(this, e, returnFalse, returnFalse, action((e: PointerEvent, doubleTap?: boolean) => { + this.timeline && setupMoveUpEvents(this, e, returnFalse, returnFalse, action((e: PointerEvent, doubleTap?: boolean) => { if (doubleTap) { - this.startTrim(AudioBox.ScopeAll); - } else { - this._trimming !== AudioBox.ScopeNone ? this.finishTrim() : this.startTrim(AudioBox.ScopeClip); + this.startTrim(TrimScope.All); + } else if (this.timeline) { + this.Pause(); + this.timeline.IsTrimming !== TrimScope.None ? this.finishTrim() : this.startTrim(TrimScope.Clip); } })); } @@ -613,12 +509,12 @@ export class AudioBox extends ViewBoxAnnotatableComponent< return (
{ - if (r && this._stackedTimeline.current) { - this.dropDisposer?.(); - this.dropDisposer = DragManager.MakeDropTarget(r, + if (r && this.timeline) { + this._dropDisposer?.(); + this._dropDisposer = DragManager.MakeDropTarget(r, (e, de) => { const [xp, yp] = this.props.ScreenToLocalTransform().transformPoint(de.x, de.y); - de.complete.docDragData && this._stackedTimeline.current!.internalDocDrop(e, de, de.complete.docDragData, xp); + de.complete.docDragData && this.timeline!.internalDocDrop(e, de, de.complete.docDragData, xp); } , this.layoutDoc, undefined); } @@ -644,7 +540,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent< size={this.props.PanelHeight() < 36 ? "1x" : "2x"} />
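The clip button wired up above drives the whole trimming lifecycle: a double-tap widens the handles to the raw recording, while a single tap either enters clip-scoped trimming or commits the current handles. A compact sketch of that decision, reusing the TrimScope enum introduced in CollectionStackedTimeline (the surrounding Pause/StopTrimming calls are as in the diff):

enum TrimScope { All = 2, Clip = 1, None = 0 } // mirrors the enum declared above
// doubleTap -> trim the raw recording; otherwise toggle between clip trimming and committing.
function nextTrimAction(current: TrimScope, doubleTap: boolean): "startTrimAll" | "startTrimClip" | "finishTrim" {
  if (doubleTap) return "startTrimAll";
  return current === TrimScope.None ? "startTrimClip" : "finishTrim";
}
// finishTrim then pauses playback, clamps the playhead into [trimStart, trimEnd],
// and calls StopTrimming(), which writes clipStart/clipEnd back onto the layout document.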
- {this.mediaState === "recording" || this.mediaState === "paused" ? ( + {this.mediaState === media_state.Recording || this.mediaState === media_state.Playing ? (
e.stopPropagation()}>
{" "}
@@ -727,10 +623,10 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
{this.audio}
- {formatTime(Math.round(NumCast(this.layoutDoc._currentTimecode) - NumCast(this.clipStart)))} + {this.timeline && formatTime(Math.round(NumCast(this.layoutDoc._currentTimecode) - NumCast(this.timeline.clipStart)))}
- {formatTime(Math.round(NumCast(this.clipDuration)))} + {this.timeline && formatTime(Math.round(NumCast(this.timeline?.clipDuration)))}
@@ -738,4 +634,4 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
); } -} +} \ No newline at end of file diff --git a/src/client/views/nodes/VideoBox.tsx b/src/client/views/nodes/VideoBox.tsx index 8b33842ff..af65cce9f 100644 --- a/src/client/views/nodes/VideoBox.tsx +++ b/src/client/views/nodes/VideoBox.tsx @@ -16,7 +16,7 @@ import { CurrentUserUtils } from "../../util/CurrentUserUtils"; import { SelectionManager } from "../../util/SelectionManager"; import { SnappingManager } from "../../util/SnappingManager"; import { CollectionFreeFormView } from "../collections/collectionFreeForm/CollectionFreeFormView"; -import { CollectionStackedTimeline } from "../collections/CollectionStackedTimeline"; +import { CollectionStackedTimeline, TrimScope } from "../collections/CollectionStackedTimeline"; import { ContextMenu } from "../ContextMenu"; import { ContextMenuProps } from "../ContextMenuItem"; import { ViewBoxAnnotatableComponent, ViewBoxAnnotatableProps } from "../DocComponent"; @@ -31,6 +31,7 @@ import { DocumentManager } from "../../util/DocumentManager"; import { DocumentType } from "../../documents/DocumentTypes"; import { Tooltip } from "@material-ui/core"; import { AnchorMenu } from "../pdf/AnchorMenu"; +import { undoBatch } from "../../util/UndoManager"; const path = require('path'); type VideoDocument = makeInterface<[typeof documentSchema]>; @@ -101,7 +102,7 @@ export class VideoBox extends ViewBoxAnnotatableComponent this.layoutDoc.autoPlayAnchors = !this.layoutDoc.autoPlayAnchors, icon: "expand-arrows-alt" }); subitems.push({ description: "Toggle Native Controls", event: action(() => VideoBox._nativeControls = !VideoBox._nativeControls), icon: "expand-arrows-alt" }); subitems.push({ description: "Copy path", event: () => { Utils.CopyText(url); }, icon: "expand-arrows-alt" }); + subitems.push({ description: "Start Trim All", event: () => this.startTrim(TrimScope.All), icon: "expand-arrows-alt" }); + subitems.push({ description: "Start Trim Clip", event: () => this.startTrim(TrimScope.Clip), icon: "expand-arrows-alt" }); + subitems.push({ description: "Stop Trim", event: () => this.finishTrim(), icon: "expand-arrows-alt" }); + subitems.push({ description: "Copy path", event: () => { Utils.CopyText(url); }, icon: "expand-arrows-alt" }); ContextMenu.Instance.addItem({ description: "Options...", subitems: subitems, icon: "video" }); } } @@ -483,34 +488,47 @@ export class VideoBox extends ViewBoxAnnotatableComponent { + playFrom = (seekTimeInSeconds: number, endTime?: number) => { clearTimeout(this._playRegionTimer); - this._playRegionDuration = endTime - seekTimeInSeconds; if (Number.isNaN(this.player?.duration)) { setTimeout(() => this.playFrom(seekTimeInSeconds, endTime), 500); - } else if (this.player) { - if (seekTimeInSeconds < 0) { - if (seekTimeInSeconds > -1) { - setTimeout(() => this.playFrom(0), -seekTimeInSeconds * 1000); - } else { - this.Pause(); - } - } else if (seekTimeInSeconds <= this.player.duration) { - this.player.currentTime = seekTimeInSeconds; + } + else if (this.player) { + const end = Math.min(this.timeline?.trimEnd ?? this.duration, endTime ?? this.timeline?.trimEnd ?? this.duration); + const start = Math.max(this.timeline?.trimStart ?? 
0, seekTimeInSeconds); + this._playRegionDuration = end - seekTimeInSeconds; + if (seekTimeInSeconds >= 0 && (this.timeline?.trimStart || 0) <= end && seekTimeInSeconds <= (this.timeline?.trimEnd || this.duration)) { + this.player.currentTime = start; this._audioPlayer && (this._audioPlayer.currentTime = seekTimeInSeconds); this.player.play(); this._audioPlayer?.play(); runInAction(() => this._playing = true); if (endTime !== this.duration) { - this._playRegionTimer = setTimeout(() => this.Pause(), (this._playRegionDuration) * 1000); // use setTimeout to play a specific duration + return this._playRegionTimer = + setTimeout(() => this.Pause(), (this._playRegionDuration) * 1000); // use setTimeout to play a specific duration } + } + if (seekTimeInSeconds < 0 && seekTimeInSeconds > -1) { + setTimeout(() => this.playFrom(0), -seekTimeInSeconds * 1000); } else { this.Pause(); } } } + // hides trim controls and displays new clip + @undoBatch + finishTrim = action(() => { + this.Pause(); + this._stackedTimeline.current?.StopTrimming(); + }); + startTrim = (scope: TrimScope) => { + this.Pause(); + this._stackedTimeline.current?.StartTrimming(scope); + } + playLink = (doc: Doc) => { const startTime = Math.max(0, (this._stackedTimeline.current?.anchorStart(doc) || 0)); @@ -524,7 +542,9 @@ export class VideoBox extends ViewBoxAnnotatableComponent this._playing; timelineWhenChildContentsActiveChanged = action((isActive: boolean) => this.props.whenChildContentsActiveChanged(this._isAnyChildContentActive = isActive)); timelineScreenToLocal = () => this.props.ScreenToLocalTransform().scale(this.scaling()).translate(0, -this.heightPercent / 100 * this.props.PanelHeight()); - setAnchorTime = (time: number) => this.player!.currentTime = this.layoutDoc._currentTimecode = time; + setAnchorTime = (time: number) => { + this.player!.currentTime = this.layoutDoc._currentTimecode = time; + } timelineHeight = () => this.props.PanelHeight() * (100 - this.heightPercent) / 100; trimEndFunc = () => this.duration; @computed get renderTimeline() { @@ -550,15 +570,6 @@ export class VideoBox extends ViewBoxAnnotatableComponent
; } -- cgit v1.2.3-70-g09d2 From 534e21a74f646b2e6fd009f6bf910679869797b4 Mon Sep 17 00:00:00 2001 From: bobzel Date: Fri, 24 Sep 2021 22:39:30 -0400 Subject: from last --- src/client/views/collections/CollectionStackedTimeline.tsx | 2 +- src/client/views/nodes/AudioBox.tsx | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) (limited to 'src/client/views/collections') diff --git a/src/client/views/collections/CollectionStackedTimeline.tsx b/src/client/views/collections/CollectionStackedTimeline.tsx index 48014921a..b6321043c 100644 --- a/src/client/views/collections/CollectionStackedTimeline.tsx +++ b/src/client/views/collections/CollectionStackedTimeline.tsx @@ -622,7 +622,7 @@ export class CollectionStackedTimeline extends CollectionSubView< }} /> - {this.IsTrimming && ( + {this.IsTrimming !== TrimScope.None && ( <>
{ const eleTime = this._ele!.currentTime; - const start = eleTime === this.timeline?.trimDuration ? NumCast(this.layoutDoc.trimStart) : eleTime; + const start = eleTime === this.timeline?.trimDuration ? this.timeline.trimStart : eleTime; this.playFrom(start, undefined, true); e?.stopPropagation?.(); } -- cgit v1.2.3-70-g09d2 From 93996d3a25733fbf90b24e9d671aa899b2055e47 Mon Sep 17 00:00:00 2001 From: bobzel Date: Sat, 25 Sep 2021 10:44:16 -0400 Subject: fixed play() bug from last. fixed highlight of punch in/punch out region with spacebar --- src/client/views/collections/CollectionStackedTimeline.tsx | 9 +++++++++ src/client/views/nodes/AudioBox.tsx | 3 +-- src/client/views/nodes/VideoBox.tsx | 3 +-- 3 files changed, 11 insertions(+), 4 deletions(-) (limited to 'src/client/views/collections') diff --git a/src/client/views/collections/CollectionStackedTimeline.tsx b/src/client/views/collections/CollectionStackedTimeline.tsx index b6321043c..f533bee52 100644 --- a/src/client/views/collections/CollectionStackedTimeline.tsx +++ b/src/client/views/collections/CollectionStackedTimeline.tsx @@ -145,12 +145,21 @@ export class CollectionStackedTimeline extends CollectionSubView< })!; } + _disposer: IReactionDisposer | undefined; componentDidMount() { document.addEventListener("keydown", this.keyEvents, true); + this._disposer = reaction(() => this.currentTime, + () => { + if (CollectionStackedTimeline.SelectingRegion === this) { + this._markerEnd = this.currentTime; + } + + }); } @action componentWillUnmount() { + this._disposer?.(); document.removeEventListener("keydown", this.keyEvents, true); if (CollectionStackedTimeline.SelectingRegion === this) { CollectionStackedTimeline.SelectingRegion = undefined; diff --git a/src/client/views/nodes/AudioBox.tsx b/src/client/views/nodes/AudioBox.tsx index 7eb5639c0..2574b5a45 100644 --- a/src/client/views/nodes/AudioBox.tsx +++ b/src/client/views/nodes/AudioBox.tsx @@ -222,8 +222,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent -1) { + } else if (seekTimeInSeconds < 0 && seekTimeInSeconds > -1) { setTimeout(() => this.playFrom(0), -seekTimeInSeconds * 1000); } else { this.Pause(); diff --git a/src/client/views/nodes/VideoBox.tsx b/src/client/views/nodes/VideoBox.tsx index af65cce9f..b00fb75a3 100644 --- a/src/client/views/nodes/VideoBox.tsx +++ b/src/client/views/nodes/VideoBox.tsx @@ -510,8 +510,7 @@ export class VideoBox extends ViewBoxAnnotatableComponent this.Pause(), (this._playRegionDuration) * 1000); // use setTimeout to play a specific duration } - } - if (seekTimeInSeconds < 0 && seekTimeInSeconds > -1) { + } else if (seekTimeInSeconds < 0 && seekTimeInSeconds > -1) { setTimeout(() => this.playFrom(0), -seekTimeInSeconds * 1000); } else { this.Pause(); -- cgit v1.2.3-70-g09d2 From e3c516cc0b7c0e31994e673d4fee4afbb2b7d3c1 Mon Sep 17 00:00:00 2001 From: bobzel Date: Sat, 25 Sep 2021 10:56:18 -0400 Subject: better version of punch in/out without reactions. 
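The play fix above is the tail end of the region-playback scheme: clamp the requested range to the trim bounds, seek the element to the clamped start, and schedule a pause when the range elapses; a seek just below zero (a link that lands slightly before the media starts) is retried from zero after the remaining offset. A stripped-down sketch without the MobX/Doc plumbing (media is any HTMLMediaElement):

function playRegion(media: HTMLMediaElement, seekTime: number, trimStart: number, trimEnd: number,
  endTime?: number, onDone?: () => void): ReturnType<typeof setTimeout> | undefined {
  const end = Math.min(trimEnd, endTime ?? trimEnd);
  const start = Math.max(trimStart, seekTime);
  if (seekTime >= 0 && trimStart <= end && seekTime <= trimEnd) {
    media.currentTime = start;
    media.play();
    // stop (and optionally reset) once the requested region has elapsed
    return setTimeout(() => { media.pause(); onDone?.(); }, (end - start) * 1000);
  } else if (seekTime < 0 && seekTime > -1) {
    // slightly-early seek: retry from 0 after the remaining offset, as in the diff above
    return setTimeout(() => playRegion(media, 0, trimStart, trimEnd, endTime, onDone), -seekTime * 1000);
  }
  media.pause();
  return undefined;
}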
--- .../collections/CollectionStackedTimeline.tsx | 31 +++++++--------------- src/client/views/nodes/AudioBox.tsx | 2 -- 2 files changed, 9 insertions(+), 24 deletions(-) (limited to 'src/client/views/collections') diff --git a/src/client/views/collections/CollectionStackedTimeline.tsx b/src/client/views/collections/CollectionStackedTimeline.tsx index f533bee52..cbf232d3d 100644 --- a/src/client/views/collections/CollectionStackedTimeline.tsx +++ b/src/client/views/collections/CollectionStackedTimeline.tsx @@ -5,7 +5,6 @@ import { IReactionDisposer, observable, reaction, - runInAction, } from "mobx"; import { observer } from "mobx-react"; import { computedFn } from "mobx-utils"; @@ -83,7 +82,7 @@ export class CollectionStackedTimeline extends CollectionSubView< private _timeline: HTMLDivElement | null = null; private _markerStart: number = 0; - @observable _markerEnd: number = 0; + @observable _markerEnd: number | undefined; @observable _trimming: number = TrimScope.None; @observable _trimStart: number = 0; @observable _trimEnd: number = 0; @@ -115,12 +114,13 @@ export class CollectionStackedTimeline extends CollectionSubView< } @computed get selectionContainer() { - return CollectionStackedTimeline.SelectingRegion !== this ? null : ( + const markerEnd = CollectionStackedTimeline.SelectingRegion === this ? this.currentTime : this._markerEnd; + return markerEnd === undefined ? null : (
); @@ -145,21 +145,12 @@ export class CollectionStackedTimeline extends CollectionSubView< })!; } - _disposer: IReactionDisposer | undefined; componentDidMount() { document.addEventListener("keydown", this.keyEvents, true); - this._disposer = reaction(() => this.currentTime, - () => { - if (CollectionStackedTimeline.SelectingRegion === this) { - this._markerEnd = this.currentTime; - } - - }); } @action componentWillUnmount() { - this._disposer?.(); document.removeEventListener("keydown", this.keyEvents, true); if (CollectionStackedTimeline.SelectingRegion === this) { CollectionStackedTimeline.SelectingRegion = undefined; @@ -200,6 +191,7 @@ export class CollectionStackedTimeline extends CollectionSubView< this._markerStart, this._markerEnd ); + this._markerEnd = undefined; CollectionStackedTimeline.SelectingRegion = undefined; } } @@ -229,17 +221,13 @@ export class CollectionStackedTimeline extends CollectionSubView< if (rect && this.props.isContentActive()) { const wasPlaying = this.props.playing(); if (wasPlaying) this.props.Pause(); - var wasSelecting = CollectionStackedTimeline.SelectingRegion === this; + var wasSelecting = this._markerEnd !== undefined; setupMoveUpEvents( this, e, action((e) => { - if ( - !wasSelecting && - CollectionStackedTimeline.SelectingRegion !== this - ) { + if (!wasSelecting) { this._markerStart = this._markerEnd = this.toTimeline(clientX - rect.x, rect.width); - CollectionStackedTimeline.SelectingRegion = this; wasSelecting = true; } this._markerEnd = this.toTimeline(e.clientX - rect.x, rect.width); @@ -254,7 +242,6 @@ export class CollectionStackedTimeline extends CollectionSubView< } if ( !isClick && - CollectionStackedTimeline.SelectingRegion === this && Math.abs(movement[0]) > 15 && !this.IsTrimming ) { @@ -270,7 +257,7 @@ export class CollectionStackedTimeline extends CollectionSubView< setTimeout(() => DocumentManager.Instance.getDocumentView(anchor)?.select(false)); } (!isClick || !wasSelecting) && - (CollectionStackedTimeline.SelectingRegion = undefined); + (this._markerEnd = undefined); }), (e, doubleTap) => { this.props.select(false); diff --git a/src/client/views/nodes/AudioBox.tsx b/src/client/views/nodes/AudioBox.tsx index 2574b5a45..f52b54d01 100644 --- a/src/client/views/nodes/AudioBox.tsx +++ b/src/client/views/nodes/AudioBox.tsx @@ -71,8 +71,6 @@ export class AudioBox extends ViewBoxAnnotatableComponent = this.layoutDoc._height; @observable _paused: boolean = false; @computed get recordingStart() { return DateCast(this.dataDoc[this.fieldKey + "-recordingStart"])?.date.getTime(); } -- cgit v1.2.3-70-g09d2 From 7d415bca08a17481d44d6bcd3b0df2672f6f5db0 Mon Sep 17 00:00:00 2001 From: bobzel Date: Sat, 25 Sep 2021 12:30:35 -0400 Subject: added a hacky fix to what seems to be a Chrome bug when auto expanding the left flyout panel --- src/client/views/MainView.tsx | 6 ++++++ src/client/views/collections/collectionFreeForm/MarqueeView.tsx | 3 +-- 2 files changed, 7 insertions(+), 2 deletions(-) (limited to 'src/client/views/collections') diff --git a/src/client/views/MainView.tsx b/src/client/views/MainView.tsx index 35c5801e5..7edcd6217 100644 --- a/src/client/views/MainView.tsx +++ b/src/client/views/MainView.tsx @@ -467,7 +467,13 @@ export class MainView extends React.Component { } expandFlyout = action((button: Doc) => { + // bcz: What's going on here!? + // Chrome(not firefox) seems to have a bug when the flyout expands and there's a zoomed freeform tab. 
All of the div below the CollectionFreeFormView's main div + // generate the wrong value from getClientRectangle() -- specifically they return an 'x' that is the flyout's width greater than it should be. + // interactively adjusting the flyout fixes the problem. So does programmatically changing the value after a timeout to something *fractionally* different (ie, 1.5, not 1);) this._leftMenuFlyoutWidth = (this._leftMenuFlyoutWidth || 250); + setTimeout(action(() => this._leftMenuFlyoutWidth += 0.5), 0); + this._sidebarContent.proto = button.target as any; this.LastButton = button; }); diff --git a/src/client/views/collections/collectionFreeForm/MarqueeView.tsx b/src/client/views/collections/collectionFreeForm/MarqueeView.tsx index 81f6307d1..24a7d77e0 100644 --- a/src/client/views/collections/collectionFreeForm/MarqueeView.tsx +++ b/src/client/views/collections/collectionFreeForm/MarqueeView.tsx @@ -646,8 +646,7 @@ export class MarqueeView extends React.Component e.preventDefault()} -- cgit v1.2.3-70-g09d2 From a793b7e981e46b7c98bee6cce3faaf3a5b05f6ae Mon Sep 17 00:00:00 2001 From: bobzel Date: Sun, 26 Sep 2021 02:55:23 -0400 Subject: fixed warnings, fixed bug following link w/auto play. plus refactorings. --- src/client/views/AudioWaveform.tsx | 11 +- .../collections/CollectionStackedTimeline.tsx | 78 ++-- src/client/views/nodes/AudioBox.tsx | 291 +++++++-------- src/client/views/nodes/LabelBox.tsx | 2 +- src/client/views/nodes/VideoBox.tsx | 405 ++++++++++----------- 5 files changed, 373 insertions(+), 414 deletions(-) (limited to 'src/client/views/collections') diff --git a/src/client/views/AudioWaveform.tsx b/src/client/views/AudioWaveform.tsx index 0e9c00656..270b3869c 100644 --- a/src/client/views/AudioWaveform.tsx +++ b/src/client/views/AudioWaveform.tsx @@ -25,14 +25,13 @@ export interface AudioWaveformProps { export class AudioWaveform extends React.Component { public static NUMBER_OF_BUCKETS = 100; _disposer: IReactionDisposer | undefined; - @computed get _waveHeight() { - return Math.max(50, this.props.PanelHeight()); - } - + @computed get waveHeight() { return Math.max(50, this.props.PanelHeight()); } @computed get clipStart() { return this.props.clipStart; } @computed get clipEnd() { return this.props.clipEnd; } - audioBucketField = (start: number, end: number) => { return "audioBuckets/" + start.toFixed(2).replace(".", "_") + "/" + end.toFixed(2).replace(".", "_"); } @computed get audioBuckets() { return Cast(this.props.layoutDoc[this.audioBucketField(this.clipStart, this.clipEnd)], listSpec("number"), []); } + + audioBucketField = (start: number, end: number) => "audioBuckets/" + start.toFixed(2).replace(".", "_") + "/" + end.toFixed(2).replace(".", "_"); + componentWillUnmount() { this._disposer?.(); } @@ -87,7 +86,7 @@ export class AudioWaveform extends React.Component {
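// Editor's aside (sketch of the workaround pattern used in the expandFlyout change above; the
// names here are hypothetical and this block is not part of the patch): when dependent
// getBoundingClientRect() readings come back stale after a programmatic layout change, one
// pragmatic nudge is to re-assign the driving value to a fractionally different number on the
// next tick so the browser runs another layout pass. The 0.5px delta and 0ms timeout mirror the
// commit; whether this is a Chrome bug or a missed invalidation is left open, as the original
// comment does. Assumes MobX 6 with legacy decorators.
import { action, observable, makeObservable } from "mobx";

class FlyoutWidthSketch {
    @observable width = 0;

    constructor() { makeObservable(this); }

    expand = action(() => {
        this.width = this.width || 250;                   // open to a default width
        setTimeout(action(() => (this.width += 0.5)), 0); // nudge so client rects get recomputed
    });
}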
- ); - } - constructor(props: any) { super(props); // onClick play scripts @@ -157,8 +129,23 @@ export class CollectionStackedTimeline extends CollectionSubView< } } - anchorStart = (anchor: Doc) => NumCast(anchor._timecodeToShow, NumCast(anchor[this.props.startTag])) - anchorEnd = (anchor: Doc, val: any = null) => NumCast(anchor._timecodeToHide, NumCast(anchor[this.props.endTag], val)); + public get IsTrimming() { return this._trimming; } + + @action + public StartTrimming(scope: TrimScope) { + this._trimStart = this.clipStart; + this._trimEnd = this.clipEnd; + this._trimming = scope; + } + @action + public StopTrimming() { + this.layoutDoc.clipStart = this.trimStart; + this.layoutDoc.clipEnd = this.trimEnd; + this._trimming = TrimScope.None; + } + + anchorStart = (anchor: Doc) => NumCast(anchor._timecodeToShow, NumCast(anchor[this.props.startTag])); + anchorEnd = (anchor: Doc, val: any = null) => NumCast(anchor._timecodeToHide, NumCast(anchor[this.props.endTag], val) ?? null); toTimeline = (screen_delta: number, width: number) => { return Math.max( this.clipStart, @@ -345,13 +332,13 @@ export class CollectionStackedTimeline extends CollectionSubView< const localPt = this.props.ScreenToLocalTransform().transformPoint(de.x, de.y); const x = localPt[0] - docDragData.offset[0]; const timelinePt = this.toTimeline(x, this.props.PanelWidth()); - for (let i = 0; i < docDragData.droppedDocuments.length; i++) { - const d = Doc.GetProto(docDragData.droppedDocuments[i]); + docDragData.droppedDocuments.forEach(drop => { + const d = Doc.GetProto(drop); if (this.anchorEnd(d) !== undefined) { d[d._timecodeToHide === undefined ? this.props.endTag : "_timecodeToHide"] = timelinePt + this.anchorEnd(d) - this.anchorStart(d); } d[d._timecodToShow === undefined ? this.props.startTag : "_timecodToShow"] = timelinePt; - } + }); return true; } @@ -483,9 +470,12 @@ export class CollectionStackedTimeline extends CollectionSubView< } dictationHeightPercent = 50; - dictationHeight = () => (this.props.PanelHeight() * (100 - this.dictationHeightPercent)) / 100 - timelineContentHeight = () => (this.props.PanelHeight() * this.dictationHeightPercent) / 100 - dictationScreenToLocalTransform = () => this.props.ScreenToLocalTransform().translate(0, -this.timelineContentHeight()) + dictationHeight = () => (this.props.PanelHeight() * (100 - this.dictationHeightPercent)) / 100; + timelineContentHeight = () => (this.props.PanelHeight() * this.dictationHeightPercent) / 100; + dictationScreenToLocalTransform = () => this.props.ScreenToLocalTransform().translate(0, -this.timelineContentHeight()); + isContentActive = () => this.props.isSelected() || this.props.isContentActive(); + currentTimecode = () => this.currentTime; + @computed get renderDictation() { const dictation = Cast(this.dataDoc[this.props.dictationKey], Doc, null); return !dictation ? null : ( @@ -537,9 +527,19 @@ export class CollectionStackedTimeline extends CollectionSubView<
); } + @computed get selectionContainer() { + const markerEnd = CollectionStackedTimeline.SelectingRegion === this ? this.currentTime : this._markerEnd; + return markerEnd === undefined ? null : ( +
+ ); + } - isContentActive = () => this.props.isSelected() || this.props.isContentActive(); - currentTimecode = () => this.currentTime; render() { const timelineContentWidth = this.props.PanelWidth(); const overlaps: { @@ -760,7 +760,7 @@ class StackedTimelineAnchor extends React.Component e, (e) => { if (!undo) undo = UndoManager.StartBatch("drag anchor"); - return changeAnchor(anchor, left, newTime(e)) + return changeAnchor(anchor, left, newTime(e)); }, (e) => { this.props.setTime(newTime(e)); diff --git a/src/client/views/nodes/AudioBox.tsx b/src/client/views/nodes/AudioBox.tsx index 6e6558030..fa78d2301 100644 --- a/src/client/views/nodes/AudioBox.tsx +++ b/src/client/views/nodes/AudioBox.tsx @@ -1,6 +1,6 @@ import React = require("react"); import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; -import { action, computed, IReactionDisposer, observable, reaction, runInAction } from "mobx"; +import { action, computed, IReactionDisposer, observable, runInAction } from "mobx"; import { observer } from "mobx-react"; import { DateField } from "../../../fields/DateField"; import { Doc, DocListCast } from "../../../fields/Doc"; @@ -19,10 +19,8 @@ import { CollectionStackedTimeline, TrimScope } from "../collections/CollectionS import { ContextMenu } from "../ContextMenu"; import { ContextMenuProps } from "../ContextMenuItem"; import { ViewBoxAnnotatableComponent, ViewBoxAnnotatableProps } from "../DocComponent"; -import { Colors } from "../global/globalEnums"; import "./AudioBox.scss"; import { FieldView, FieldViewProps } from "./FieldView"; -import { LinkDocPreview } from "./LinkDocPreview"; declare class MediaRecorder { constructor(e: any); // whatever MediaRecorder has @@ -36,10 +34,14 @@ enum media_state { Recording = "recording", Paused = "paused", Playing = "playing" -}; +} @observer export class AudioBox extends ViewBoxAnnotatableComponent(AudioDocument) { public static LayoutString(fieldKey: string) { return FieldView.LayoutString(AudioBox, fieldKey); } + public static SetScrubTime = action((timeInMillisFrom1970: number) => { + AudioBox._scrubTime = 0; + AudioBox._scrubTime = timeInMillisFrom1970; + }); public static Enabled = false; static playheadWidth = 40; // width of playhead static heightPercent = 75; // height of timeline in percent of height of audioBox. @@ -63,13 +65,30 @@ export class AudioBox extends ViewBoxAnnotatableComponent { - AudioBox._scrubTime = 0; - AudioBox._scrubTime = timeInMillisFrom1970; - }); + get timeline() { return this._stackedTimeline.current; } // can't be computed since it's not observable + + componentWillUnmount() { + this._dropDisposer?.(); + Object.values(this._disposers).forEach((disposer) => disposer?.()); + const ind = DocUtils.ActiveRecordings.indexOf(this); + ind !== -1 && DocUtils.ActiveRecordings.splice(ind, 1); + } + + @action + componentDidMount() { + this.props.setContentView?.(this); // this tells the DocumentView that this AudioBox is the "content" of the document. this allows the DocumentView to indirectly call getAnchor() on the AudioBox when making a link. + + this.mediaState = this.path ? 
media_state.Paused : undefined as any as media_state; + + this.path && this.setAnchorTime(NumCast(this.layoutDoc.clipStart)); + this.path && this.timecodeChanged(); + } getLinkData(l: Doc) { let la1 = l.anchor1 as Doc; @@ -100,34 +119,15 @@ export class AudioBox extends ViewBoxAnnotatableComponent disposer?.()); - const ind = DocUtils.ActiveRecordings.indexOf(this); - ind !== -1 && DocUtils.ActiveRecordings.splice(ind, 1); - } - - @action - componentDidMount() { - this.props.setContentView?.(this); // this tells the DocumentView that this AudioBox is the "content" of the document. this allows the DocumentView to indirectly call getAnchor() on the AudioBox when making a link. - - this.mediaState = this.path ? media_state.Paused : undefined as any as media_state; - - this.path && this.setAnchorTime(NumCast(this.layoutDoc.clipStart)); - this.path && this.timecodeChanged(); - } - // for updating the timecode @action timecodeChanged = () => { if (this.mediaState !== media_state.Recording && this._ele) { this.links - .map((l) => this.getLinkData(l)) + .map(l => this.getLinkData(l)) .forEach(({ la1, la2, linkTime }) => { - if ( - linkTime > NumCast(this.layoutDoc._currentTimecode) && - linkTime < this._ele!.currentTime - ) { + if (linkTime > NumCast(this.layoutDoc._currentTimecode) && + linkTime < this._ele!.currentTime) { Doc.linkFollowHighlight(la1); } }); @@ -135,23 +135,11 @@ export class AudioBox extends ViewBoxAnnotatableComponent { - this._ele!.pause(); - this.mediaState = media_state.Paused; - }); - - // play audio for documents created during recording - playFromTime = (absoluteTime: number) => { - this.recordingStart && - this.playFrom((absoluteTime - this.recordingStart) / 1000); - } - // play back the audio from time @action - playFrom = (seekTimeInSeconds: number, endTime?: number, fullPlay: boolean = false): any => { - clearTimeout(this._play); - if (Number.isNaN(this._ele?.duration)) { + playFrom = (seekTimeInSeconds: number, endTime?: number, fullPlay: boolean = false) => { + clearTimeout(this._play); // abort any previous clip ending + if (Number.isNaN(this._ele?.duration)) { // audio element isn't loaded yet... 
wait 1/2 second and try again setTimeout(() => this.playFrom(seekTimeInSeconds, endTime), 500); } else if (this.timeline && this._ele && AudioBox.Enabled) { @@ -160,18 +148,13 @@ export class AudioBox extends ViewBoxAnnotatableComponent= 0 && this.timeline.trimStart <= end && seekTimeInSeconds <= this.timeline.trimEnd) { this._ele.currentTime = start; this._ele.play(); - runInAction(() => this.mediaState = media_state.Playing); - if (end !== this.timeline.clipDuration) { - return this._play = setTimeout( - () => { - if (fullPlay) this.setAnchorTime(this.timeline!.trimStart); - this.Pause(); - }, - (end - start) * 1000 - ); // use setTimeout to play a specific duration - } - } else if (seekTimeInSeconds < 0 && seekTimeInSeconds > -1) { - setTimeout(() => this.playFrom(0), -seekTimeInSeconds * 1000); + this.mediaState = media_state.Playing; + this._play = setTimeout( + () => { + if (fullPlay) this.setAnchorTime(this.timeline!.trimStart); + this.Pause(); + }, + (end - start) * 1000); } else { this.Pause(); } @@ -185,8 +168,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent this.mediaState = media_state.Recording); - setTimeout(this.updateRecordTime, 0); + setTimeout(this.updateRecordTime); this._recorder.start(); - setTimeout(() => this.stopRecording(), 60 * 60 * 1000); // stop after an hour + setTimeout(this.stopRecording, 60 * 60 * 1000); // stop after an hour + } + + @action + stopRecording = () => { + if (this._recorder) { + this._recorder.stop(); + this._recorder = undefined; + this.dataDoc[this.fieldKey + "-duration"] = (new Date().getTime() - this._recordStart - this.pauseTime) / 1000; + this.mediaState = media_state.Paused; + this._stream?.getAudioTracks()[0].stop(); + const ind = DocUtils.ActiveRecordings.indexOf(this); + ind !== -1 && DocUtils.ActiveRecordings.splice(ind, 1); + } } // context menu @@ -243,22 +238,8 @@ export class AudioBox extends ViewBoxAnnotatableComponent { - if (this._recorder) { - this._recorder.stop(); - this._recorder = undefined; - this.dataDoc[this.fieldKey + "-duration"] = - (new Date().getTime() - this._recordStart - this.pauseTime) / 1000; - this.mediaState = media_state.Paused; - this._stream?.getAudioTracks()[0].stop(); - const ind = DocUtils.ActiveRecordings.indexOf(this); - ind !== -1 && DocUtils.ActiveRecordings.splice(ind, 1); - } - }); - // button for starting and stopping the recording - recordClick = (e: React.MouseEvent) => { + Record = (e: React.MouseEvent) => { if (e.button === 0 && !e.ctrlKey) { this._recorder ? this.stopRecording() : this.recordAudioAnnotation(); e.stopPropagation(); @@ -267,12 +248,19 @@ export class AudioBox extends ViewBoxAnnotatableComponent { - const eleTime = this._ele!.currentTime; - const start = eleTime === this.timeline?.trimDuration ? this.timeline.trimStart : eleTime; + const eleTime = this._ele?.currentTime || 0; + const start = eleTime === this.timeline?.trimEnd ? this.timeline.trimStart : eleTime; this.playFrom(start, undefined, true); e?.stopPropagation?.(); } + // pause play back + @action + Pause = () => { + this._ele?.pause(); + this.mediaState = media_state.Paused; + } + // creates a text document for dictation onFile = (e: any) => { const newDoc = CurrentUserUtils.GetNewTextDoc( @@ -302,27 +290,6 @@ export class AudioBox extends ViewBoxAnnotatableComponent { - const duration = this._ele?.duration; - if (duration && duration !== Infinity) { - this.dataDoc[this.fieldKey + "-duration"] = duration; - } - })} - className={`audiobox-control${this.props.isContentActive() ? 
"-interactive" : ""}`}> - - Not supported. - ; - } - // pause the time during recording phase @action recordPause = (e: React.MouseEvent) => { @@ -341,14 +308,12 @@ export class AudioBox extends ViewBoxAnnotatableComponent this.mediaState === media_state.Playing; playLink = (link: Doc) => { if (link.annotationOn === this.rootDoc) { if (!this.layoutDoc.dontAutoPlayFollowedLinks) { this.playFrom(this.timeline?.anchorStart(link) || 0, this.timeline?.anchorEnd(link)); } else { - this._ele!.currentTime = this.layoutDoc._currentTimecode = - this.timeline?.anchorStart(link) || 0; + this._ele!.currentTime = this.layoutDoc._currentTimecode = this.timeline?.anchorStart(link) || 0; } } else { this.links @@ -368,6 +333,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent this.mediaState === media_state.Playing; isActiveChild = () => this._isAnyChildContentActive; timelineWhenChildContentsActiveChanged = (isActive: boolean) => this.props.whenChildContentsActiveChanged( @@ -380,54 +346,21 @@ export class AudioBox extends ViewBoxAnnotatableComponent { - (this._ele!.currentTime = this.layoutDoc._currentTimecode = time); - } + setAnchorTime = (time: number) => this._ele!.currentTime = this.layoutDoc._currentTimecode = time; + timelineWidth = () => this.props.PanelWidth() - AudioBox.playheadWidth; timelineHeight = () => (((this.props.PanelHeight() * AudioBox.heightPercent) / 100) * AudioBox.heightPercent) / 100 // panelHeight * heightPercent is player height. * heightPercent is timeline height (as per css inline) - timelineWidth = () => this.props.PanelWidth() - AudioBox.playheadWidth; - @computed get renderTimeline() { - return ( - - ); - } - // hides trim controls and displays new clip + + @undoBatch - finishTrim = action(() => { + finishTrim = () => { // hides trim controls and displays new clip this.Pause(); this.setAnchorTime(Math.max(Math.min(this.timeline?.trimEnd || 0, this._ele!.currentTime), this.timeline?.trimStart || 0)); this.timeline?.StopTrimming(); - }); + } + startTrim = (scope: TrimScope) => { this.Pause(); this.timeline?.StartTrimming(scope); @@ -444,6 +377,18 @@ export class AudioBox extends ViewBoxAnnotatableComponent { + if (r && this.timeline) { + this._dropDisposer?.(); + this._dropDisposer = DragManager.MakeDropTarget(r, + (e, de) => { + const [xp, yp] = this.props.ScreenToLocalTransform().transformPoint(de.x, de.y); + de.complete.docDragData && this.timeline!.internalDocDrop(e, de, de.complete.docDragData, xp); + }, + this.layoutDoc, undefined); + } + } + @computed get recordingControls() { return
@@ -453,8 +398,8 @@ export class AudioBox extends ViewBoxAnnotatableComponent
{[media_state.Recording, media_state.Playing].includes(this.mediaState) ? -
e.stopPropagation()}> -
+
e.stopPropagation()}> +
@@ -522,16 +467,52 @@ export class AudioBox extends ViewBoxAnnotatableComponent; } - setupTimelineDrop = (r: HTMLDivElement | null) => { - if (r && this.timeline) { - this._dropDisposer?.(); - this._dropDisposer = DragManager.MakeDropTarget(r, - (e, de) => { - const [xp, yp] = this.props.ScreenToLocalTransform().transformPoint(de.x, de.y); - de.complete.docDragData && this.timeline!.internalDocDrop(e, de, de.complete.docDragData, xp); - }, - this.layoutDoc, undefined); - } + @computed get renderTimeline() { + return ( + + ); + } + // returns the html audio element + @computed get audio() { + return ; } render() { diff --git a/src/client/views/nodes/LabelBox.tsx b/src/client/views/nodes/LabelBox.tsx index 935c878ee..97b1aac86 100644 --- a/src/client/views/nodes/LabelBox.tsx +++ b/src/client/views/nodes/LabelBox.tsx @@ -111,7 +111,7 @@ export class LabelBox extends ViewBoxBaseComponent<(FieldViewProps & LabelBoxPro verticalAlign: "center", textAlign: "center", whiteSpace: "nowrap" - }) + }); } }}>{label.startsWith("#") ? (null) : label}
diff --git a/src/client/views/nodes/VideoBox.tsx b/src/client/views/nodes/VideoBox.tsx index 3435c2a24..2befb4128 100644 --- a/src/client/views/nodes/VideoBox.tsx +++ b/src/client/views/nodes/VideoBox.tsx @@ -1,5 +1,6 @@ import React = require("react"); import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; +import { Tooltip } from "@material-ui/core"; import { action, computed, IReactionDisposer, observable, ObservableMap, reaction, runInAction, untracked } from "mobx"; import { observer } from "mobx-react"; import * as rp from 'request-promise'; @@ -8,13 +9,16 @@ import { documentSchema } from "../../../fields/documentSchemas"; import { InkTool } from "../../../fields/InkField"; import { makeInterface } from "../../../fields/Schema"; import { Cast, NumCast, StrCast } from "../../../fields/Types"; -import { AudioField, nullAudio, VideoField } from "../../../fields/URLField"; -import { emptyFunction, formatTime, OmitKeys, returnOne, setupMoveUpEvents, Utils, returnFalse, returnZero } from "../../../Utils"; +import { AudioField, VideoField } from "../../../fields/URLField"; +import { emptyFunction, formatTime, OmitKeys, returnFalse, returnOne, setupMoveUpEvents, Utils } from "../../../Utils"; import { Docs, DocUtils } from "../../documents/Documents"; +import { DocumentType } from "../../documents/DocumentTypes"; import { Networking } from "../../Network"; import { CurrentUserUtils } from "../../util/CurrentUserUtils"; +import { DocumentManager } from "../../util/DocumentManager"; import { SelectionManager } from "../../util/SelectionManager"; import { SnappingManager } from "../../util/SnappingManager"; +import { undoBatch } from "../../util/UndoManager"; import { CollectionFreeFormView } from "../collections/collectionFreeForm/CollectionFreeFormView"; import { CollectionStackedTimeline, TrimScope } from "../collections/CollectionStackedTimeline"; import { ContextMenu } from "../ContextMenu"; @@ -22,16 +26,10 @@ import { ContextMenuProps } from "../ContextMenuItem"; import { ViewBoxAnnotatableComponent, ViewBoxAnnotatableProps } from "../DocComponent"; import { DocumentDecorations } from "../DocumentDecorations"; import { MarqueeAnnotator } from "../MarqueeAnnotator"; +import { AnchorMenu } from "../pdf/AnchorMenu"; import { StyleProp } from "../StyleProvider"; import { FieldView, FieldViewProps } from './FieldView'; -import { LinkDocPreview } from "./LinkDocPreview"; import "./VideoBox.scss"; -import { DragManager } from "../../util/DragManager"; -import { DocumentManager } from "../../util/DocumentManager"; -import { DocumentType } from "../../documents/DocumentTypes"; -import { Tooltip } from "@material-ui/core"; -import { AnchorMenu } from "../pdf/AnchorMenu"; -import { undoBatch } from "../../util/UndoManager"; const path = require('path'); type VideoDocument = makeInterface<[typeof documentSchema]>; @@ -40,14 +38,30 @@ const VideoDocument = makeInterface(documentSchema); @observer export class VideoBox extends ViewBoxAnnotatableComponent(VideoDocument) { public static LayoutString(fieldKey: string) { return FieldView.LayoutString(VideoBox, fieldKey); } + static async convertDataUri(imageUri: string, returnedFilename: string) { + try { + const posting = Utils.prepend("/uploadURI"); + const returnedUri = await rp.post(posting, { + body: { + uri: imageUri, + name: returnedFilename + }, + json: true, + }); + return returnedUri; + + } catch (e) { + console.log("VideoBox :" + e); + } + } static _youtubeIframeCounter: number = 0; - static Instance: VideoBox; static heightPercent 
= 60; // height of timeline in percent of height of videoBox. private _disposers: { [name: string]: IReactionDisposer } = {}; private _youtubePlayer: YT.Player | undefined = undefined; private _videoRef: HTMLVideoElement | null = null; private _youtubeIframeId: number = -1; private _youtubeContentCreated = false; + private _audioPlayer: HTMLAudioElement | null = null; private _stackedTimeline = React.createRef(); private _mainCont: React.RefObject = React.createRef(); private _annotationLayer: React.RefObject = React.createRef(); @@ -62,47 +76,52 @@ export class VideoBox extends ViewBoxAnnotatableComponent arr[arr.length - 1])(field.url.href.split("/")) : ""; + } + // returns the path of the audio file + @computed get audiopath() { + const field = Cast(this.props.Document[this.props.fieldKey + '-audio'], AudioField, null); + const vfield = Cast(this.dataDoc[this.fieldKey], VideoField, null); + return field?.url.href ?? vfield?.url.href ?? ""; } + private get timeline() { return this._stackedTimeline.current; } private get transition() { return this._clicking ? "left 0.5s, width 0.5s, height 0.5s" : ""; } public get player(): HTMLVideoElement | null { return this._videoRef; } - constructor(props: Readonly) { - super(props); - VideoBox.Instance = this; - } - - getAnchor = () => { - const timecode = Cast(this.layoutDoc._currentTimecode, "number", null); - const marquee = AnchorMenu.Instance.GetAnchor?.(); - return CollectionStackedTimeline.createAnchor(this.rootDoc, this.dataDoc, this.annotationKey, "_timecodeToShow"/* videoStart */, "_timecodeToHide" /* videoEnd */, timecode ? timecode : undefined, undefined, marquee) || this.rootDoc; + componentDidMount() { + this.props.setContentView?.(this); // this tells the DocumentView that this AudioBox is the "content" of the document. this allows the DocumentView to indirectly call getAnchor() on the AudioBox when making a link. + if (this.youtubeVideoId) { + const youtubeaspect = 400 / 315; + const nativeWidth = Doc.NativeWidth(this.layoutDoc); + const nativeHeight = Doc.NativeHeight(this.layoutDoc); + if (!nativeWidth || !nativeHeight) { + if (!nativeWidth) Doc.SetNativeWidth(this.dataDoc, 600); + Doc.SetNativeHeight(this.dataDoc, (nativeWidth || 600) / youtubeaspect); + this.layoutDoc._height = (this.layoutDoc._width || 0) / youtubeaspect; + } + } } - videoLoad = () => { - const aspect = this.player!.videoWidth / this.player!.videoHeight; - Doc.SetNativeWidth(this.dataDoc, this.player!.videoWidth); - Doc.SetNativeHeight(this.dataDoc, this.player!.videoHeight); - this.layoutDoc._height = (this.layoutDoc._width || 0) / aspect; - if (Number.isFinite(this.player!.duration)) { - this.dataDoc[this.fieldKey + "-duration"] = this.player!.duration; - } + componentWillUnmount() { + this.Pause(); + Object.keys(this._disposers).forEach(d => this._disposers[d]?.()); } @action public Play = (update: boolean = true) => { this._playing = true; + const eleTime = this.player?.currentTime || 0; + const start = eleTime >= (this.timeline?.trimEnd || 0) ? 
this.timeline?.trimStart || 0 : eleTime; try { this._audioPlayer && this.player && (this._audioPlayer.currentTime = this.player?.currentTime); - update && this.player && this.playFrom(this.player.currentTime); + update && this.player && this.playFrom(start, undefined, true); update && this._audioPlayer?.play(); update && this._youtubePlayer?.playVideo(); this._youtubePlayer && !this._playTimer && (this._playTimer = setInterval(this.updateTimecode, 5)); @@ -190,7 +209,7 @@ export class VideoBox extends ViewBoxAnnotatableComponent { + createRealSummaryLink = (imagePath: string, downX?: number, downY?: number) => { const url = !imagePath.startsWith("/") ? Utils.CorsProxy(imagePath) : imagePath; const width = this.layoutDoc._width || 1; const height = this.layoutDoc._height || 0; @@ -208,11 +227,25 @@ export class VideoBox extends ViewBoxAnnotatableComponent { + const timecode = Cast(this.layoutDoc._currentTimecode, "number", null); + const marquee = AnchorMenu.Instance.GetAnchor?.(); + return CollectionStackedTimeline.createAnchor(this.rootDoc, this.dataDoc, this.annotationKey, "_timecodeToShow"/* videoStart */, "_timecodeToHide" /* videoEnd */, timecode ? timecode : undefined, undefined, marquee) || this.rootDoc; + } + + videoLoad = () => { + const aspect = this.player!.videoWidth / this.player!.videoHeight; + Doc.SetNativeWidth(this.dataDoc, this.player!.videoWidth); + Doc.SetNativeHeight(this.dataDoc, this.player!.videoHeight); + this.layoutDoc._height = (this.layoutDoc._width || 0) / aspect; + if (Number.isFinite(this.player!.duration)) { + this.dataDoc[this.fieldKey + "-duration"] = this.player!.duration; + } + } + @action updateTimecode = () => { this.player && (this.layoutDoc._currentTimecode = this.player.currentTime); - this.layoutDoc.clipEnd = this.layoutDoc.clipEnd ? Math.min(this.duration, NumCast(this.layoutDoc.clipEnd)) : this.duration; - this._trimEnd = this._trimEnd ? Math.min(this.duration, this._trimEnd) : this.duration; try { this._youtubePlayer && (this.layoutDoc._currentTimecode = this._youtubePlayer.getCurrentTime?.()); } catch (e) { @@ -220,25 +253,6 @@ export class VideoBox extends ViewBoxAnnotatableComponent this._disposers[d]?.()); - } - @action setVideoRef = (vref: HTMLVideoElement | null) => { this._videoRef = vref; @@ -252,23 +266,6 @@ export class VideoBox extends ViewBoxAnnotatableComponent { const field = Cast(this.dataDoc[this.props.fieldKey], VideoField); if (field) { @@ -294,48 +291,8 @@ export class VideoBox extends ViewBoxAnnotatableComponent this._audioPlayer = e; - @computed get content() { - const field = Cast(this.dataDoc[this.fieldKey], VideoField); - const interactive = CurrentUserUtils.SelectedTool !== InkTool.None || !this.props.isSelected() ? "" : "-interactive"; - const style = "videoBox-content" + (this._fullScreen ? "-fullScreen" : "") + interactive; - return !field ?
Loading
: -
-
- - {!this.audiopath || this.audiopath === field.url.href ? (null) : - } -
-
; - } - - @computed get youtubeVideoId() { - const field = Cast(this.dataDoc[this.props.fieldKey], VideoField); - return field && field.url.href.indexOf("youtube") !== -1 ? ((arr: string[]) => arr[arr.length - 1])(field.url.href.split("/")) : ""; - } - - @action youtubeIframeLoaded = (e: any) => { + @action + youtubeIframeLoaded = (e: any) => { if (!this._youtubeContentCreated) { this._forceCreateYouTubeIFrame = !this._forceCreateYouTubeIFrame; return; @@ -344,7 +301,7 @@ export class VideoBox extends ViewBoxAnnotatableComponent { + loadYouTube = (iframe: any) => { let started = true; const onYoutubePlayerStateChange = (event: any) => runInAction(() => { if (started && event.data === YT.PlayerState.PLAYING) { @@ -376,39 +333,6 @@ export class VideoBox extends ViewBoxAnnotatableComponent{"playback"}
} key="play" placement="bottom"> -
- -
- , - {"timecode"}
} key="time" placement="bottom"> -
- {formatTime(curTime)} - {" " + Math.floor((curTime - Math.trunc(curTime)) * 100).toString().padStart(2, "0")} -
- , - {"view full screen"}
} key="full" placement="bottom"> -
- -
- ]; - return
- {[...(VideoBox._nativeControls ? [] : nonNativeControls), - {"snapshot current frame"}
} key="snap" placement="bottom"> -
- -
- , - {"show annotation timeline"}
} key="timeline" placement="bottom"> -
- -
- ,]} -
; - } onPlayDown = () => this._playing ? this.Pause() : this.Play(); @@ -425,10 +349,11 @@ export class VideoBox extends ViewBoxAnnotatableComponent this.Snapshot()); } - onTimelineHdlDown = action((e: React.PointerEvent) => { + @action + onTimelineHdlDown = (e: React.PointerEvent) => { this._clicking = true; setupMoveUpEvents(this, e, - action((e: PointerEvent) => { + action(encodeURIComponent => { this._clicking = false; if (this.props.isContentActive()) { const local = this.props.ScreenToLocalTransform().scale(this.props.scaling?.() || 1).transformPoint(e.clientX, e.clientY); @@ -440,28 +365,17 @@ export class VideoBox extends ViewBoxAnnotatableComponent this._clicking = false), 500); }, this.props.isContentActive(), this.props.isContentActive()); - }); + } onResetDown = (e: React.PointerEvent) => { setupMoveUpEvents(this, e, - (e: PointerEvent) => { + e => { this.Seek(Math.max(0, (this.layoutDoc._currentTimecode || 0) + Math.sign(e.movementX) * 0.0333)); e.stopImmediatePropagation(); return false; }, emptyFunction, - (e: PointerEvent) => this.layoutDoc._currentTimecode = 0); - } - - @computed get youtubeContent() { - this._youtubeIframeId = VideoBox._youtubeIframeCounter++; - this._youtubeContentCreated = this._forceCreateYouTubeIFrame ? true : true; - const style = "videoBox-content-YouTube" + (this._fullScreen ? "-fullScreen" : ""); - const start = untracked(() => Math.round((this.layoutDoc._currentTimecode || 0))); - return