author    | bobzel <zzzman@gmail.com> | 2021-09-23 11:16:53 -0400
committer | bobzel <zzzman@gmail.com> | 2021-09-23 11:16:53 -0400
commit    | bafc47fe100002e37c7abcada3dc44f3bfb66f62 (patch)
tree      | 4522253f889f1ea73667ea6ae9d1b09a80f133ef /src
parent    | fdb55f95c79d568395a6a106248b0901c67bdb1e (diff)
A bunch of fixes to the audio timelines: support undoing clips, simplify AudioWaveform, and have it recompute its waveform when a clip is made.
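The AudioWaveform change replaces the one-shot bucket computation in componentDidMount with a MobX reaction: it watches the clip range (clipStart/clipEnd) and the stored bucket state, and recomputes the waveform buckets whenever the stored range no longer matches the current clip, writing audioBucketRange first as a lightweight "lock" so several views don't redo the same work. Below is a minimal, standalone sketch of that pattern; it is not the Dash code itself, and the `layoutDoc` object and `computeBuckets` stub are simplified stand-ins for illustration.

```typescript
import { IReactionDisposer, observable, reaction, runInAction } from "mobx";

// Simplified stand-in for the fields the commit reads/writes on the layout document.
const layoutDoc = observable({
    clipStart: 0,
    clipEnd: 30,
    audioBuckets: [] as number[],
    audioBucketRange: [-1, -1] as number[], // range the current buckets were computed for ("lock")
});

const NUMBER_OF_BUCKETS = 100;

// Hypothetical stand-in for decoding the media and reducing it to peak buckets.
function computeBuckets(start: number, end: number): number[] {
    return Array.from({ length: NUMBER_OF_BUCKETS }, (_, i) => Math.abs(Math.sin(start + end + i)));
}

// Watch the clip range; when it differs from the range the stored buckets describe,
// record the new range first (so other views don't repeat the work) and recompute.
const disposer: IReactionDisposer = reaction(
    () => [layoutDoc.clipStart, layoutDoc.clipEnd, ...layoutDoc.audioBucketRange],
    ([start, end, rangeStart, rangeEnd]) => {
        if (rangeStart !== start || rangeEnd !== end) {
            runInAction(() => {
                layoutDoc.audioBucketRange = [start, end];           // acts as the "lock"
                layoutDoc.audioBuckets = computeBuckets(start, end); // new peaks for the clipped range
            });
        }
    },
    { fireImmediately: true },
);

// Trimming the clip triggers a recompute; dispose the reaction on unmount.
runInAction(() => (layoutDoc.clipEnd = 12));
disposer();
```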
Diffstat (limited to 'src')
-rw-r--r-- | src/client/views/AudioWaveform.tsx                          | 117
-rw-r--r-- | src/client/views/collections/CollectionStackedTimeline.tsx  |  16
-rw-r--r-- | src/client/views/nodes/AudioBox.tsx                         |  59
-rw-r--r-- | src/client/views/nodes/VideoBox.tsx                         |  12
4 files changed, 91 insertions, 113 deletions
diff --git a/src/client/views/AudioWaveform.tsx b/src/client/views/AudioWaveform.tsx
index 8f3b7c2cd..1e676e1f0 100644
--- a/src/client/views/AudioWaveform.tsx
+++ b/src/client/views/AudioWaveform.tsx
@@ -1,6 +1,6 @@
 import React = require("react");
 import axios from "axios";
-import { action, computed } from "mobx";
+import { action, computed, reaction, IReactionDisposer } from "mobx";
 import { observer } from "mobx-react";
 import Waveform from "react-audio-waveform";
 import { Doc } from "../../fields/Doc";
@@ -12,7 +12,8 @@ import "./AudioWaveform.scss";
 import { Colors } from "./global/globalEnums";
 
 export interface AudioWaveformProps {
-    duration: number;
+    duration: number;     // length of media clip
+    rawDuration: number;  // length of underlying media data
     mediaPath: string;
     layoutDoc: Doc;
     trimming: boolean;
@@ -22,19 +23,28 @@ export interface AudioWaveformProps {
 @observer
 export class AudioWaveform extends React.Component<AudioWaveformProps> {
     public static NUMBER_OF_BUCKETS = 100;
+    _disposer: IReactionDisposer | undefined;
     @computed get _waveHeight() { return Math.max(50, this.props.PanelHeight()); }
+
+    @computed get clipStart() { return NumCast(this.props.layoutDoc.clipStart); }
+    @computed get clipEnd() { return NumCast(this.props.layoutDoc.clipEnd, this.props.rawDuration); }
+    @computed get audioBuckets() { return Cast(this.props.layoutDoc.audioBuckets, listSpec("number"), []); }
+    @computed get audioBucketRange() { return Cast(this.props.layoutDoc.audioBucketRange, listSpec("number"), [-1, -1]); }
+    componentWillUnmount() {
+        this._disposer?.();
+    }
     componentDidMount() {
-        const audioBuckets = Cast(
-            this.props.layoutDoc.audioBuckets,
-            listSpec("number"),
-            []
-        );
-        if (!audioBuckets.length) {
-            this.props.layoutDoc.audioBuckets = new List<number>([0, 0]); /// "lock" to prevent other views from computing the same data
-            setTimeout(this.createWaveformBuckets);
-        }
+        this._disposer = reaction(() => [this.clipStart, this.clipEnd, this.audioBuckets.length, ...this.audioBucketRange],
+            (range) => {
+                if (range[2] !== AudioWaveform.NUMBER_OF_BUCKETS || range[3] !== range[0] || range[4] !== range[1]) {
+                    this.props.layoutDoc.audioBucketRange = new List<number>([range[0], range[1]]); // setting these values here serves as a "lock" to prevent multiple attempts to create the waveform at nearly the same time.
+                    this.props.layoutDoc.audioBuckets = new List<number>(numberRange(AudioWaveform.NUMBER_OF_BUCKETS));
+                    setTimeout(this.createWaveformBuckets);
+                }
+            }, { fireImmediately: true });
+    }
 
     // decodes the audio file into peaks for generating the waveform
@@ -45,81 +55,44 @@ export class AudioWaveform extends React.Component<AudioWaveformProps> {
             context.decodeAudioData(
                 response.data,
                 action((buffer) => {
-                    const decodedAudioData = buffer.getChannelData(0);
+                    const rawDecodedAudioData = buffer.getChannelData(0);
+                    const startInd = this.clipStart / this.props.rawDuration;
+                    const endInd = this.clipEnd / this.props.rawDuration;
+                    const decodedAudioData = rawDecodedAudioData.slice(Math.floor(startInd * rawDecodedAudioData.length), Math.floor(endInd * rawDecodedAudioData.length));
                     const bucketDataSize = Math.floor(
                         decodedAudioData.length / AudioWaveform.NUMBER_OF_BUCKETS
                     );
                     const brange = Array.from(Array(bucketDataSize));
-                    this.props.layoutDoc.audioBuckets = new List<number>(
-                        numberRange(AudioWaveform.NUMBER_OF_BUCKETS).map(
-                            (i: number) =>
-                                brange.reduce(
-                                    (p, x, j) =>
-                                        Math.abs(
-                                            Math.max(p, decodedAudioData[i * bucketDataSize + j])
-                                        ),
-                                    0
-                                ) / 2
-                        )
+                    const bucketList = numberRange(AudioWaveform.NUMBER_OF_BUCKETS).map(
+                        (i: number) =>
+                            brange.reduce(
+                                (p, x, j) =>
+                                    Math.abs(
+                                        Math.max(p, decodedAudioData[i * bucketDataSize + j])
+                                    ),
+                                0
+                            ) / 2
                     );
+                    this.props.layoutDoc.audioBucketRange = new List<number>([this.clipStart, this.clipEnd]);
+                    this.props.layoutDoc.audioBuckets = new List<number>(bucketList);
                 })
            );
        }
        );
    }
-
-    @action
-    createTrimBuckets = () => {
-        const audioBuckets = Cast(
-            this.props.layoutDoc.audioBuckets,
-            listSpec("number"),
-            []
-        );
-
-        const start = Math.floor(
-            (NumCast(this.props.layoutDoc.clipStart) / this.props.duration) * 100
-        );
-        const end = Math.floor(
-            (NumCast(this.props.layoutDoc.clipEnd) / this.props.duration) * 100
-        );
-        return audioBuckets.slice(start, end);
-    }
-
    render() {
-        const audioBuckets = Cast(
-            this.props.layoutDoc.audioBuckets,
-            listSpec("number"),
-            []
-        );
-
        return (
            <div className="audioWaveform">
-                {this.props.trimming || !this.props.layoutDoc.clipEnd ? (
-                    <Waveform
-                        color={Colors.MEDIUM_BLUE}
-                        height={this._waveHeight}
-                        barWidth={0.1}
-                        pos={this.props.duration}
-                        duration={this.props.duration}
-                        peaks={
-                            audioBuckets.length === AudioWaveform.NUMBER_OF_BUCKETS
-                                ? audioBuckets
-                                : undefined
-                        }
-                        progressColor={Colors.MEDIUM_BLUE}
-                    />
-                ) : (
-                    <Waveform
-                        color={Colors.MEDIUM_BLUE}
-                        height={this._waveHeight}
-                        barWidth={0.1}
-                        pos={this.props.duration}
-                        duration={this.props.duration}
-                        peaks={this.createTrimBuckets()}
-                        progressColor={Colors.MEDIUM_BLUE}
-                    />
-                )}
+                <Waveform
+                    color={Colors.MEDIUM_BLUE}
+                    height={this._waveHeight}
+                    barWidth={0.1}
+                    pos={this.props.duration}
+                    duration={this.props.duration}
+                    peaks={this.audioBuckets}
+                    progressColor={Colors.MEDIUM_BLUE}
+                />
            </div>
        );
    }
diff --git a/src/client/views/collections/CollectionStackedTimeline.tsx b/src/client/views/collections/CollectionStackedTimeline.tsx
index 970947b12..0985e5b2e 100644
--- a/src/client/views/collections/CollectionStackedTimeline.tsx
+++ b/src/client/views/collections/CollectionStackedTimeline.tsx
@@ -61,9 +61,10 @@ export type CollectionStackedTimelineProps = {
    endTag: string;
    mediaPath: string;
    dictationKey: string;
+    rawDuration: number;
    trimming: boolean;
-    trimStart: number;
-    trimEnd: number;
+    trimStart: () => number;
+    trimEnd: () => number;
    trimDuration: number;
    setStartTrim: (newStart: number) => void;
    setEndTrim: (newEnd: number) => void;
@@ -94,11 +95,11 @@ export class CollectionStackedTimeline extends CollectionSubView<
    }

    get trimStart() {
-        return this.props.trimStart;
+        return this.props.trimStart();
    }

    get trimEnd() {
-        return this.props.trimEnd;
+        return this.props.trimEnd();
    }

    get duration() {
@@ -540,6 +541,7 @@ export class CollectionStackedTimeline extends CollectionSubView<
        return !this.props.mediaPath ? null : (
            <div className="collectionStackedTimeline-waveform">
                <AudioWaveform
+                    rawDuration={this.props.rawDuration}
                    duration={this.duration}
                    mediaPath={this.props.mediaPath}
                    layoutDoc={this.layoutDoc}
@@ -584,11 +586,7 @@ export class CollectionStackedTimeline extends CollectionSubView<
            : Math.max((start - this.trimStart) / this.props.trimDuration * timelineContentWidth, 0);
        const top = (d.level / maxLevel) * this.timelineContentHeight() + 15;
        const timespan = end - start;
-        let width = (timespan / this.props.trimDuration) * timelineContentWidth;
-        width = (!this.props.trimming && left == 0) ?
-            width - ((this.trimStart - start) / this.props.trimDuration * timelineContentWidth) : width;
-        width = (!this.props.trimming && this.trimEnd < end) ?
-            width - ((end - this.trimEnd) / this.props.trimDuration * timelineContentWidth) : width;
+        const width = (timespan / this.duration) * timelineContentWidth;
        const height = (this.timelineContentHeight()) / maxLevel;
        return this.props.Document.hideAnchors ? null : (
            <div
diff --git a/src/client/views/nodes/AudioBox.tsx b/src/client/views/nodes/AudioBox.tsx
index 06f1c4ae1..538d7d5cb 100644
--- a/src/client/views/nodes/AudioBox.tsx
+++ b/src/client/views/nodes/AudioBox.tsx
@@ -34,6 +34,7 @@ import "./AudioBox.scss";
 import { FieldView, FieldViewProps } from "./FieldView";
 import { LinkDocPreview } from "./LinkDocPreview";
 import e = require("connect-flash");
+import { undoBatch } from "../../util/UndoManager";

declare class MediaRecorder {
    constructor(e: any); // whatever MediaRecorder has
@@ -74,9 +75,10 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
    @observable _waveHeight: Opt<number> = this.layoutDoc._height;
    @observable _paused: boolean = false;
    @observable _trimming: boolean = false;
-    @observable _trimStart: number = NumCast(this.layoutDoc.clipStart) ? NumCast(this.layoutDoc.clipStart) : 0;
-    @observable _trimEnd: number = NumCast(this.layoutDoc.clipEnd) ? NumCast(this.layoutDoc.clipEnd)
-        : this.duration;
+    @observable _trimStart: number = NumCast(this.layoutDoc.clipStart);
+    @observable _trimEnd: number = NumCast(this.layoutDoc.clipEnd, this.duration);
+    @computed get trimStart() { return this._trimming ? this._trimStart : NumCast(this.layoutDoc.clipStart); }
+    @computed get trimEnd() { return this._trimming ? this._trimEnd : NumCast(this.layoutDoc.clipEnd, this.duration); }

    @computed get mediaState():
        | undefined
@@ -84,7 +86,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
        | "recording"
        | "paused"
        | "playing" {
-        return this.dataDoc.mediaState as
+        return this.layoutDoc.mediaState as
            | undefined
            | "pendingRecording"
            | "recording"
@@ -92,7 +94,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
            | "playing";
    }
    set mediaState(value) {
-        this.dataDoc.mediaState = value;
+        this.layoutDoc.mediaState = value;
    }
    public static SetScrubTime = action((timeInMillisFrom1970: number) => {
        AudioBox._scrubTime = 0;
@@ -104,11 +106,15 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
            DateField
        )?.date.getTime();
    }
-    @computed get duration() {
+    @computed get rawDuration() {
        return NumCast(this.dataDoc[`${this.fieldKey}-duration`]);
    }
+    @computed get duration() {
+        return NumCast(this.layoutDoc.clipEnd, NumCast(this.layoutDoc.clipStart) + NumCast(this.dataDoc[`${this.fieldKey}-duration`])) - NumCast(this.layoutDoc.clipStart);
+        // NumCast(this.dataDoc[`${this.fieldKey}-duration`]);
+    }
    @computed get trimDuration() {
-        return this._trimming && this._trimEnd ? this.duration : this._trimEnd - this._trimStart;
+        return this.trimEnd - this.trimStart;
    }
    @computed get anchorDocs() {
        return DocListCast(this.dataDoc[this.annotationKey]);
@@ -228,8 +234,6 @@
            runInAction(
                () => (this.dataDoc[this.fieldKey + "-duration"] = htmlEle.duration)
            );
-            this.layoutDoc.clipEnd = this.layoutDoc.clipEnd ? Math.min(this.duration, NumCast(this.layoutDoc.clipEnd)) : this.duration;
-            this._trimEnd = this._trimEnd ? Math.min(this.duration, this._trimEnd) : this.duration;
            this.links
                .map((l) => this.getLinkData(l))
                .forEach(({ la1, la2, linkTime }) => {
@@ -259,7 +263,7 @@

    // play back the audio from time
    @action
-    playFrom = (seekTimeInSeconds: number, endTime: number = this._trimEnd, fullPlay: boolean = false) => {
+    playFrom = (seekTimeInSeconds: number, endTime: number = this.trimEnd, fullPlay: boolean = false) => {
        clearTimeout(this._play);
        if (Number.isNaN(this._ele?.duration)) {
            setTimeout(() => this.playFrom(seekTimeInSeconds, endTime), 500);
@@ -270,9 +274,9 @@
            } else {
                this.Pause();
            }
-        } else if (this._trimStart <= endTime && seekTimeInSeconds <= this._trimEnd) {
-            const start = Math.max(this._trimStart, seekTimeInSeconds);
-            const end = Math.min(this._trimEnd, endTime);
+        } else if (this.trimStart <= endTime && seekTimeInSeconds <= this.trimEnd) {
+            const start = Math.max(this.trimStart, seekTimeInSeconds);
+            const end = Math.min(this.trimEnd, endTime);
            this._ele.currentTime = start;
            this._ele.play();
            runInAction(() => (this.mediaState = "playing"));
@@ -385,14 +389,14 @@
    Play = (e?: any) => {
        let start;
        if (this._ended || this._ele!.currentTime === this.duration) {
-            start = this._trimStart;
+            start = NumCast(this.layoutDoc.clipStart);
            this._ended = false;
        } else {
            start = this._ele!.currentTime;
        }

-        this.playFrom(start, this._trimEnd, true);
+        this.playFrom(start, this.trimEnd, true);
        e?.stopPropagation?.();
    }
@@ -502,16 +506,16 @@
    }

    // hides trim controls and displays new clip
-    @action
-    finishTrim = () => {
+    @undoBatch
+    finishTrim = action(() => {
        if (this.mediaState === "playing") {
            this.Pause();
        }
-        this.layoutDoc.clipStart = this._trimStart;
-        this.layoutDoc.clipEnd = this._trimEnd;
+        this.layoutDoc.clipStart = this.trimStart;
+        this.layoutDoc.clipEnd = this.trimEnd;
+        this.setAnchorTime(Math.max(Math.min(this.trimEnd, this._ele!.currentTime), this.trimStart));
        this._trimming = false;
-        this.setAnchorTime(Math.max(Math.min(this._trimEnd, this._ele!.currentTime), this._trimStart));
-    }
+    });

    @action
    setStartTrim = (newStart: number) => {
@@ -544,6 +548,8 @@
        this.heightPercent) / 100 // panelHeight * heightPercent is player height.  * heightPercent is timeline height (as per css inline)
    timelineWidth = () => this.props.PanelWidth() - AudioBox.playheadWidth;
+    trimEndFunc = () => this.trimEnd;
+    trimStartFunc = () => this.trimStart;
    @computed get renderTimeline() {
        return (
            <CollectionStackedTimeline
@@ -558,13 +564,12 @@
                focus={DocUtils.DefaultFocus}
                bringToFront={emptyFunction}
                CollectionView={undefined}
+                rawDuration={this.rawDuration}
                duration={this.duration}
                playFrom={this.playFrom}
                setTime={this.setAnchorTime}
                playing={this.playing}
-                whenChildContentsActiveChanged={
-                    this.timelineWhenChildContentsActiveChanged
-                }
+                whenChildContentsActiveChanged={this.timelineWhenChildContentsActiveChanged}
                moveDocument={this.moveDocument}
                addDocument={this.addDocument}
                removeDocument={this.removeDocument}
@@ -577,8 +582,8 @@
                PanelWidth={this.timelineWidth}
                PanelHeight={this.timelineHeight}
                trimming={this._trimming}
-                trimStart={this._trimStart}
-                trimEnd={this._trimEnd}
+                trimStart={this.trimStartFunc}
+                trimEnd={this.trimEndFunc}
                trimDuration={this.trimDuration}
                setStartTrim={this.setStartTrim}
                setEndTrim={this.setEndTrim}
@@ -710,7 +715,7 @@
                <div className="audioBox-current-time">
                    {this._trimming ?
                        formatTime(Math.round(NumCast(this.layoutDoc._currentTimecode)))
-                        : formatTime(Math.round(NumCast(this.layoutDoc._currentTimecode) - NumCast(this._trimStart)))}
+                        : formatTime(Math.round(NumCast(this.layoutDoc._currentTimecode) - NumCast(this.trimStart)))}
                </div>
                <div className="audioBox-total-time">
                    {this._trimming || !this._trimEnd ?
diff --git a/src/client/views/nodes/VideoBox.tsx b/src/client/views/nodes/VideoBox.tsx
index 3fc460102..84eeacc29 100644
--- a/src/client/views/nodes/VideoBox.tsx
+++ b/src/client/views/nodes/VideoBox.tsx
@@ -9,7 +9,7 @@ import { InkTool } from "../../../fields/InkField";
 import { makeInterface } from "../../../fields/Schema";
 import { Cast, NumCast, StrCast } from "../../../fields/Types";
 import { AudioField, nullAudio, VideoField } from "../../../fields/URLField";
-import { emptyFunction, formatTime, OmitKeys, returnOne, setupMoveUpEvents, Utils, returnFalse } from "../../../Utils";
+import { emptyFunction, formatTime, OmitKeys, returnOne, setupMoveUpEvents, Utils, returnFalse, returnZero } from "../../../Utils";
 import { Docs, DocUtils } from "../../documents/Documents";
 import { Networking } from "../../Network";
 import { CurrentUserUtils } from "../../util/CurrentUserUtils";
@@ -526,6 +526,7 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
    timelineScreenToLocal = () => this.props.ScreenToLocalTransform().scale(this.scaling()).translate(0, -this.heightPercent / 100 * this.props.PanelHeight());
    setAnchorTime = (time: number) => this.player!.currentTime = this.layoutDoc._currentTimecode = time;
    timelineHeight = () => this.props.PanelHeight() * (100 - this.heightPercent) / 100;
+    trimEndFunc = () => this.duration;
    @computed get renderTimeline() {
        return <div className="videoBox-stackPanel" style={{ transition: this.transition, height: `${100 - this.heightPercent}%` }}>
            <CollectionStackedTimeline ref={this._stackedTimeline} {...this.props}
@@ -538,6 +539,7 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
                bringToFront={emptyFunction}
                CollectionView={undefined}
                duration={this.duration}
+                rawDuration={this.duration}
                playFrom={this.playFrom}
                setTime={this.setAnchorTime}
                playing={this.playing}
@@ -550,11 +552,11 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
                playLink={this.playLink}
                PanelHeight={this.timelineHeight}
                trimming={false}
-                trimStart={0}
-                trimEnd={this.duration}
+                trimStart={returnZero}
+                trimEnd={this.trimEndFunc}
                trimDuration={this.duration}
-                setStartTrim={() => { }}
-                setEndTrim={() => { }}
+                setStartTrim={emptyFunction}
+                setEndTrim={emptyFunction}
            />
        </div>;
    }
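Two smaller patterns in the diff are worth noting. First, finishTrim in AudioBox is now wrapped in undoBatch, so committing a trim (writing clipStart/clipEnd and adjusting the anchor time) becomes a single undoable operation. Second, CollectionStackedTimeline now receives trimStart/trimEnd as accessor functions (() => number) rather than plain numbers, so the timeline reads the live trim values when it renders instead of a value captured when the parent last rendered. A minimal sketch of the accessor-prop pattern with mobx-react follows; the trimState object and the Timeline/Parent components are hypothetical stand-ins, while trimStartFunc/trimEndFunc echo the names used in the diff.

```tsx
import { observable, runInAction } from "mobx";
import { observer } from "mobx-react";
import * as React from "react";

// Observable trim state standing in for the fields AudioBox keeps on its document.
const trimState = observable({ trimStart: 0, trimEnd: 60 });

interface TimelineProps {
    trimStart: () => number; // accessors are evaluated inside the child's render,
    trimEnd: () => number;   // so the observer child tracks the underlying observables
}

// Because the functions are called during render, mobx-react re-renders Timeline
// whenever trimState changes, without the parent having to pass new prop values.
const Timeline = observer((props: TimelineProps) => (
    <div>trim: {props.trimStart()} to {props.trimEnd()}</div>
));

// Stable accessors (like trimStartFunc/trimEndFunc in the diff) avoid creating
// a new closure on every parent render.
const trimStartFunc = () => trimState.trimStart;
const trimEndFunc = () => trimState.trimEnd;

export const Parent = () => <Timeline trimStart={trimStartFunc} trimEnd={trimEndFunc} />;

// Adjusting the trim re-renders Timeline even though Parent's props did not change.
runInAction(() => (trimState.trimEnd = 45));
```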