From 7f5cee959475b4e42af3b4ea39dcf92f450fa564 Mon Sep 17 00:00:00 2001
From: bobzel
Date: Wed, 24 Mar 2021 04:00:15 -0400
Subject: renamed audioState to mediaState and cleaned up screenshotBox a little.

---
 src/client/views/nodes/AudioBox.tsx | 28 ++++++++++++++--------------
 1 file changed, 14 insertions(+), 14 deletions(-)

diff --git a/src/client/views/nodes/AudioBox.tsx b/src/client/views/nodes/AudioBox.tsx
index 393a80648..2f7a6cfd8 100644
--- a/src/client/views/nodes/AudioBox.tsx
+++ b/src/client/views/nodes/AudioBox.tsx
@@ -57,8 +57,8 @@ export class AudioBox extends ViewBoxAnnotatableComponent
     = this.layoutDoc._height;
     @observable _paused: boolean = false;

-    @computed get audioState(): undefined | "recording" | "paused" | "playing" { return this.dataDoc.audioState as (undefined | "recording" | "paused" | "playing"); }
-    set audioState(value) { this.dataDoc.audioState = value; }
+    @computed get mediaState(): undefined | "recording" | "paused" | "playing" { return this.dataDoc.mediaState as (undefined | "recording" | "paused" | "playing"); }
+    set mediaState(value) { this.dataDoc.mediaState = value; }
     public static SetScrubTime = action((timeInMillisFrom1970: number) => { AudioBox._scrubTime = 0; AudioBox._scrubTime = timeInMillisFrom1970; });
     @computed get recordingStart() { return Cast(this.dataDoc[this.props.fieldKey + "-recordingStart"], DateField)?.date.getTime(); }
     @computed get duration() { return NumCast(this.dataDoc[`${this.fieldKey}-duration`]); }
@@ -90,7 +90,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent
     { return CollectionStackedTimeline.createAnchor(this.rootDoc, this.dataDoc, this.annotationKey, "_timecodeToShow" /* audioStart */, "_timecodeToHide" /* audioEnd */, this._ele?.currentTime ||
-        Cast(this.props.Document._currentTimecode, "number", null) || (this.audioState === "recording" ? (Date.now() - (this.recordingStart || 0)) / 1000 : undefined))
+        Cast(this.props.Document._currentTimecode, "number", null) || (this.mediaState === "recording" ? (Date.now() - (this.recordingStart || 0)) / 1000 : undefined))
         || this.rootDoc;
     }
@@ -104,7 +104,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent
     !LinkDocPreview.LinkInfo && this.props.renderDepth !== -1 ?
         NumCast(this.Document._triggerAudio, null) : undefined,
@@ -131,7 +131,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent
     { const htmlEle = this._ele;
-        if (this.audioState !== "recording" && htmlEle) {
+        if (this.mediaState !== "recording" && htmlEle) {
             htmlEle.duration && htmlEle.duration !== Infinity && runInAction(() => this.dataDoc[this.fieldKey + "-duration"] = htmlEle.duration);
             this.links.map(l => this.getLinkData(l)).forEach(({ la1, la2, linkTime }) => {
                 if (linkTime > NumCast(this.layoutDoc._currentTimecode) && linkTime < htmlEle.currentTime) {
@@ -145,7 +145,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent
     { this._ele!.pause();
-        this.audioState = "paused";
+        this.mediaState = "paused";
     });

     // play audio for documents created during recording
@@ -169,7 +169,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent
-        runInAction(() => this.audioState = "playing");
+        runInAction(() => this.mediaState = "playing");
         if (endTime !== this.duration) {
             this._play = setTimeout(() => this.Pause(), (endTime - seekTimeInSeconds) * 1000); // use setTimeout to play a specific duration
         }
@@ -181,7 +181,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent
     {
-        if (this.audioState === "recording") {
+        if (this.mediaState === "recording") {
             setTimeout(this.updateRecordTime, 30);
             if (this._paused) {
                 this._pausedTime += (new Date().getTime() - this._recordStart) / 1000;
@@ -204,7 +204,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent
-        runInAction(() => this.audioState = "recording");
+        runInAction(() => this.mediaState = "recording");
         setTimeout(this.updateRecordTime, 0);
         this._recorder.start();
         setTimeout(() => this._recorder && this.stopRecording(), 60 * 60 * 1000); // stop after an hour
@@ -224,7 +224,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent
-    playing = () => this.audioState === "playing";
+    playing = () => this.mediaState === "playing";
     playLink = (link: Doc) => {
         const stack = this._stackedTimeline.current;
         if (link.annotationOn === this.rootDoc) {
@@ -387,7 +387,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent
-                {this.audioState === "recording" || this.audioState === "paused" ?
+                {this.mediaState === "recording" || this.mediaState === "paused" ?
                     e.stopPropagation()}>
@@ -405,7 +405,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent
-
+
                         {this.waveform}
--
cgit v1.2.3-70-g09d2
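
Note: the heart of the rename is the computed accessor near line 57, which proxies the recording/playback state through the shared data document rather than storing it on the view. The sketch below is not code from this repository; it is a minimal, self-contained illustration of that pattern, and the DataDoc/MediaBox names and the MobX 6 API usage are assumptions.

// Minimal sketch (illustrative, not from the Dash repository) of a computed
// getter/setter that proxies "mediaState" through a shared data document.
import { computed, makeObservable, observable } from "mobx";

type MediaState = undefined | "recording" | "paused" | "playing";

class DataDoc {
    // Stand-in for a Dash document field; observable so views react to changes.
    mediaState: MediaState = undefined;
    constructor() { makeObservable(this, { mediaState: observable }); }
}

class MediaBox {
    constructor(private dataDoc: DataDoc) {
        makeObservable(this, { mediaState: computed });
    }
    // Reads go through a computed accessor backed by the data document...
    get mediaState(): MediaState { return this.dataDoc.mediaState; }
    // ...and writes go back to the same document, so every view of that
    // document observes the same recording/playback state.
    set mediaState(value: MediaState) { this.dataDoc.mediaState = value; }
}

// Usage: setting the state on one view is visible anywhere the document is observed.
const box = new MediaBox(new DataDoc());
box.mediaState = "playing";
console.log(box.mediaState); // "playing"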