Diffstat (limited to 'src/client/views/nodes/AudioBox.tsx')
-rw-r--r--  src/client/views/nodes/AudioBox.tsx  350
1 file changed, 123 insertions(+), 227 deletions(-)
diff --git a/src/client/views/nodes/AudioBox.tsx b/src/client/views/nodes/AudioBox.tsx
index bfc15cea8..81367ed19 100644
--- a/src/client/views/nodes/AudioBox.tsx
+++ b/src/client/views/nodes/AudioBox.tsx
@@ -14,15 +14,15 @@ import { Doc, DocListCast, Opt } from "../../../fields/Doc";
import { documentSchema } from "../../../fields/documentSchemas";
import { makeInterface } from "../../../fields/Schema";
import { ComputedField } from "../../../fields/ScriptField";
-import { Cast, NumCast } from "../../../fields/Types";
+import { Cast, NumCast, DateCast } from "../../../fields/Types";
import { AudioField, nullAudio } from "../../../fields/URLField";
-import { emptyFunction, formatTime, OmitKeys, setupMoveUpEvents, returnFalse } from "../../../Utils";
+import { emptyFunction, formatTime, OmitKeys, returnFalse, setupMoveUpEvents } from "../../../Utils";
import { DocUtils } from "../../documents/Documents";
import { Networking } from "../../Network";
import { CurrentUserUtils } from "../../util/CurrentUserUtils";
import { DragManager } from "../../util/DragManager";
import { SnappingManager } from "../../util/SnappingManager";
-import { CollectionStackedTimeline } from "../collections/CollectionStackedTimeline";
+import { CollectionStackedTimeline, TrimScope } from "../collections/CollectionStackedTimeline";
import { ContextMenu } from "../ContextMenu";
import { ContextMenuProps } from "../ContextMenuItem";
import {
@@ -43,20 +43,21 @@ declare class MediaRecorder {
type AudioDocument = makeInterface<[typeof documentSchema]>;
const AudioDocument = makeInterface(documentSchema);
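+// discrete playback/recording states for the media element, replacing the raw string literals used previously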
+enum media_state {
+ PendingRecording = "pendingRecording",
+ Recording = "recording",
+ Paused = "paused",
+ Playing = "playing"
+};
@observer
-export class AudioBox extends ViewBoxAnnotatableComponent<
- ViewBoxAnnotatableProps & FieldViewProps,
- AudioDocument
->(AudioDocument) {
+export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProps & FieldViewProps, AudioDocument>(AudioDocument) {
public static LayoutString(fieldKey: string) { return FieldView.LayoutString(AudioBox, fieldKey); }
public static Enabled = false;
static playheadWidth = 40; // width of playhead
static heightPercent = 75; // height of timeline in percent of height of audioBox.
static Instance: AudioBox;
- static ScopeAll = 2;
- static ScopeClip = 1;
- static ScopeNone = 0;
+ _dropDisposer?: DragManager.DragDropDisposer;
_disposers: { [name: string]: IReactionDisposer } = {};
_ele: HTMLAudioElement | null = null;
_stackedTimeline = React.createRef<CollectionStackedTimeline>();
@@ -68,81 +69,39 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
_stream: MediaStream | undefined;
_start: number = 0;
_play: any = null;
- _ended: boolean = false;
@observable static _scrubTime = 0;
@observable _markerEnd: number = 0;
@observable _position: number = 0;
@observable _waveHeight: Opt<number> = this.layoutDoc._height;
@observable _paused: boolean = false;
- @observable _trimming: number = AudioBox.ScopeNone;
- @observable _trimStart: number = NumCast(this.layoutDoc.clipStart);
- @observable _trimEnd: number | undefined = Cast(this.layoutDoc.clipEnd, "number");
- @computed get clipStart() { return this._trimming === AudioBox.ScopeAll ? 0 : NumCast(this.layoutDoc.clipStart); }
- @computed get clipDuration() {
- return this._trimming === AudioBox.ScopeAll ? NumCast(this.dataDoc[`${this.fieldKey}-duration`]) :
- NumCast(this.layoutDoc.clipEnd, this.clipStart + NumCast(this.dataDoc[`${this.fieldKey}-duration`])) - this.clipStart;
- }
- @computed get clipEnd() { return this.clipStart + this.clipDuration; }
- @computed get trimStart() { return this._trimming !== AudioBox.ScopeNone ? this._trimStart : NumCast(this.layoutDoc.clipStart); }
- @computed get trimDuration() { return this.trimEnd - this.trimStart; }
- @computed get trimEnd() {
- return this._trimming !== AudioBox.ScopeNone && this._trimEnd !== undefined ? this._trimEnd : NumCast(this.layoutDoc.clipEnd, this.clipDuration);
- }
+ @computed get recordingStart() { return DateCast(this.dataDoc[this.fieldKey + "-recordingStart"])?.date.getTime(); }
+ @computed get rawDuration() { return NumCast(this.dataDoc[`${this.fieldKey}-duration`]); }
+ @computed get anchorDocs() { return DocListCast(this.dataDoc[this.annotationKey]); }
+ @computed get links() { return DocListCast(this.dataDoc.links); }
+ @computed get pauseTime() { return this._pauseEnd - this._pauseStart; } // total time spent paused, used to correct the playback time
+ @computed get heightPercent() { return AudioBox.heightPercent; }
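+ // current media state, persisted on the layout doc so other views of this document can observe it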
+ @computed get mediaState() { return this.layoutDoc.mediaState as media_state; }
+ set mediaState(value) { this.layoutDoc.mediaState = value; }
- @computed get mediaState():
- | undefined
- | "pendingRecording"
- | "recording"
- | "paused"
- | "playing" {
- return this.layoutDoc.mediaState as
- | undefined
- | "pendingRecording"
- | "recording"
- | "paused"
- | "playing";
- }
- set mediaState(value) {
- this.layoutDoc.mediaState = value;
- }
- public static SetScrubTime = action((timeInMillisFrom1970: number) => {
- AudioBox._scrubTime = 0;
- AudioBox._scrubTime = timeInMillisFrom1970;
- });
- @computed get recordingStart() {
- return Cast(
- this.dataDoc[this.props.fieldKey + "-recordingStart"],
- DateField
- )?.date.getTime();
- }
- @computed get rawDuration() {
- return NumCast(this.dataDoc[`${this.fieldKey}-duration`]);
- }
- @computed get anchorDocs() {
- return DocListCast(this.dataDoc[this.annotationKey]);
- }
- @computed get links() {
- return DocListCast(this.dataDoc.links);
- }
- @computed get pauseTime() {
- return this._pauseEnd - this._pauseStart;
- } // total time paused to update the correct time
- @computed get heightPercent() {
- return AudioBox.heightPercent;
- }
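+ // convenience accessor for the stacked timeline component, which now owns the clip/trim state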
+ get timeline() { return this._stackedTimeline.current; }
constructor(props: Readonly<ViewBoxAnnotatableProps & FieldViewProps>) {
super(props);
AudioBox.Instance = this;
}
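+ // reset to 0 first so reactions fire even when the same scrub time is set twice in a row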
+ public static SetScrubTime = action((timeInMillisFrom1970: number) => {
+ AudioBox._scrubTime = 0;
+ AudioBox._scrubTime = timeInMillisFrom1970;
+ });
+
getLinkData(l: Doc) {
let la1 = l.anchor1 as Doc;
let la2 = l.anchor2 as Doc;
const linkTime =
- this._stackedTimeline.current?.anchorStart(la2) ||
- this._stackedTimeline.current?.anchorStart(la1) ||
+ this.timeline?.anchorStart(la2) ||
+ this.timeline?.anchorStart(la1) ||
0;
if (Doc.AreProtosEqual(la1, this.dataDoc)) {
la1 = l.anchor2 as Doc;
@@ -152,47 +111,42 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
}
getAnchor = () => {
- return (
- CollectionStackedTimeline.createAnchor(
- this.rootDoc,
- this.dataDoc,
- this.annotationKey,
- "_timecodeToShow" /* audioStart */,
- "_timecodeToHide" /* audioEnd */,
- this._ele?.currentTime ||
- Cast(this.props.Document._currentTimecode, "number", null) ||
- (this.mediaState === "recording"
- ? (Date.now() - (this.recordingStart || 0)) / 1000
- : undefined)
- ) || this.rootDoc
- );
+ return CollectionStackedTimeline.createAnchor(
+ this.rootDoc,
+ this.dataDoc,
+ this.annotationKey,
+ "_timecodeToShow" /* audioStart */,
+ "_timecodeToHide" /* audioEnd */,
+ this._ele?.currentTime ||
+ Cast(this.props.Document._currentTimecode, "number", null) ||
+ (this.mediaState === media_state.Recording
+ ? (Date.now() - (this.recordingStart || 0)) / 1000
+ : undefined)
+ ) || this.rootDoc;
}
componentWillUnmount() {
- this.dropDisposer?.();
+ this._dropDisposer?.();
Object.values(this._disposers).forEach((disposer) => disposer?.());
const ind = DocUtils.ActiveRecordings.indexOf(this);
ind !== -1 && DocUtils.ActiveRecordings.splice(ind, 1);
}
- private dropDisposer?: DragManager.DragDropDisposer;
@action
componentDidMount() {
this.props.setContentView?.(this); // this tells the DocumentView that this AudioBox is the "content" of the document. this allows the DocumentView to indirectly call getAnchor() on the AudioBox when making a link.
- this.mediaState = this.path ? "paused" : undefined;
+ this.mediaState = this.path ? media_state.Paused : undefined as any as media_state;
this.path && this.setAnchorTime(NumCast(this.layoutDoc.clipStart));
this.path && this.timecodeChanged();
this._disposers.triggerAudio = reaction(
- () =>
- !LinkDocPreview.LinkInfo && this.props.renderDepth !== -1
- ? NumCast(this.Document._triggerAudio, null)
- : undefined,
+ () => !LinkDocPreview.LinkInfo && this.props.renderDepth !== -1
+ ? NumCast(this.Document._triggerAudio, null)
+ : undefined,
(start) =>
- start !== undefined &&
- setTimeout(() => {
+ start !== undefined && setTimeout(() => {
this.playFrom(start);
setTimeout(() => {
this.Document._currentTimecode = start;
@@ -203,13 +157,11 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
);
this._disposers.audioStop = reaction(
- () =>
- this.props.renderDepth !== -1 && !LinkDocPreview.LinkInfo
- ? Cast(this.Document._audioStop, "number", null)
- : undefined,
+ () => this.props.renderDepth !== -1 && !LinkDocPreview.LinkInfo
+ ? Cast(this.Document._audioStop, "number", null)
+ : undefined,
(audioStop) =>
- audioStop !== undefined &&
- setTimeout(() => {
+ audioStop !== undefined && setTimeout(() => {
this.Pause();
setTimeout(() => (this.Document._audioStop = undefined), 10);
}), // wait for mainCont and try again to play
@@ -220,27 +172,25 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
// for updating the timecode
@action
timecodeChanged = () => {
- const htmlEle = this._ele;
- if (this.mediaState !== "recording" && htmlEle) {
+ if (this.mediaState !== media_state.Recording && this._ele) {
this.links
.map((l) => this.getLinkData(l))
.forEach(({ la1, la2, linkTime }) => {
if (
linkTime > NumCast(this.layoutDoc._currentTimecode) &&
- linkTime < htmlEle.currentTime
+ linkTime < this._ele!.currentTime
) {
Doc.linkFollowHighlight(la1);
}
});
- this.layoutDoc._currentTimecode = htmlEle.currentTime;
-
+ this.layoutDoc._currentTimecode = this._ele.currentTime;
}
}
// pause play back
Pause = action(() => {
this._ele!.pause();
- this.mediaState = "paused";
+ this.mediaState = media_state.Paused;
});
// play audio for documents created during recording
@@ -251,32 +201,30 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
// play back the audio from time
@action
- playFrom = (seekTimeInSeconds: number, endTime: number = this.trimEnd, fullPlay: boolean = false) => {
+ playFrom = (seekTimeInSeconds: number, endTime?: number, fullPlay: boolean = false): any => {
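+ // endTime defaults to the timeline's trim end; fullPlay rewinds the playhead to the trim start when playback completes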
clearTimeout(this._play);
if (Number.isNaN(this._ele?.duration)) {
setTimeout(() => this.playFrom(seekTimeInSeconds, endTime), 500);
- } else if (this._ele && AudioBox.Enabled) {
- if (seekTimeInSeconds < 0) {
- if (seekTimeInSeconds > -1) {
- setTimeout(() => this.playFrom(0), -seekTimeInSeconds * 1000);
- } else {
- this.Pause();
- }
- } else if (this.trimStart <= endTime && seekTimeInSeconds <= this.trimEnd) {
- const start = Math.max(this.trimStart, seekTimeInSeconds);
- const end = Math.min(this.trimEnd, endTime);
+ }
+ else if (this.timeline && this._ele && AudioBox.Enabled) {
+ const end = Math.min(this.timeline.trimEnd, endTime ?? this.timeline.trimEnd);
+ const start = Math.max(this.timeline.trimStart, seekTimeInSeconds);
+ if (seekTimeInSeconds >= 0 && this.timeline.trimStart <= end && seekTimeInSeconds <= this.timeline.trimEnd) {
this._ele.currentTime = start;
this._ele.play();
- runInAction(() => (this.mediaState = "playing"));
- if (endTime !== this.clipDuration) {
- this._play = setTimeout(
+ runInAction(() => this.mediaState = media_state.Playing);
+ if (end !== this.timeline.clipDuration) {
+ return this._play = setTimeout(
() => {
- this._ended = fullPlay ? true : this._ended;
+ if (fullPlay) this.setAnchorTime(this.timeline!.trimStart);
this.Pause();
},
(end - start) * 1000
); // use setTimeout to play a specific duration
}
+ }
+ else if (seekTimeInSeconds < 0 && seekTimeInSeconds > -1) {
+ setTimeout(() => this.playFrom(0), -seekTimeInSeconds * 1000);
} else {
this.Pause();
}
@@ -285,7 +233,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
// update the recording time
updateRecordTime = () => {
- if (this.mediaState === "recording") {
+ if (this.mediaState === media_state.Recording) {
setTimeout(this.updateRecordTime, 30);
if (this._paused) {
this._pausedTime += (new Date().getTime() - this._recordStart) / 1000;
@@ -300,22 +248,19 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
recordAudioAnnotation = async () => {
this._stream = await navigator.mediaDevices.getUserMedia({ audio: true });
this._recorder = new MediaRecorder(this._stream);
- this.dataDoc[this.props.fieldKey + "-recordingStart"] = new DateField(
- new Date()
- );
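+ // record the wall-clock start time; getAnchor uses it to compute timecodes while recording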
+ this.dataDoc[this.fieldKey + "-recordingStart"] = new DateField();
DocUtils.ActiveRecordings.push(this);
this._recorder.ondataavailable = async (e: any) => {
const [{ result }] = await Networking.UploadFilesToServer(e.data);
if (!(result instanceof Error)) {
- this.props.Document[this.props.fieldKey] = new AudioField(result.accessPaths.agnostic.client);
- if (this._trimEnd === undefined) this._trimEnd = this.clipDuration;
+ this.props.Document[this.fieldKey] = new AudioField(result.accessPaths.agnostic.client);
}
};
this._recordStart = new Date().getTime();
- runInAction(() => (this.mediaState = "recording"));
+ runInAction(() => this.mediaState = media_state.Recording);
setTimeout(this.updateRecordTime, 0);
this._recorder.start();
- setTimeout(() => this._recorder && this.stopRecording(), 60 * 60 * 1000); // stop after an hour
+ setTimeout(() => this.stopRecording(), 60 * 60 * 1000); // stop after an hour
}
// context menu
@@ -353,17 +298,16 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
// stops the recording
stopRecording = action(() => {
- this._recorder.stop();
- this._recorder = undefined;
- this.dataDoc[this.fieldKey + "-duration"] =
- (new Date().getTime() - this._recordStart - this.pauseTime) / 1000;
- this.mediaState = "paused";
- this._trimEnd = this.clipDuration;
- this.layoutDoc.clipStart = 0;
- this.layoutDoc.clipEnd = this.clipDuration;
- this._stream?.getAudioTracks()[0].stop();
- const ind = DocUtils.ActiveRecordings.indexOf(this);
- ind !== -1 && DocUtils.ActiveRecordings.splice(ind, 1);
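+ // guard: the one-hour auto-stop timeout can fire after the recording was already stopped manually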
+ if (this._recorder) {
+ this._recorder.stop();
+ this._recorder = undefined;
+ this.dataDoc[this.fieldKey + "-duration"] =
+ (new Date().getTime() - this._recordStart - this.pauseTime) / 1000;
+ this.mediaState = media_state.Paused;
+ this._stream?.getAudioTracks()[0].stop();
+ const ind = DocUtils.ActiveRecordings.indexOf(this);
+ ind !== -1 && DocUtils.ActiveRecordings.splice(ind, 1);
+ }
});
// button for starting and stopping the recording
@@ -376,16 +320,9 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
// for play button
Play = (e?: any) => {
- let start;
- if (this._ended || this._ele!.currentTime === this.clipDuration) {
- start = NumCast(this.layoutDoc.clipStart);
- this._ended = false;
- }
- else {
- start = this._ele!.currentTime;
- }
-
- this.playFrom(start, this.trimEnd, true);
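+ // restart from the trim start if the playhead is already at the end of the clip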
+ const eleTime = this._ele!.currentTime;
+ const start = eleTime === this.timeline?.trimDuration ? (this.timeline?.trimStart ?? 0) : eleTime;
+ this.playFrom(start, undefined, true);
e?.stopPropagation?.();
}
@@ -402,7 +339,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
);
Doc.GetProto(newDoc).recordingSource = this.dataDoc;
Doc.GetProto(newDoc).recordingStart = ComputedField.MakeFunction(
- `self.recordingSource["${this.props.fieldKey}-recordingStart"]`
+ `self.recordingSource["${this.fieldKey}-recordingStart"]`
);
Doc.GetProto(newDoc).mediaState = ComputedField.MakeFunction(
"self.recordingSource.mediaState"
@@ -420,7 +357,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
// returns the path of the audio file
@computed get path() {
- const field = Cast(this.props.Document[this.props.fieldKey], AudioField);
+ const field = Cast(this.props.Document[this.fieldKey], AudioField);
const path = field instanceof AudioField ? field.url.href : "";
return path === nullAudio ? "" : path;
}
@@ -460,68 +397,33 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
e.stopPropagation();
}
- playing = () => this.mediaState === "playing";
+ playing = () => this.mediaState === media_state.Playing;
playLink = (link: Doc) => {
- const stack = this._stackedTimeline.current;
if (link.annotationOn === this.rootDoc) {
if (!this.layoutDoc.dontAutoPlayFollowedLinks) {
- this.playFrom(stack?.anchorStart(link) || 0, stack?.anchorEnd(link));
+ this.playFrom(this.timeline?.anchorStart(link) || 0, this.timeline?.anchorEnd(link));
} else {
this._ele!.currentTime = this.layoutDoc._currentTimecode =
- stack?.anchorStart(link) || 0;
+ this.timeline?.anchorStart(link) || 0;
}
} else {
this.links
.filter((l) => l.anchor1 === link || l.anchor2 === link)
.forEach((l) => {
const { la1, la2 } = this.getLinkData(l);
- const startTime = stack?.anchorStart(la1) || stack?.anchorStart(la2);
- const endTime = stack?.anchorEnd(la1) || stack?.anchorEnd(la2);
+ const startTime = this.timeline?.anchorStart(la1) || this.timeline?.anchorStart(la2);
+ const endTime = this.timeline?.anchorEnd(la1) || this.timeline?.anchorEnd(la2);
if (startTime !== undefined) {
if (!this.layoutDoc.dontAutoPlayFollowedLinks) {
- endTime
- ? this.playFrom(startTime, endTime)
- : this.playFrom(startTime);
+ this.playFrom(startTime, endTime);
} else {
- this._ele!.currentTime = this.layoutDoc._currentTimecode =
- startTime;
+ this._ele!.currentTime = this.layoutDoc._currentTimecode = startTime;
}
}
});
}
}
- // shows trim controls
- @action
- startTrim = (scope: number) => {
- if (this.mediaState === "playing") {
- this.Pause();
- }
- this._trimming = scope;
- }
-
- // hides trim controls and displays new clip
- @undoBatch
- finishTrim = action(() => {
- if (this.mediaState === "playing") {
- this.Pause();
- }
- this.layoutDoc.clipStart = this.trimStart;
- this.layoutDoc.clipEnd = this.trimEnd;
- this.setAnchorTime(Math.max(Math.min(this.trimEnd, this._ele!.currentTime), this.trimStart));
- this._trimming = AudioBox.ScopeNone;
- });
-
- @action
- setStartTrim = (newStart: number) => {
- this._trimStart = newStart;
- }
-
- @action
- setEndTrim = (newEnd: number) => {
- this._trimEnd = newEnd;
- }
-
isActiveChild = () => this._isAnyChildContentActive;
timelineWhenChildContentsActiveChanged = (isActive: boolean) =>
this.props.whenChildContentsActiveChanged(
@@ -543,9 +445,6 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
this.heightPercent) /
100 // panelHeight * heightPercent is player height. * heightPercent is timeline height (as per css inline)
timelineWidth = () => this.props.PanelWidth() - AudioBox.playheadWidth;
- trimEndFunc = () => this.trimEnd;
- trimStartFunc = () => this.trimStart;
- trimDurationFunc = () => this.trimDuration;
@computed get renderTimeline() {
return (
<CollectionStackedTimeline
@@ -576,31 +475,28 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
PanelWidth={this.timelineWidth}
PanelHeight={this.timelineHeight}
rawDuration={this.rawDuration}
-
- // this edits the entire waveform when trimming is activated
- clipStart={this._trimming === AudioBox.ScopeAll ? 0 : this.clipStart}
- clipEnd={this._trimming === AudioBox.ScopeAll ? this.rawDuration : this.clipEnd}
- clipDuration={this._trimming === AudioBox.ScopeAll ? this.rawDuration : this.clipDuration}
- // this edits just the current waveform clip when trimming is activated
- // clipStart={this.clipStart}
- // clipEnd={this.clipEnd}
- // clipDuration={this.duration}
-
- trimming={this._trimming !== AudioBox.ScopeNone}
- trimStart={this.trimStartFunc}
- trimEnd={this.trimEndFunc}
- trimDuration={this.trimDurationFunc}
- setStartTrim={this.setStartTrim}
- setEndTrim={this.setEndTrim}
/>
);
}
+ // hides trim controls and displays new clip
+ @undoBatch
+ finishTrim = action(() => {
+ this.Pause();
+ this.setAnchorTime(Math.max(Math.min(this.timeline?.trimEnd || 0, this._ele!.currentTime), this.timeline?.trimStart || 0));
+ this.timeline?.StopTrimming();
+ });
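+ // shows trim controls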
+ startTrim = (scope: TrimScope) => {
+ this.Pause();
+ this.timeline?.StartTrimming(scope);
+ }
+
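+ // a double tap trims across the full recording (TrimScope.All); a single click toggles trimming of the current clip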
onClipPointerDown = (e: React.PointerEvent) => {
- setupMoveUpEvents(this, e, returnFalse, returnFalse, action((e: PointerEvent, doubleTap?: boolean) => {
+ this.timeline && setupMoveUpEvents(this, e, returnFalse, returnFalse, action((e: PointerEvent, doubleTap?: boolean) => {
if (doubleTap) {
- this.startTrim(AudioBox.ScopeAll);
- } else {
- this._trimming !== AudioBox.ScopeNone ? this.finishTrim() : this.startTrim(AudioBox.ScopeClip);
+ this.startTrim(TrimScope.All);
+ } else if (this.timeline) {
+ this.Pause();
+ this.timeline.IsTrimming !== TrimScope.None ? this.finishTrim() : this.startTrim(TrimScope.Clip);
}
}));
}
@@ -613,12 +509,12 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
return (
<div
ref={r => {
- if (r && this._stackedTimeline.current) {
- this.dropDisposer?.();
- this.dropDisposer = DragManager.MakeDropTarget(r,
+ if (r && this.timeline) {
+ this._dropDisposer?.();
+ this._dropDisposer = DragManager.MakeDropTarget(r,
(e, de) => {
const [xp, yp] = this.props.ScreenToLocalTransform().transformPoint(de.x, de.y);
- de.complete.docDragData && this._stackedTimeline.current!.internalDocDrop(e, de, de.complete.docDragData, xp);
+ de.complete.docDragData && this.timeline!.internalDocDrop(e, de, de.complete.docDragData, xp);
}
, this.layoutDoc, undefined);
}
@@ -644,7 +540,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
size={this.props.PanelHeight() < 36 ? "1x" : "2x"}
/>
</div>
- {this.mediaState === "recording" || this.mediaState === "paused" ? (
+ {this.mediaState === media_state.Recording || this.mediaState === media_state.Playing ? (
<div className="recording" onClick={(e) => e.stopPropagation()}>
<div className="recording-buttons" onClick={this.recordClick}>
<FontAwesomeIcon
@@ -694,22 +590,22 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
>
<div
className="audiobox-buttons"
- title={this.mediaState === "paused" ? "play" : "pause"}
- onClick={this.mediaState === "paused" ? this.Play : this.Pause}
+ title={this.mediaState === media_state.Paused ? "play" : "pause"}
+ onClick={this.mediaState === media_state.Paused ? this.Play : this.Pause}
>
{" "}
<FontAwesomeIcon
- icon={this.mediaState === "paused" ? "play" : "pause"}
+ icon={this.mediaState === media_state.Paused ? "play" : "pause"}
size={"1x"}
/>
</div>
<div
className="audiobox-buttons"
- title={this._trimming !== AudioBox.ScopeNone ? "finish" : "trim"}
+ title={this.timeline?.IsTrimming !== TrimScope.None ? "finish" : "trim"}
onPointerDown={this.onClipPointerDown}
>
<FontAwesomeIcon
- icon={this._trimming !== AudioBox.ScopeNone ? "check" : "cut"}
+ icon={this.timeline?.IsTrimming !== TrimScope.None ? "check" : "cut"}
size={"1x"}
/>
</div>
@@ -727,10 +623,10 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
</div>
{this.audio}
<div className="audioBox-current-time">
- {formatTime(Math.round(NumCast(this.layoutDoc._currentTimecode) - NumCast(this.clipStart)))}
+ {this.timeline && formatTime(Math.round(NumCast(this.layoutDoc._currentTimecode) - NumCast(this.timeline.clipStart)))}
</div>
<div className="audioBox-total-time">
- {formatTime(Math.round(NumCast(this.clipDuration)))}
+ {this.timeline && formatTime(Math.round(NumCast(this.timeline?.clipDuration)))}
</div>
</div>
</div>
@@ -738,4 +634,4 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
</div>
);
}
-}
+}
\ No newline at end of file