Diffstat (limited to 'src/client/views/nodes/AudioBox.tsx')
-rw-r--r--  src/client/views/nodes/AudioBox.tsx  1059
1 file changed, 524 insertions, 535 deletions
diff --git a/src/client/views/nodes/AudioBox.tsx b/src/client/views/nodes/AudioBox.tsx
index 93377f1dc..8437736ae 100644
--- a/src/client/views/nodes/AudioBox.tsx
+++ b/src/client/views/nodes/AudioBox.tsx
@@ -1,136 +1,130 @@
-import React = require("react");
-import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
-import {
- action,
- computed,
- IReactionDisposer,
- observable,
- reaction,
- runInAction
-} from "mobx";
-import { observer } from "mobx-react";
-import { DateField } from "../../../fields/DateField";
-import { Doc, DocListCast, Opt } from "../../../fields/Doc";
-import { ComputedField } from "../../../fields/ScriptField";
-import { Cast, NumCast } from "../../../fields/Types";
-import { AudioField, nullAudio } from "../../../fields/URLField";
-import { emptyFunction, formatTime } from "../../../Utils";
-import { DocUtils } from "../../documents/Documents";
-import { Networking } from "../../Network";
-import { CurrentUserUtils } from "../../util/CurrentUserUtils";
-import { SnappingManager } from "../../util/SnappingManager";
-import { CollectionStackedTimeline } from "../collections/CollectionStackedTimeline";
-import { ContextMenu } from "../ContextMenu";
-import { ContextMenuProps } from "../ContextMenuItem";
-import {
- ViewBoxAnnotatableComponent,
- ViewBoxAnnotatableProps
-} from "../DocComponent";
-import { Colors } from "../global/globalEnums";
-import "./AudioBox.scss";
-import { FieldView, FieldViewProps } from "./FieldView";
-import { LinkDocPreview } from "./LinkDocPreview";
-
+import React = require('react');
+import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
+import { action, computed, IReactionDisposer, observable, runInAction } from 'mobx';
+import { observer } from 'mobx-react';
+import { DateField } from '../../../fields/DateField';
+import { Doc, DocListCast } from '../../../fields/Doc';
+import { ComputedField } from '../../../fields/ScriptField';
+import { Cast, DateCast, NumCast } from '../../../fields/Types';
+import { AudioField, nullAudio } from '../../../fields/URLField';
+import { emptyFunction, formatTime, OmitKeys, returnFalse, setupMoveUpEvents } from '../../../Utils';
+import { DocUtils } from '../../documents/Documents';
+import { Networking } from '../../Network';
+import { DragManager } from '../../util/DragManager';
+import { undoBatch } from '../../util/UndoManager';
+import { CollectionStackedTimeline, TrimScope } from '../collections/CollectionStackedTimeline';
+import { ContextMenu } from '../ContextMenu';
+import { ContextMenuProps } from '../ContextMenuItem';
+import { ViewBoxAnnotatableComponent, ViewBoxAnnotatableProps } from '../DocComponent';
+import './AudioBox.scss';
+import { FieldView, FieldViewProps } from './FieldView';
+
+/**
+ * AudioBox
+ * Main component: AudioBox.tsx
+ * Supporting Components: CollectionStackedTimeline, AudioWaveform
+ *
+ * AudioBox is a node that supports the recording and playback of audio files in Dash.
+ * When an audio file is imported into Dash, it is immediately rendered as an AudioBox document.
+ * When a blank AudioBox node is created in Dash, recording controls are displayed; the user can start a recording, pause or stop it, and use dictation to create a text transcript.
+ * Recording is done using the MediaDevices API to access the user's device microphone (see recordAudioAnnotation below).
+ * CollectionStackedTimeline handles behavior shared between AudioBox and VideoBox, but AudioBox handles playing, pausing, etc., because it contains the <audio> element.
+ * Trimming is nondestructive: it just sets new bounds for playback and the rendered timeline.
+ */
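For orientation, here is a minimal, self-contained sketch of the MediaDevices recording flow the comment above describes. It uses only standard web APIs; Dash's upload and document wiring are elided.

```ts
// Hedged sketch, not part of this diff: record the microphone for five seconds
// and resolve with the captured audio as a single Blob.
async function recordFiveSeconds(): Promise<Blob> {
  const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
  const recorder = new MediaRecorder(stream);
  const chunks: Blob[] = [];
  recorder.ondataavailable = (e: BlobEvent) => chunks.push(e.data);
  return new Promise<Blob>(resolve => {
    recorder.onstop = () => {
      stream.getAudioTracks().forEach(t => t.stop()); // release the microphone
      resolve(new Blob(chunks, { type: 'audio/webm' }));
    };
    recorder.start();
    setTimeout(() => recorder.stop(), 5000);
  });
}
```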
+
+// minimal type declaration for the MediaRecorder API, which records audio from a MediaStream provided by the MediaDevices API
declare class MediaRecorder {
constructor(e: any); // whatever MediaRecorder has
}
+
+enum media_state {
+ PendingRecording = 'pendingRecording',
+ Recording = 'recording',
+ Paused = 'paused',
+ Playing = 'playing',
+}
+
@observer
export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProps & FieldViewProps>() {
public static LayoutString(fieldKey: string) {
return FieldView.LayoutString(AudioBox, fieldKey);
}
public static Enabled = false;
- static playheadWidth = 40; // width of playhead
- static heightPercent = 75; // height of timeline in percent of height of audioBox.
- static Instance: AudioBox;
+ static topControlsHeight = 30; // height of upper controls above timeline
+ static bottomControlsHeight = 20; // height of lower controls below timeline
+
+ _dropDisposer?: DragManager.DragDropDisposer;
_disposers: { [name: string]: IReactionDisposer } = {};
- _ele: HTMLAudioElement | null = null;
- _stackedTimeline = React.createRef<CollectionStackedTimeline>();
- _recorder: any;
+ _ele: HTMLAudioElement | null = null; // <audio> ref
+ _recorder: any; // MediaRecorder
_recordStart = 0;
- _pauseStart = 0;
- _pauseEnd = 0;
+ _pauseStart = 0; // time when recording is paused (used to keep track of recording timecodes)
_pausedTime = 0;
- _stream: MediaStream | undefined;
- _start: number = 0;
- _play: any = null;
- _ended: boolean = false;
-
- @observable static _scrubTime = 0;
- @observable _markerEnd: number = 0;
- @observable _position: number = 0;
- @observable _waveHeight: Opt<number> = NumCast(this.layoutDoc._height);
- @observable _paused: boolean = false;
- @observable _trimming: boolean = false;
- @observable _trimStart: number = NumCast(this.layoutDoc.clipStart) ? NumCast(this.layoutDoc.clipStart) : 0;
- @observable _trimEnd: number = NumCast(this.layoutDoc.clipEnd) ? NumCast(this.layoutDoc.clipEnd)
- : this.duration;
-
- @computed get mediaState():
- | undefined
- | "pendingRecording"
- | "recording"
- | "paused"
- | "playing" {
- return this.dataDoc.mediaState as
- | undefined
- | "pendingRecording"
- | "recording"
- | "paused"
- | "playing";
- }
- set mediaState(value) {
- this.dataDoc.mediaState = value;
- }
- public static SetScrubTime = action((timeInMillisFrom1970: number) => {
- AudioBox._scrubTime = 0;
- AudioBox._scrubTime = timeInMillisFrom1970;
- });
+ _stream: MediaStream | undefined; // passed to MediaRecorder, records device input audio
+ _play: any = null; // timeout for playback
+
+ @observable _stackedTimeline: any; // CollectionStackedTimeline ref
+ @observable _finished: boolean = false; // has playback reached end of clip
+ @observable _volume: number = 1;
+ @observable _muted: boolean = false;
+ @observable _paused: boolean = false; // is recording paused
+ // @observable rawDuration: number = 0; // computed from the length of the audio element when loaded
@computed get recordingStart() {
- return Cast(
- this.dataDoc[this.props.fieldKey + "-recordingStart"],
- DateField
- )?.date.getTime();
+ return DateCast(this.dataDoc[this.fieldKey + '-recordingStart'])?.date.getTime();
}
- @computed get duration() {
+ @computed get rawDuration() {
return NumCast(this.dataDoc[`${this.fieldKey}-duration`]);
+ } // bcz: shouldn't be needed since it's computed from the audio element
+ // mehek: not 100% sure, but I think this is necessary due to the order in which things are loaded ^^
+ // if you get rid of it and set the value to 0, the timeline and waveform will set their bounds incorrectly
+
+ @computed get miniPlayer() {
+ return this.props.PanelHeight() < 50;
+ } // used to collapse timeline when node is shrunk
+ @computed get links() {
+ return DocListCast(this.dataDoc.links);
}
- @computed get trimDuration() {
- return this._trimming && this._trimEnd ? this.duration : this._trimEnd - this._trimStart;
+ @computed get mediaState() {
+ return this.dataDoc.mediaState as media_state;
}
- @computed get anchorDocs() {
- return DocListCast(this.dataDoc[this.annotationKey]);
+ @computed get path() {
+ // returns the path of the audio file
+ const path = Cast(this.props.Document[this.fieldKey], AudioField, null)?.url.href || '';
+ return path === nullAudio ? '' : path;
}
- @computed get links() {
- return DocListCast(this.dataDoc.links);
+ set mediaState(value) {
+ this.dataDoc.mediaState = value;
}
- @computed get pauseTime() {
- return this._pauseEnd - this._pauseStart;
- } // total time paused to update the correct time
- @computed get heightPercent() {
- return AudioBox.heightPercent;
+
+ @computed get timeline() {
+ return this._stackedTimeline;
+ } // returns CollectionStackedTimeline ref
+
+ componentWillUnmount() {
+ this.removeCurrentlyPlaying();
+ this._dropDisposer?.();
+ Object.values(this._disposers).forEach(disposer => disposer?.());
+
+ this.mediaState === media_state.Recording && this.stopRecording();
}
- constructor(props: Readonly<ViewBoxAnnotatableProps & FieldViewProps>) {
- super(props);
- AudioBox.Instance = this;
+ @action
+ componentDidMount() {
+ this.props.setContentView?.(this); // this tells the DocumentView that this AudioBox is the "content" of the document. this allows the DocumentView to indirectly call getAnchor() on the AudioBox when making a link.
- if (this.duration === undefined) {
- runInAction(
- () =>
- (this.Document[this.fieldKey + "-duration"] = this.Document.duration)
- );
+ if (this.path) {
+ this.mediaState = media_state.Paused;
+ this.setPlayheadTime(NumCast(this.layoutDoc.clipStart));
+ } else {
+ this.mediaState = undefined as any as media_state;
}
}
getLinkData(l: Doc) {
let la1 = l.anchor1 as Doc;
let la2 = l.anchor2 as Doc;
- const linkTime =
- this._stackedTimeline.current?.anchorStart(la2) ||
- this._stackedTimeline.current?.anchorStart(la1) ||
- 0;
+ const linkTime = this.timeline?.anchorStart(la2) || this.timeline?.anchorStart(la1) || 0;
if (Doc.AreProtosEqual(la1, this.dataDoc)) {
la1 = l.anchor2 as Doc;
la2 = l.anchor1 as Doc;
@@ -144,416 +138,524 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
this.rootDoc,
this.dataDoc,
this.annotationKey,
- "_timecodeToShow" /* audioStart */,
- "_timecodeToHide" /* audioEnd */,
- this._ele?.currentTime ||
- Cast(this.props.Document._currentTimecode, "number", null) ||
- (this.mediaState === "recording"
- ? (Date.now() - (this.recordingStart || 0)) / 1000
- : undefined)
+ '_timecodeToShow' /* audioStart */,
+ '_timecodeToHide' /* audioEnd */,
+ this._ele?.currentTime || Cast(this.props.Document._currentTimecode, 'number', null) || (this.mediaState === media_state.Recording ? (Date.now() - (this.recordingStart || 0)) / 1000 : undefined)
) || this.rootDoc
);
- }
-
- componentWillUnmount() {
- Object.values(this._disposers).forEach((disposer) => disposer?.());
- const ind = DocUtils.ActiveRecordings.indexOf(this);
- ind !== -1 && DocUtils.ActiveRecordings.splice(ind, 1);
- }
-
- @action
- componentDidMount() {
- this.props.setContentView?.(this); // this tells the DocumentView that this AudioBox is the "content" of the document. this allows the DocumentView to indirectly call getAnchor() on the AudioBox when making a link.
-
- this.mediaState = this.path ? "paused" : undefined;
-
- this.layoutDoc.clipStart = this.layoutDoc.clipStart ? this.layoutDoc.clipStart : 0;
- this.layoutDoc.clipEnd = this.layoutDoc.clipEnd ? this.layoutDoc.clipEnd : this.duration ? this.duration : undefined;
-
- this.path && this.setAnchorTime(NumCast(this.layoutDoc.clipStart));
- this.path && this.timecodeChanged();
-
- this._disposers.triggerAudio = reaction(
- () =>
- !LinkDocPreview.LinkInfo && this.props.renderDepth !== -1
- ? NumCast(this.Document._triggerAudio, null)
- : undefined,
- (start) =>
- start !== undefined &&
- setTimeout(() => {
- this.playFrom(start);
- setTimeout(() => {
- this.Document._currentTimecode = start;
- this.Document._triggerAudio = undefined;
- }, 10);
- }), // wait for mainCont and try again to play
- { fireImmediately: true }
- );
+ };
- this._disposers.audioStop = reaction(
- () =>
- this.props.renderDepth !== -1 && !LinkDocPreview.LinkInfo
- ? Cast(this.Document._audioStop, "number", null)
- : undefined,
- (audioStop) =>
- audioStop !== undefined &&
- setTimeout(() => {
- this.Pause();
- setTimeout(() => (this.Document._audioStop = undefined), 10);
- }), // wait for mainCont and try again to play
- { fireImmediately: true }
- );
- }
-
- // for updating the timecode
+ // updates timecode and shows it in timeline, follows links at time
@action
timecodeChanged = () => {
- const htmlEle = this._ele;
- if (this.mediaState !== "recording" && htmlEle) {
- htmlEle.duration &&
- htmlEle.duration !== Infinity &&
- runInAction(
- () => (this.dataDoc[this.fieldKey + "-duration"] = htmlEle.duration)
- );
- this.layoutDoc.clipEnd = this.layoutDoc.clipEnd ? Math.min(this.duration, NumCast(this.layoutDoc.clipEnd)) : this.duration;
- this._trimEnd = this._trimEnd ? Math.min(this.duration, this._trimEnd) : this.duration;
+ if (this.mediaState !== media_state.Recording && this._ele) {
this.links
- .map((l) => this.getLinkData(l))
+ .map(l => this.getLinkData(l))
.forEach(({ la1, la2, linkTime }) => {
- if (
- linkTime > NumCast(this.layoutDoc._currentTimecode) &&
- linkTime < htmlEle.currentTime
- ) {
+ if (linkTime > NumCast(this.layoutDoc._currentTimecode) && linkTime < this._ele!.currentTime) {
Doc.linkFollowHighlight(la1);
}
});
- this.layoutDoc._currentTimecode = htmlEle.currentTime;
-
+ this.layoutDoc._currentTimecode = this._ele.currentTime;
+ this.timeline?.scrollToTime(NumCast(this.layoutDoc._currentTimecode));
}
- }
+ };
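The link-follow check above compares consecutive timecodes: a link anchor is highlighted when its start time falls between the last stored timecode and the element's current time, i.e. when the playhead crosses it between two timeupdate ticks. A reduced sketch of that crossing test, with illustrative names:

```ts
// Returns true when the playhead moved across `anchor` between two timeupdate ticks.
function crossedAnchor(prevTimecode: number, currTimecode: number, anchor: number): boolean {
  return anchor > prevTimecode && anchor < currTimecode;
}
```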
- // pause play back
- Pause = action(() => {
- this._ele!.pause();
- this.mediaState = "paused";
- });
-
- // play audio for documents created during recording
- playFromTime = (absoluteTime: number) => {
- this.recordingStart &&
- this.playFrom((absoluteTime - this.recordingStart) / 1000);
- }
-
- // play back the audio from time
+ // plays back the audio from seekTimeInSeconds; fullPlay indicates whether the clip is playing to its end rather than just a link's range
@action
- playFrom = (seekTimeInSeconds: number, endTime: number = this._trimEnd, fullPlay: boolean = false) => {
- clearTimeout(this._play);
+ playFrom = (seekTimeInSeconds: number, endTime?: number, fullPlay: boolean = false) => {
+ clearTimeout(this._play); // abort any previous clip ending
if (Number.isNaN(this._ele?.duration)) {
+ // audio element isn't loaded yet... wait 1/2 second and try again
setTimeout(() => this.playFrom(seekTimeInSeconds, endTime), 500);
- } else if (this._ele && AudioBox.Enabled) {
- if (seekTimeInSeconds < 0) {
- if (seekTimeInSeconds > -1) {
- setTimeout(() => this.playFrom(0), -seekTimeInSeconds * 1000);
- } else {
- this.Pause();
- }
- } else if (this._trimStart <= endTime && seekTimeInSeconds <= this._trimEnd) {
- const start = Math.max(this._trimStart, seekTimeInSeconds);
- const end = Math.min(this._trimEnd, endTime);
+ } else if (this.timeline && this._ele && AudioBox.Enabled) {
+ // trim bounds override the requested playback bounds
+ const end = Math.min(this.timeline.trimEnd, endTime ?? this.timeline.trimEnd);
+ const start = Math.max(this.timeline.trimStart, seekTimeInSeconds);
+ // checks if times are within clip range
+ if (seekTimeInSeconds >= 0 && this.timeline.trimStart <= end && seekTimeInSeconds <= this.timeline.trimEnd) {
this._ele.currentTime = start;
this._ele.play();
- runInAction(() => (this.mediaState = "playing"));
- if (endTime !== this.duration) {
- this._play = setTimeout(
- () => {
- this._ended = fullPlay ? true : this._ended;
- this.Pause();
- },
- (end - start) * 1000
- ); // use setTimeout to play a specific duration
- }
+ this.mediaState = media_state.Playing;
+ this.addCurrentlyPlaying();
+ this._play = setTimeout(() => {
+ // keep track of whether the end of the clip was reached so the clip restarts on the next play
+ if (fullPlay) this._finished = true;
+ // removes from currently playing if playback has reached end of range marker
+ else this.removeCurrentlyPlaying();
+ this.Pause();
+ }, (end - start) * 1000);
} else {
this.Pause();
}
}
- }
+ };
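Since HTMLAudioElement has no built-in way to stop at an arbitrary end time, playFrom schedules its own stop with setTimeout, clearing any previously scheduled one first. A standalone sketch of that technique, assumed outside Dash:

```ts
let stopTimer: number | undefined;

// Play [start, end) of an audio element, scheduling a pause when the range elapses.
function playRange(ele: HTMLAudioElement, start: number, end: number) {
  clearTimeout(stopTimer); // abort any previously scheduled stop
  ele.currentTime = start;
  ele.play();
  stopTimer = window.setTimeout(() => ele.pause(), (end - start) * 1000);
}
```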
+
+ // removes from currently playing display
+ @action
+ removeCurrentlyPlaying = () => {
+ if (CollectionStackedTimeline.CurrentlyPlaying) {
+ const index = CollectionStackedTimeline.CurrentlyPlaying.indexOf(this.layoutDoc);
+ index !== -1 && CollectionStackedTimeline.CurrentlyPlaying.splice(index, 1);
+ }
+ };
+
+ // adds doc to currently playing display
+ @action
+ addCurrentlyPlaying = () => {
+ if (!CollectionStackedTimeline.CurrentlyPlaying) {
+ CollectionStackedTimeline.CurrentlyPlaying = [];
+ }
+ if (CollectionStackedTimeline.CurrentlyPlaying.indexOf(this.layoutDoc) === -1) {
+ CollectionStackedTimeline.CurrentlyPlaying.push(this.layoutDoc);
+ }
+ };
// update the recording time
updateRecordTime = () => {
- if (this.mediaState === "recording") {
+ if (this.mediaState === media_state.Recording) {
setTimeout(this.updateRecordTime, 30);
- if (this._paused) {
- this._pausedTime += (new Date().getTime() - this._recordStart) / 1000;
- } else {
- this.layoutDoc._currentTimecode =
- (new Date().getTime() - this._recordStart - this.pauseTime) / 1000;
+ if (!this._paused) {
+ this.layoutDoc._currentTimecode = (new Date().getTime() - this._recordStart - this._pausedTime) / 1000;
}
}
- }
+ };
// starts recording
recordAudioAnnotation = async () => {
this._stream = await navigator.mediaDevices.getUserMedia({ audio: true });
this._recorder = new MediaRecorder(this._stream);
- this.dataDoc[this.props.fieldKey + "-recordingStart"] = new DateField(
- new Date()
- );
+ this.dataDoc[this.fieldKey + '-recordingStart'] = new DateField();
DocUtils.ActiveRecordings.push(this);
this._recorder.ondataavailable = async (e: any) => {
- console.log("Data available", e);
const [{ result }] = await Networking.UploadFilesToServer(e.data);
- console.log("Data result", result);
if (!(result instanceof Error)) {
- this.props.Document[this.props.fieldKey] = new AudioField(result.accessPaths.agnostic.client);
+ this.props.Document[this.fieldKey] = new AudioField(result.accessPaths.agnostic.client);
}
};
this._recordStart = new Date().getTime();
- runInAction(() => (this.mediaState = "recording"));
- setTimeout(this.updateRecordTime, 0);
+ runInAction(() => (this.mediaState = media_state.Recording));
+ setTimeout(this.updateRecordTime);
this._recorder.start();
- setTimeout(() => this._recorder && this.stopRecording(), 60 * 60 * 1000); // stop after an hour
- }
+ setTimeout(this.stopRecording, 60 * 60 * 1000); // stop after an hour
+ };
+
+ // stops recording
+ @action
+ stopRecording = () => {
+ if (this._recorder) {
+ this._recorder.stop();
+ this._recorder = undefined;
+ const now = new Date().getTime();
+ this._paused && (this._pausedTime += now - this._pauseStart);
+ this.dataDoc[this.fieldKey + '-duration'] = (now - this._recordStart - this._pausedTime) / 1000;
+ this.mediaState = media_state.Paused;
+ this._stream?.getAudioTracks()[0].stop();
+ const ind = DocUtils.ActiveRecordings.indexOf(this);
+ ind !== -1 && DocUtils.ActiveRecordings.splice(ind, 1);
+ }
+ };
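The recording duration above subtracts accumulated pause time from wall-clock time: recordPlay adds each pause's length to _pausedTime, and stopRecording divides the remainder by 1000. A worked example of that bookkeeping, with illustrative timestamps in milliseconds:

```ts
// Record starts at t=0, pauses at t=10000, resumes at t=14000, stops at t=30000.
const recordStart = 0;
let pausedTime = 0;
const pauseStart = 10000;
pausedTime += 14000 - pauseStart;                           // recordPlay accumulates the pause: 4000 ms
const duration = (30000 - recordStart - pausedTime) / 1000; // stopRecording: 26 seconds of audio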
// context menu
specificContextMenu = (e: React.MouseEvent): void => {
const funcs: ContextMenuProps[] = [];
funcs.push({
- description:
- (this.layoutDoc.hideAnchors ? "Don't hide" : "Hide") + " anchors",
- event: () => (this.layoutDoc.hideAnchors = !this.layoutDoc.hideAnchors),
- icon: "expand-arrows-alt",
+ description: (this.layoutDoc.hideAnchors ? "Don't hide" : 'Hide') + ' anchors',
+ event: e => (this.layoutDoc.hideAnchors = !this.layoutDoc.hideAnchors),
+ icon: 'expand-arrows-alt',
+ });
+ funcs.push({
+ description: (this.layoutDoc.dontAutoFollowLinks ? '' : "Don't") + ' follow links when encountered',
+ event: e => (this.layoutDoc.dontAutoFollowLinks = !this.layoutDoc.dontAutoFollowLinks),
+ icon: 'expand-arrows-alt',
});
funcs.push({
- description:
- (this.layoutDoc.dontAutoPlayFollowedLinks ? "" : "Don't") +
- " play when link is selected",
- event: () =>
- (this.layoutDoc.dontAutoPlayFollowedLinks =
- !this.layoutDoc.dontAutoPlayFollowedLinks),
- icon: "expand-arrows-alt",
+ description: (this.layoutDoc.dontAutoPlayFollowedLinks ? '' : "Don't") + ' play when link is selected',
+ event: e => (this.layoutDoc.dontAutoPlayFollowedLinks = !this.layoutDoc.dontAutoPlayFollowedLinks),
+ icon: 'expand-arrows-alt',
});
funcs.push({
- description:
- (this.layoutDoc.autoPlayAnchors ? "Don't auto play" : "Auto play") +
- " anchors onClick",
- event: () =>
- (this.layoutDoc.autoPlayAnchors = !this.layoutDoc.autoPlayAnchors),
- icon: "expand-arrows-alt",
+ description: (this.layoutDoc.autoPlayAnchors ? "Don't auto" : 'Auto') + ' play anchors onClick',
+ event: e => (this.layoutDoc.autoPlayAnchors = !this.layoutDoc.autoPlayAnchors),
+ icon: 'expand-arrows-alt',
});
ContextMenu.Instance?.addItem({
- description: "Options...",
+ description: 'Options...',
subitems: funcs,
- icon: "asterisk",
+ icon: 'asterisk',
});
- }
-
- // stops the recording
- stopRecording = action(() => {
- this._recorder.stop();
- this._recorder = undefined;
- this.dataDoc[this.fieldKey + "-duration"] =
- (new Date().getTime() - this._recordStart - this.pauseTime) / 1000;
- this.mediaState = "paused";
- this._trimEnd = this.duration;
- this.layoutDoc.clipStart = 0;
- this.layoutDoc.clipEnd = this.duration;
- this._stream?.getAudioTracks()[0].stop();
- const ind = DocUtils.ActiveRecordings.indexOf(this);
- ind !== -1 && DocUtils.ActiveRecordings.splice(ind, 1);
- });
+ };
// button for starting and stopping the recording
- recordClick = (e: React.MouseEvent) => {
- if (e.button === 0 && !e.ctrlKey) {
- this._recorder ? this.stopRecording() : this.recordAudioAnnotation();
- e.stopPropagation();
- }
- }
+ Record = (e: React.PointerEvent) => {
+ e.button === 0 &&
+ !e.ctrlKey &&
+ setupMoveUpEvents(
+ this,
+ e,
+ returnFalse,
+ returnFalse,
+ action(() => {
+ this._recorder ? this.stopRecording() : this.recordAudioAnnotation();
+ }),
+ false
+ );
+ };
// for play button
Play = (e?: any) => {
- let start;
- if (this._ended || this._ele!.currentTime === this.duration) {
- start = this._trimStart;
- this._ended = false;
- }
- else {
- start = this._ele!.currentTime;
+ e?.stopPropagation?.();
+
+ if (this.timeline && this._ele) {
+ const eleTime = this._ele.currentTime;
+
+ // if the current timecode is outside the trim bounds, restart from trimStart
+ let start = eleTime >= this.timeline.trimEnd || eleTime <= this.timeline.trimStart ? this.timeline.trimStart : eleTime;
+
+ // restarts clip if reached end on last play
+ if (this._finished) {
+ this._finished = false;
+ start = this.timeline.trimStart;
+ }
+
+ this.playFrom(start, this.timeline.trimEnd, true);
}
+ };
- this.playFrom(start, this._trimEnd, true);
- e?.stopPropagation?.();
- }
+ // pause play back
+ @action
+ Pause = () => {
+ if (this._ele) {
+ this._ele.pause();
+ this.mediaState = media_state.Paused;
+
+ // if paused in the middle of playback, prevents restart on next play
+ if (!this._finished) clearTimeout(this._play);
+ this.removeCurrentlyPlaying();
+ }
+ };
- // creates a text document for dictation
+ // for dictation button, creates a text document for dictation
onFile = (e: any) => {
- const newDoc = CurrentUserUtils.GetNewTextDoc(
- "",
- NumCast(this.props.Document.x),
- NumCast(this.props.Document.y) +
- NumCast(this.props.Document._height) +
- 10,
- NumCast(this.props.Document._width),
- 2 * NumCast(this.props.Document._height)
- );
- Doc.GetProto(newDoc).recordingSource = this.dataDoc;
- Doc.GetProto(newDoc).recordingStart = ComputedField.MakeFunction(
- `self.recordingSource["${this.props.fieldKey}-recordingStart"]`
- );
- Doc.GetProto(newDoc).mediaState = ComputedField.MakeFunction(
- "self.recordingSource.mediaState"
+ setupMoveUpEvents(
+ this,
+ e,
+ returnFalse,
+ returnFalse,
+ action(() => {
+ const newDoc = DocUtils.GetNewTextDoc('', NumCast(this.rootDoc.x), NumCast(this.rootDoc.y) + NumCast(this.layoutDoc._height) + 10, NumCast(this.layoutDoc._width), 2 * NumCast(this.layoutDoc._height));
+ Doc.GetProto(newDoc).recordingSource = this.dataDoc;
+ Doc.GetProto(newDoc).recordingStart = ComputedField.MakeFunction(`self.recordingSource["${this.fieldKey}-recordingStart"]`);
+ Doc.GetProto(newDoc).mediaState = ComputedField.MakeFunction('self.recordingSource.mediaState');
+ if (DocListCast(Doc.MyOverlayDocs?.data).includes(this.rootDoc)) {
+ newDoc.x = this.rootDoc.x;
+ newDoc.y = NumCast(this.rootDoc.y) + NumCast(this.rootDoc._height);
+ Doc.AddDocToList(Doc.MyOverlayDocs, undefined, newDoc);
+ } else {
+ this.props.addDocument?.(newDoc);
+ }
+ }),
+ false
);
- this.props.addDocument?.(newDoc);
- e.stopPropagation();
- }
+ };
- // ref for updating time
+ // sets <audio> ref for updating time
setRef = (e: HTMLAudioElement | null) => {
- e?.addEventListener("timeupdate", this.timecodeChanged);
- e?.addEventListener("ended", this.Pause);
+ e?.addEventListener('timeupdate', this.timecodeChanged);
+ e?.addEventListener('ended', () => {
+ this._finished = true;
+ this.Pause();
+ });
this._ele = e;
- }
-
- // returns the path of the audio file
- @computed get path() {
- const field = Cast(this.props.Document[this.props.fieldKey], AudioField);
- const path = field instanceof AudioField ? field.url.href : "";
- return path === nullAudio ? "" : path;
- }
-
- // returns the html audio element
- @computed get audio() {
- return <audio ref={this.setRef} className={`audiobox-control${this.props.isContentActive() ? "-interactive" : ""}`}>
- <source src={this.path} type="audio/mpeg" />
- Not supported.
- </audio>;
- }
+ };
// pause the time during recording phase
- @action
- recordPause = (e: React.MouseEvent) => {
- this._pauseStart = new Date().getTime();
- this._paused = true;
- this._recorder.pause();
- e.stopPropagation();
- }
+ recordPause = (e: React.PointerEvent) => {
+ setupMoveUpEvents(
+ this,
+ e,
+ returnFalse,
+ returnFalse,
+ action(() => {
+ this._pauseStart = new Date().getTime();
+ this._paused = true;
+ this._recorder.pause();
+ }),
+ false
+ );
+ };
// continue the recording
- @action
- recordPlay = (e: React.MouseEvent) => {
- this._pauseEnd = new Date().getTime();
- this._paused = false;
- this._recorder.resume();
- e.stopPropagation();
- }
+ recordPlay = (e: React.PointerEvent) => {
+ setupMoveUpEvents(
+ this,
+ e,
+ returnFalse,
+ returnFalse,
+ action(() => {
+ this._paused = false;
+ this._pausedTime += new Date().getTime() - this._pauseStart;
+ this._recorder.resume();
+ }),
+ false
+ );
+ };
- playing = () => this.mediaState === "playing";
+ // plays link
playLink = (link: Doc) => {
- const stack = this._stackedTimeline.current;
if (link.annotationOn === this.rootDoc) {
if (!this.layoutDoc.dontAutoPlayFollowedLinks) {
- this.playFrom(stack?.anchorStart(link) || 0, stack?.anchorEnd(link));
+ this.playFrom(this.timeline?.anchorStart(link) || 0, this.timeline?.anchorEnd(link));
} else {
- this._ele!.currentTime = this.layoutDoc._currentTimecode =
- stack?.anchorStart(link) || 0;
+ this._ele!.currentTime = this.layoutDoc._currentTimecode = this.timeline?.anchorStart(link) || 0;
}
} else {
this.links
- .filter((l) => l.anchor1 === link || l.anchor2 === link)
- .forEach((l) => {
+ .filter(l => l.anchor1 === link || l.anchor2 === link)
+ .forEach(l => {
const { la1, la2 } = this.getLinkData(l);
- const startTime = stack?.anchorStart(la1) || stack?.anchorStart(la2);
- const endTime = stack?.anchorEnd(la1) || stack?.anchorEnd(la2);
+ const startTime = this.timeline?.anchorStart(la1) || this.timeline?.anchorStart(la2);
+ const endTime = this.timeline?.anchorEnd(la1) || this.timeline?.anchorEnd(la2);
if (startTime !== undefined) {
if (!this.layoutDoc.dontAutoPlayFollowedLinks) {
- endTime
- ? this.playFrom(startTime, endTime)
- : this.playFrom(startTime);
+ this.playFrom(startTime, endTime);
} else {
- this._ele!.currentTime = this.layoutDoc._currentTimecode =
- startTime;
+ this._ele!.currentTime = this.layoutDoc._currentTimecode = startTime;
}
}
});
}
- }
+ };
- // shows trim controls
@action
- startTrim = () => {
- if (!this.duration) {
- this.timecodeChanged();
- }
- if (this.mediaState === "playing") {
- this.Pause();
- }
- this._trimming = true;
- }
+ timelineWhenChildContentsActiveChanged = (isActive: boolean) => this.props.whenChildContentsActiveChanged((this._isAnyChildContentActive = isActive));
- // hides trim controls and displays new clip
- @action
+ timelineScreenToLocal = () => this.props.ScreenToLocalTransform().translate(0, -AudioBox.bottomControlsHeight);
+
+ setPlayheadTime = (time: number) => (this._ele!.currentTime = this.layoutDoc._currentTimecode = time);
+
+ playing = () => this.mediaState === media_state.Playing;
+
+ isActiveChild = () => this._isAnyChildContentActive;
+
+ // timeline dimensions
+ timelineWidth = () => this.props.PanelWidth();
+ timelineHeight = () => this.props.PanelHeight() - (AudioBox.topControlsHeight + AudioBox.bottomControlsHeight);
+
+ // ends trim, hides trim controls and displays new clip
+ @undoBatch
finishTrim = () => {
- if (this.mediaState === "playing") {
- this.Pause();
- }
- this.layoutDoc.clipStart = this._trimStart;
- this.layoutDoc.clipEnd = this._trimEnd;
- this._trimming = false;
- this.setAnchorTime(Math.max(Math.min(this._trimEnd, this._ele!.currentTime), this._trimStart));
- }
+ this.Pause();
+ this.setPlayheadTime(Math.max(Math.min(this.timeline?.trimEnd || 0, this._ele!.currentTime), this.timeline?.trimStart || 0));
+ this.timeline?.StopTrimming();
+ };
+
+ // displays trim controls to start trimming clip
+ startTrim = (scope: TrimScope) => {
+ this.Pause();
+ this.timeline?.StartTrimming(scope);
+ };
+
+ // for the trim button: double click starts trimming over the full clip; single click toggles trimming within the current trim bounds
+ onClipPointerDown = (e: React.PointerEvent) => {
+ e.stopPropagation();
+ this.timeline &&
+ setupMoveUpEvents(
+ this,
+ e,
+ returnFalse,
+ returnFalse,
+ action((e: PointerEvent, doubleTap?: boolean) => {
+ if (doubleTap) {
+ this.startTrim(TrimScope.All);
+ } else if (this.timeline) {
+ this.Pause();
+ this.timeline.IsTrimming !== TrimScope.None ? this.finishTrim() : this.startTrim(TrimScope.Clip);
+ }
+ })
+ );
+ };
+ // for zoom slider, sets timeline waveform zoom
+ zoom = (zoom: number) => {
+ this.timeline?.setZoom(zoom);
+ };
+
+ // for the volume slider, sets playback volume
@action
- setStartTrim = (newStart: number) => {
- this._trimStart = newStart;
- }
+ setVolume = (volume: number) => {
+ if (this._ele) {
+ this._volume = volume;
+ this._ele.volume = volume;
+ if (this._muted) {
+ this.toggleMute();
+ }
+ }
+ };
+ // toggles whether the audio is muted
@action
- setEndTrim = (newEnd: number) => {
- this._trimEnd = newEnd;
+ toggleMute = () => {
+ if (this._ele) {
+ this._muted = !this._muted;
+ this._ele.muted = this._muted;
+ }
+ };
+
+ setupTimelineDrop = (r: HTMLDivElement | null) => {
+ if (r && this.timeline) {
+ this._dropDisposer?.();
+ this._dropDisposer = DragManager.MakeDropTarget(
+ r,
+ (e, de) => {
+ const [xp, yp] = this.props.ScreenToLocalTransform().transformPoint(de.x, de.y);
+ de.complete.docDragData && this.timeline.internalDocDrop(e, de, de.complete.docDragData, xp);
+ },
+ this.layoutDoc,
+ undefined
+ );
+ }
+ };
+
+ // UI for recording, initially displayed when a new audio node is created in Dash
+ @computed get recordingControls() {
+ return (
+ <div className="audiobox-recorder">
+ <div className="audiobox-dictation" onPointerDown={this.onFile}>
+ <FontAwesomeIcon size="2x" icon="file-alt" />
+ </div>
+ {[media_state.Recording, media_state.Playing].includes(this.mediaState) ? (
+ <div className="recording-controls" onClick={e => e.stopPropagation()}>
+ <div className="record-button" onPointerDown={this.Record}>
+ <FontAwesomeIcon size="2x" icon="stop" />
+ </div>
+ <div className="record-button" onPointerDown={this._paused ? this.recordPlay : this.recordPause}>
+ <FontAwesomeIcon size="2x" icon={this._paused ? 'play' : 'pause'} />
+ </div>
+ <div className="record-timecode">{formatTime(Math.round(NumCast(this.layoutDoc._currentTimecode)))}</div>
+ </div>
+ ) : (
+ <div className="audiobox-start-record" onPointerDown={this.Record}>
+ <FontAwesomeIcon icon="microphone" />
+ RECORD
+ </div>
+ )}
+ </div>
+ );
}
- isActiveChild = () => this._isAnyChildContentActive;
- timelineWhenChildContentsActiveChanged = (isActive: boolean) =>
- this.props.whenChildContentsActiveChanged(
- runInAction(() => (this._isAnyChildContentActive = isActive))
- )
- timelineScreenToLocal = () =>
- this.props
- .ScreenToLocalTransform()
- .translate(
- -AudioBox.playheadWidth,
- (-(100 - this.heightPercent) / 200) * this.props.PanelHeight()
- )
- setAnchorTime = (time: number) => {
- (this._ele!.currentTime = this.layoutDoc._currentTimecode = time);
+ // UI for playback, displayed for imported or recorded clips; hides the timeline and collapses the controls when the node is shrunk vertically
+ @computed get playbackControls() {
+ return (
+ <div
+ className="audiobox-file"
+ style={{
+ pointerEvents: this._isAnyChildContentActive || this.props.isContentActive() ? 'all' : 'none',
+ flexDirection: this.miniPlayer ? 'row' : 'column',
+ justifyContent: this.miniPlayer ? 'flex-start' : 'space-between',
+ }}>
+ <div className="audiobox-controls">
+ <div className="controls-left">
+ <div
+ className="audiobox-button"
+ title={this.mediaState === media_state.Paused ? 'play' : 'pause'}
+ onPointerDown={
+ this.mediaState === media_state.Paused
+ ? this.Play
+ : e => {
+ e.stopPropagation();
+ this.Pause();
+ }
+ }>
+ <FontAwesomeIcon icon={this.mediaState === media_state.Paused ? 'play' : 'pause'} size={'1x'} />
+ </div>
+
+ {!this.miniPlayer && (
+ <div className="audiobox-button" title={this.timeline?.IsTrimming !== TrimScope.None ? 'finish' : 'trim'} onPointerDown={this.onClipPointerDown}>
+ <FontAwesomeIcon icon={this.timeline?.IsTrimming !== TrimScope.None ? 'check' : 'cut'} size={'1x'} />
+ </div>
+ )}
+ </div>
+ <div className="controls-right">
+ <div
+ className="audiobox-button"
+ title={this._muted ? 'unmute' : 'mute'}
+ onPointerDown={e => {
+ e.stopPropagation();
+ this.toggleMute();
+ }}>
+ <FontAwesomeIcon icon={this._muted ? 'volume-mute' : 'volume-up'} />
+ </div>
+ <input
+ type="range"
+ step="0.1"
+ min="0"
+ max="1"
+ value={this._muted ? 0 : this._volume}
+ className="toolbar-slider volume"
+ onPointerDown={(e: React.PointerEvent) => {
+ e.stopPropagation();
+ }}
+ onChange={(e: React.ChangeEvent<HTMLInputElement>) => this.setVolume(Number(e.target.value))}
+ />
+ </div>
+ </div>
+
+ <div className="audiobox-playback" style={{ width: this.miniPlayer ? 0 : '100%' }}>
+ <div className="audiobox-timeline">{this.renderTimeline}</div>
+ </div>
+
+ {this.audio}
+
+ <div className="audiobox-timecodes">
+ <div className="timecode-current">{this.timeline && formatTime(Math.round(NumCast(this.layoutDoc._currentTimecode) - NumCast(this.timeline.clipStart)))}</div>
+ {this.miniPlayer ? (
+ <div>/</div>
+ ) : (
+ <div className="bottom-controls-middle">
+ <FontAwesomeIcon icon="search-plus" />
+ <input
+ type="range"
+ step="0.1"
+ min="1"
+ max="5"
+ value={this.timeline?._zoomFactor}
+ className="toolbar-slider"
+ id="zoom-slider"
+ onPointerDown={(e: React.PointerEvent) => {
+ e.stopPropagation();
+ }}
+ onChange={(e: React.ChangeEvent<HTMLInputElement>) => {
+ this.zoom(Number(e.target.value));
+ }}
+ />
+ </div>
+ )}
+
+ <div className="timecode-duration">{this.timeline && formatTime(Math.round(this.timeline.clipDuration))}</div>
+ </div>
+ </div>
+ );
}
- timelineHeight = () =>
- (((this.props.PanelHeight() * this.heightPercent) / 100) *
- this.heightPercent) /
- 100 // panelHeight * heightPercent is player height. * heightPercent is timeline height (as per css inline)
- timelineWidth = () => this.props.PanelWidth() - AudioBox.playheadWidth;
+ // renders the CollectionStackedTimeline
@computed get renderTimeline() {
return (
<CollectionStackedTimeline
- ref={this._stackedTimeline}
- {...this.props}
+ ref={action((r: any) => (this._stackedTimeline = r))}
+ {...OmitKeys(this.props, ['CollectionFreeFormDocumentView']).omit}
fieldKey={this.annotationKey}
- dictationKey={this.fieldKey + "-dictation"}
+ dictationKey={this.fieldKey + '-dictation'}
mediaPath={this.path}
renderDepth={this.props.renderDepth + 1}
- startTag={"_timecodeToShow" /* audioStart */}
- endTag={"_timecodeToHide" /* audioEnd */}
- focus={DocUtils.DefaultFocus}
+ startTag={'_timecodeToShow' /* audioStart */}
+ endTag={'_timecodeToHide' /* audioEnd */}
bringToFront={emptyFunction}
CollectionView={undefined}
- duration={this.duration}
playFrom={this.playFrom}
- setTime={this.setAnchorTime}
+ setTime={this.setPlayheadTime}
playing={this.playing}
- whenChildContentsActiveChanged={
- this.timelineWhenChildContentsActiveChanged
- }
+ whenChildContentsActiveChanged={this.timelineWhenChildContentsActiveChanged}
moveDocument={this.moveDocument}
addDocument={this.addDocument}
removeDocument={this.removeDocument}
@@ -565,141 +667,28 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
playLink={this.playLink}
PanelWidth={this.timelineWidth}
PanelHeight={this.timelineHeight}
- trimming={this._trimming}
- trimStart={this._trimStart}
- trimEnd={this._trimEnd}
- trimDuration={this.trimDuration}
- setStartTrim={this.setStartTrim}
- setEndTrim={this.setEndTrim}
+ rawDuration={this.rawDuration}
/>
);
}
+ // returns the html audio element
+ @computed get audio() {
+ return (
+ <audio
+ ref={this.setRef}
+ className={`audiobox-control${this.props.isContentActive() ? '-interactive' : ''}`}
+ onLoadedData={action(e => this._ele?.duration && this._ele?.duration !== Infinity && (this.dataDoc[this.fieldKey + '-duration'] = this._ele.duration))}>
+ <source src={this.path} type="audio/mpeg" />
+ Not supported.
+ </audio>
+ );
+ }
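The onLoadedData guard above skips Infinity because audio blobs (notably those produced by MediaRecorder) can report an indefinite duration until fully loaded. A minimal sketch of reading a finite duration once metadata is available, as standalone assumed usage:

```ts
function readDuration(ele: HTMLAudioElement, onReady: (seconds: number) => void) {
  ele.addEventListener('loadedmetadata', () => {
    if (Number.isFinite(ele.duration)) onReady(ele.duration); // skip NaN/Infinity
  });
}
```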
+
render() {
- const interactive =
- SnappingManager.GetIsDragging() || this.props.isContentActive()
- ? "-interactive"
- : "";
return (
- <div
- className="audiobox-container"
- onContextMenu={this.specificContextMenu}
- onClick={
- !this.path && !this._recorder ? this.recordAudioAnnotation : undefined
- }
- style={{
- pointerEvents:
- this.props.layerProvider?.(this.layoutDoc) === false
- ? "none"
- : undefined,
- }}
- >
- {!this.path ? (
- <div className="audiobox-buttons">
- <div className="audiobox-dictation" onClick={this.onFile}>
- <FontAwesomeIcon
- style={{
- width: "30px"
- }}
- icon="file-alt"
- size={this.props.PanelHeight() < 36 ? "1x" : "2x"}
- />
- </div>
- {this.mediaState === "recording" || this.mediaState === "paused" ? (
- <div className="recording" onClick={(e) => e.stopPropagation()}>
- <div className="recording-buttons" onClick={this.recordClick}>
- <FontAwesomeIcon
- icon={"stop"}
- size={this.props.PanelHeight() < 36 ? "1x" : "2x"}
- />
- </div>
- <div
- className="recording-buttons"
- onClick={this._paused ? this.recordPlay : this.recordPause}
- >
- <FontAwesomeIcon
- icon={this._paused ? "play" : "pause"}
- size={this.props.PanelHeight() < 36 ? "1x" : "2x"}
- />
- </div>
- <div className="time">
- {formatTime(
- Math.round(NumCast(this.layoutDoc._currentTimecode))
- )}
- </div>
- </div>
- ) : (
- <div
- className={`audiobox-record${interactive}`}
- style={{ backgroundColor: Colors.DARK_GRAY }}
- >
- <FontAwesomeIcon icon="microphone" />
- RECORD
- </div>
- )}
- </div>
- ) : (
- <div
- className="audiobox-controls"
- style={{
- pointerEvents:
- this._isAnyChildContentActive || this.props.isContentActive()
- ? "all"
- : "none",
- }}
- >
- <div className="audiobox-dictation" />
- <div
- className="audiobox-player"
- style={{ height: `${AudioBox.heightPercent}%` }}
- >
- <div
- className="audiobox-buttons"
- title={this.mediaState === "paused" ? "play" : "pause"}
- onClick={this.mediaState === "paused" ? this.Play : this.Pause}
- >
- {" "}
- <FontAwesomeIcon
- icon={this.mediaState === "paused" ? "play" : "pause"}
- size={"1x"}
- />
- </div>
- <div
- className="audiobox-buttons"
- title={this._trimming ? "finish" : "trim"}
- onClick={this._trimming ? this.finishTrim : this.startTrim}
- >
- <FontAwesomeIcon
- icon={this._trimming ? "check" : "cut"}
- size={"1x"}
- />
- </div>
- <div
- className="audiobox-timeline"
- style={{
- top: 0,
- height: `100%`,
- left: AudioBox.playheadWidth,
- width: `calc(100% - ${AudioBox.playheadWidth}px)`,
- background: "white",
- }}
- >
- {this.renderTimeline}
- </div>
- {this.audio}
- <div className="audioBox-current-time">
- {this._trimming ?
- formatTime(Math.round(NumCast(this.layoutDoc._currentTimecode)))
- : formatTime(Math.round(NumCast(this.layoutDoc._currentTimecode) - NumCast(this._trimStart)))}
- </div>
- <div className="audioBox-total-time">
- {this._trimming || !this._trimEnd ?
- formatTime(Math.round(NumCast(this.duration)))
- : formatTime(Math.round(NumCast(this.trimDuration)))}
- </div>
- </div>
- </div>
- )}
+ <div ref={this.setupTimelineDrop} className="audiobox-container" onContextMenu={this.specificContextMenu} style={{ pointerEvents: this.layoutDoc._lockedPosition ? 'none' : undefined }}>
+ {!this.path ? this.recordingControls : this.playbackControls}
</div>
);
}