aboutsummaryrefslogtreecommitdiff
path: root/src/client/views/nodes/AudioBox.tsx
diff options
context:
space:
mode:
Diffstat (limited to 'src/client/views/nodes/AudioBox.tsx')
-rw-r--r--src/client/views/nodes/AudioBox.tsx108
1 files changed, 83 insertions, 25 deletions
diff --git a/src/client/views/nodes/AudioBox.tsx b/src/client/views/nodes/AudioBox.tsx
index 9351bc3be..f5de31fcb 100644
--- a/src/client/views/nodes/AudioBox.tsx
+++ b/src/client/views/nodes/AudioBox.tsx
@@ -22,6 +22,22 @@ import { ViewBoxAnnotatableComponent, ViewBoxAnnotatableProps } from "../DocComp
import "./AudioBox.scss";
import { FieldView, FieldViewProps } from "./FieldView";
+
+/**
+ * AudioBox
+ * Main component: AudioBox.tsx
+ * Supporting Components: CollectionStackedTimeline, AudioWaveform
+ *
+ * AudioBox is a node that supports the recording and playback of audio files in Dash.
+ * When an audio file is imported into Dash, it is immediately rendered as an AudioBox document.
+ * When a blank AudioBox node is created in Dash, audio recording controls are displayed and the user can start a recording which can be paused or stopped, and can use dictation to create a text transcript.
+ * Recording is done using the MediaDevices API to access the user's device microphone (see recordAudioAnnotation below)
+ * CollectionStackedTimeline handles AudioBox and VideoBox shared behavior, but AudioBox handles playing, pausing, etc because it contains <audio> element
+ * User can trim audio: nondestructive, just sets new bounds for playback and the rendering timeline
+ */
+
+
+// minimal declaration for the browser MediaRecorder API, which records audio from a MediaStream (MediaDevices API)
declare class MediaRecorder {
constructor(e: any); // whatever MediaRecorder has
}
@@ -35,44 +51,42 @@ enum media_state {
Paused = "paused",
Playing = "playing"
}
+
+
@observer
export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProps & FieldViewProps, AudioDocument>(AudioDocument) {
public static LayoutString(fieldKey: string) { return FieldView.LayoutString(AudioBox, fieldKey); }
- public static SetScrubTime = action((timeInMillisFrom1970: number) => {
- AudioBox._scrubTime = 0;
- AudioBox._scrubTime = timeInMillisFrom1970;
- });
public static Enabled = false;
- static topControlsHeight = 30; // width of playhead
- static bottomControlsHeight = 20; // height of timeline in percent of height of audioBox.
- @observable static _scrubTime = 0;
+
+ static topControlsHeight = 30; // height of upper controls above timeline
+ static bottomControlsHeight = 20; // height of lower controls below timeline
_dropDisposer?: DragManager.DragDropDisposer;
_disposers: { [name: string]: IReactionDisposer } = {};
- _ele: HTMLAudioElement | null = null;
- _recorder: any;
+ _ele: HTMLAudioElement | null = null; // <audio> ref
+ _recorder: any; // MediaRecorder
_recordStart = 0;
- _pauseStart = 0;
+ _pauseStart = 0; // time when recording is paused (used to keep track of recording timecodes)
_pauseEnd = 0;
_pausedTime = 0;
- _stream: MediaStream | undefined;
- _play: any = null;
+ _stream: MediaStream | undefined; // passed to MediaRecorder, records device input audio
+ _play: any = null; // timeout for playback
- @observable _stackedTimeline: any;
- @observable _finished: boolean = false;
+ @observable _stackedTimeline: any; // CollectionStackedTimeline ref
+ @observable _finished: boolean = false; // has playback reached end of clip
@observable _volume: number = 1;
@observable _muted: boolean = false;
- @observable _paused: boolean = false;
+ @observable _paused: boolean = false; // is recording paused
// @observable rawDuration: number = 0; // computed from the length of the audio element when loaded
@computed get recordingStart() { return DateCast(this.dataDoc[this.fieldKey + "-recordingStart"])?.date.getTime(); }
@computed get rawDuration() { return NumCast(this.dataDoc[`${this.fieldKey}-duration`]); } // bcz: shouldn't be needed since it's computed from audio element
// mehek: not 100% sure but i think due to the order in which things are loaded this is necessary ^^
// if you get rid of it and set the value to 0 the timeline and waveform will set their bounds incorrectly
- @computed get miniPlayer() { return this.props.PanelHeight() < 50 }
+ @computed get miniPlayer() { return this.props.PanelHeight() < 50 } // used to collapse timeline when node is shrunk
@computed get links() { return DocListCast(this.dataDoc.links); }
- @computed get pauseTime() { return this._pauseEnd - this._pauseStart; } // total time paused to update the correct time
+ @computed get pauseTime() { return this._pauseEnd - this._pauseStart; } // total time paused to update the correct recording time
@computed get mediaState() { return this.layoutDoc.mediaState as media_state; }
@computed get path() { // returns the path of the audio file
const path = Cast(this.props.Document[this.fieldKey], AudioField, null)?.url.href || "";
@@ -80,12 +94,15 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
set mediaState(value) { this.layoutDoc.mediaState = value; }
- get timeline() { return this._stackedTimeline; } // can't be computed since it's not observable
+ @computed get timeline() { return this._stackedTimeline; } // returns CollectionStackedTimeline ref
+
componentWillUnmount() {
this.removeCurrentlyPlaying();
this._dropDisposer?.();
Object.values(this._disposers).forEach((disposer) => disposer?.());
+
+ // removes doc from active recordings if recording when closed
const ind = DocUtils.ActiveRecordings.indexOf(this);
ind !== -1 && DocUtils.ActiveRecordings.splice(ind, 1);
}
@@ -102,6 +119,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
}
+
getLinkData(l: Doc) {
let la1 = l.anchor1 as Doc;
let la2 = l.anchor2 as Doc;
@@ -131,7 +149,8 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
) || this.rootDoc;
}
- // for updating the timecode
+
+ // updates timecode and shows it in timeline, follows links at time
@action
timecodeChanged = () => {
if (this.mediaState !== media_state.Recording && this._ele) {
@@ -148,7 +167,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
}
- // play back the audio from time
+ // play back the audio from seekTimeInSeconds, fullPlay tells whether clip is being played to end vs link range
@action
playFrom = (seekTimeInSeconds: number, endTime?: number, fullPlay: boolean = false) => {
clearTimeout(this._play); // abort any previous clip ending
@@ -156,8 +175,10 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
setTimeout(() => this.playFrom(seekTimeInSeconds, endTime), 500);
}
else if (this.timeline && this._ele && AudioBox.Enabled) {
+ // trim bounds override the requested playback bounds
const end = Math.min(this.timeline.trimEnd, endTime ?? this.timeline.trimEnd);
const start = Math.max(this.timeline.trimStart, seekTimeInSeconds);
+ // checks if times are within clip range
if (seekTimeInSeconds >= 0 && this.timeline.trimStart <= end && seekTimeInSeconds <= this.timeline.trimEnd) {
this._ele.currentTime = start;
this._ele.play();
@@ -165,6 +186,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
this.addCurrentlyPlaying();
this._play = setTimeout(
() => {
+ // need to keep track of if end of clip is reached so on next play, clip restarts
if (fullPlay) this._finished = true;
// removes from currently playing if playback has reached end of range marker
else this.removeCurrentlyPlaying();
@@ -177,6 +199,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
}
+
// removes from currently playing display
@action
removeCurrentlyPlaying = () => {
@@ -186,6 +209,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
}
+ // adds doc to currently playing display
@action
addCurrentlyPlaying = () => {
if (!CollectionStackedTimeline.CurrentlyPlaying) {
@@ -196,6 +220,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
}
+
// update the recording time
updateRecordTime = () => {
if (this.mediaState === media_state.Recording) {
@@ -227,6 +252,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
setTimeout(this.stopRecording, 60 * 60 * 1000); // stop after an hour
}
+ // stops recording
@action
stopRecording = () => {
if (this._recorder) {
@@ -240,6 +266,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
}
+
// context menu
specificContextMenu = (e: React.MouseEvent): void => {
const funcs: ContextMenuProps[] = [];
@@ -270,6 +297,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
});
}
+
// button for starting and stopping the recording
Record = (e: React.MouseEvent) => {
if (e.button === 0 && !e.ctrlKey) {
@@ -284,11 +312,16 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
if (this.timeline && this._ele) {
const eleTime = this._ele.currentTime;
+
+ // if curr timecode outside of trim bounds, set it to start
let start = eleTime >= this.timeline.trimEnd || eleTime <= this.timeline.trimStart ? this.timeline.trimStart : eleTime;
+
+ // restarts clip if reached end on last play
if (this._finished) {
this._finished = false;
start = this.timeline.trimStart;
}
+
this.playFrom(start, this.timeline.trimEnd, true);
}
}
@@ -299,12 +332,14 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
if (this._ele) {
this._ele.pause();
this.mediaState = media_state.Paused;
+
+ // if paused in the middle of playback, prevents restart on next play
if (!this._finished) clearTimeout(this._play);
this.removeCurrentlyPlaying();
}
}
- // creates a text document for dictation
+ // for dictation button, creates a text document for dictation
onFile = (e: any) => {
const newDoc = CurrentUserUtils.GetNewTextDoc(
"",
@@ -326,13 +361,15 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
e.stopPropagation();
}
- // ref for updating time
+
+ // sets <audio> ref for updating time
setRef = (e: HTMLAudioElement | null) => {
e?.addEventListener("timeupdate", this.timecodeChanged);
e?.addEventListener("ended", () => { this._finished = true; this.Pause() });
this._ele = e;
}
+
// pause the time during recording phase
@action
recordPause = (e: React.MouseEvent) => {
@@ -351,6 +388,8 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
e.stopPropagation();
}
+
+ // plays link
playLink = (link: Doc) => {
if (link.annotationOn === this.rootDoc) {
if (!this.layoutDoc.dontAutoPlayFollowedLinks) {
@@ -376,30 +415,39 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
}
+
@action
timelineWhenChildContentsActiveChanged = (isActive: boolean) =>
- this.props.whenChildContentsActiveChanged(this._isAnyChildContentActive = isActive)
+ this.props.whenChildContentsActiveChanged(this._isAnyChildContentActive = isActive);
+
timelineScreenToLocal = () =>
- this.props.ScreenToLocalTransform().translate(0, -AudioBox.bottomControlsHeight)
+ this.props.ScreenToLocalTransform().translate(0, -AudioBox.bottomControlsHeight);
+
setPlayheadTime = (time: number) => this._ele!.currentTime = this.layoutDoc._currentTimecode = time;
+
playing = () => this.mediaState === media_state.Playing;
+
isActiveChild = () => this._isAnyChildContentActive;
+ // timeline dimensions
timelineWidth = () => this.props.PanelWidth();
timelineHeight = () => (this.props.PanelHeight() - (AudioBox.topControlsHeight + AudioBox.bottomControlsHeight))
+ // ends trim, hides trim controls and displays new clip
@undoBatch
- finishTrim = () => { // hides trim controls and displays new clip
+ finishTrim = () => {
this.Pause();
this.setPlayheadTime(Math.max(Math.min(this.timeline?.trimEnd || 0, this._ele!.currentTime), this.timeline?.trimStart || 0));
this.timeline?.StopTrimming();
}
+ // displays trim controls to start trimming clip
startTrim = (scope: TrimScope) => {
this.Pause();
this.timeline?.StartTrimming(scope);
}
+ // for trim button, double click displays full clip, single displays curr trim bounds
onClipPointerDown = (e: React.PointerEvent) => {
e.stopPropagation();
this.timeline && setupMoveUpEvents(this, e, returnFalse, returnFalse, action((e: PointerEvent, doubleTap?: boolean) => {
@@ -412,10 +460,13 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}));
}
+
+ // for zoom slider, sets timeline waveform zoom
zoom = (zoom: number) => {
this.timeline?.setZoom(zoom);
}
+ // for volume slider, sets audio volume
@action
setVolume = (volume: number) => {
if (this._ele) {
@@ -427,6 +478,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
}
+ // toggles audio muted
@action
toggleMute = () => {
if (this._ele) {
@@ -435,6 +487,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
}
+
setupTimelineDrop = (r: HTMLDivElement | null) => {
if (r && this.timeline) {
this._dropDisposer?.();
@@ -447,6 +500,8 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
}
+
+ // UI for recording, initially displayed when new audio created in Dash
@computed get recordingControls() {
return <div className="audiobox-recorder">
<div className="audiobox-dictation" onClick={this.onFile}>
@@ -478,6 +533,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
</div>
}
+ // UI for playback, displayed for imported or recorded clips, hides timeline and collapses controls when node is shrunk vertically
@computed get playbackControls() {
return <div className="audiobox-file" style={{
pointerEvents: this._isAnyChildContentActive || this.props.isContentActive() ? "all" : "none",
@@ -544,6 +600,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
</div>
}
+ // gets CollectionStackedTimeline
@computed get renderTimeline() {
return (
<CollectionStackedTimeline
@@ -577,6 +634,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
/>
);
}
+
// returns the html audio element
@computed get audio() {
return <audio ref={this.setRef}