Diffstat (limited to 'src/client/views/nodes/AudioBox.tsx')
-rw-r--r--  src/client/views/nodes/AudioBox.tsx | 151
1 file changed, 82 insertions(+), 69 deletions(-)
diff --git a/src/client/views/nodes/AudioBox.tsx b/src/client/views/nodes/AudioBox.tsx
index c685ec66f..9deed4de4 100644
--- a/src/client/views/nodes/AudioBox.tsx
+++ b/src/client/views/nodes/AudioBox.tsx
@@ -1,28 +1,34 @@
+/* eslint-disable jsx-a11y/no-static-element-interactions */
+/* eslint-disable jsx-a11y/click-events-have-key-events */
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
import { Tooltip } from '@mui/material';
import { action, computed, IReactionDisposer, makeObservable, observable, runInAction } from 'mobx';
import { observer } from 'mobx-react';
import * as React from 'react';
+import { returnFalse, setupMoveUpEvents } from '../../../ClientUtils';
import { DateField } from '../../../fields/DateField';
import { Doc } from '../../../fields/Doc';
import { DocData } from '../../../fields/DocSymbols';
import { ComputedField } from '../../../fields/ScriptField';
import { Cast, DateCast, NumCast } from '../../../fields/Types';
import { AudioField, nullAudio } from '../../../fields/URLField';
-import { emptyFunction, formatTime, returnFalse, setupMoveUpEvents } from '../../../Utils';
-import { Docs, DocUtils } from '../../documents/Documents';
+import { formatTime } from '../../../Utils';
+import { Docs } from '../../documents/Documents';
+import { DocumentType } from '../../documents/DocumentTypes';
+import { DocUtils } from '../../documents/DocUtils';
import { Networking } from '../../Network';
import { DragManager } from '../../util/DragManager';
-import { LinkManager } from '../../util/LinkManager';
import { undoBatch } from '../../util/UndoManager';
import { CollectionStackedTimeline, TrimScope } from '../collections/CollectionStackedTimeline';
import { ContextMenu } from '../ContextMenu';
import { ContextMenuProps } from '../ContextMenuItem';
import { ViewBoxAnnotatableComponent } from '../DocComponent';
+import { DocViewUtils } from '../DocViewUtils';
+import { PinDocView, PinProps } from '../PinFuncs';
import './AudioBox.scss';
-import { FocusViewOptions, FieldView, FieldViewProps } from './FieldView';
-import { PinProps, PresBox } from './trails';
-import { OpenWhere } from './DocumentView';
+import { DocumentView } from './DocumentView';
+import { FieldView, FieldViewProps } from './FieldView';
+import { OpenWhere } from './OpenWhere';
/**
* AudioBox
@@ -42,7 +48,7 @@ declare class MediaRecorder {
constructor(e: any); // whatever MediaRecorder has
}
-export enum media_state {
+export enum mediaState {
PendingRecording = 'pendingRecording',
Recording = 'recording',
Paused = 'paused',
@@ -94,19 +100,19 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
return this._props.PanelHeight() < 50;
} // used to collapse timeline when node is shrunk
@computed get links() {
- return LinkManager.Links(this.dataDoc);
+ return Doc.Links(this.dataDoc);
}
@computed get mediaState() {
- return this.dataDoc.mediaState as media_state;
+ return this.dataDoc.mediaState as mediaState;
+ }
+ set mediaState(value) {
+ this.dataDoc.mediaState = value;
}
@computed get path() {
// returns the path of the audio file
const path = Cast(this.Document[this.fieldKey], AudioField, null)?.url.href || '';
return path === nullAudio ? '' : path;
}
- set mediaState(value) {
- this.dataDoc.mediaState = value;
- }
@computed get timeline() {
return this._stackedTimeline;
@@ -117,17 +123,17 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
this._dropDisposer?.();
Object.values(this._disposers).forEach(disposer => disposer?.());
- this.mediaState === media_state.Recording && this.stopRecording();
+ this.mediaState === mediaState.Recording && this.stopRecording();
}
@action
componentDidMount() {
this._props.setContentViewBox?.(this);
if (this.path) {
- this.mediaState = media_state.Paused;
+ this.mediaState = mediaState.Paused;
this.setPlayheadTime(NumCast(this.layoutDoc.clipStart));
} else {
- this.mediaState = undefined as any as media_state;
+ this.mediaState = undefined as any as mediaState;
}
}
@@ -149,24 +155,24 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
this.Document,
this.dataDoc,
this.annotationKey,
- this._ele?.currentTime || Cast(this.Document._layout_currentTimecode, 'number', null) || (this.mediaState === media_state.Recording ? (Date.now() - (this.recordingStart || 0)) / 1000 : undefined),
+ this._ele?.currentTime || Cast(this.Document._layout_currentTimecode, 'number', null) || (this.mediaState === mediaState.Recording ? (Date.now() - (this.recordingStart || 0)) / 1000 : undefined),
undefined,
undefined,
addAsAnnotation
) || this.Document
: Docs.Create.ConfigDocument({ title: '#' + timecode, _timecodeToShow: timecode, annotationOn: this.Document });
- PresBox.pinDocView(anchor, { pinDocLayout: pinProps?.pinDocLayout, pinData: { ...(pinProps?.pinData ?? {}), temporal: true } }, this.Document);
+ PinDocView(anchor, { pinDocLayout: pinProps?.pinDocLayout, pinData: { ...(pinProps?.pinData ?? {}), temporal: true } }, this.Document);
return anchor;
};
// updates timecode and shows it in timeline, follows links at time
@action
timecodeChanged = () => {
- if (this.mediaState !== media_state.Recording && this._ele) {
+ if (this.mediaState !== mediaState.Recording && this._ele) {
this.links
.map(l => this.getLinkData(l))
- .forEach(({ la1, la2, linkTime }) => {
+ .forEach(({ la1, linkTime }) => {
if (linkTime > NumCast(this.layoutDoc._layout_currentTimecode) && linkTime < this._ele!.currentTime) {
Doc.linkFollowHighlight(la1);
}
@@ -180,7 +186,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
@action
playFrom = (seekTimeInSeconds: number, endTime?: number, fullPlay: boolean = false) => {
clearTimeout(this._play); // abort any previous clip ending
- if (Number.isNaN(this._ele?.duration)) {
+ if (isNaN(this._ele?.duration ?? Number.NaN)) {
// audio element isn't loaded yet... wait 1/2 second and try again
setTimeout(() => this.playFrom(seekTimeInSeconds, endTime), 500);
} else if (this.timeline && this._ele && AudioBox.Enabled) {
@@ -191,7 +197,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
if (seekTimeInSeconds >= 0 && this.timeline.trimStart <= end && seekTimeInSeconds <= this.timeline.trimEnd) {
this._ele.currentTime = start;
this._ele.play();
- this.mediaState = media_state.Playing;
+ this.mediaState = mediaState.Playing;
this.addCurrentlyPlaying();
this._play = setTimeout(
() => {
@@ -213,9 +219,9 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
@action
removeCurrentlyPlaying = () => {
const docView = this.DocumentView?.();
- if (CollectionStackedTimeline.CurrentlyPlaying && docView) {
- const index = CollectionStackedTimeline.CurrentlyPlaying.indexOf(docView);
- index !== -1 && CollectionStackedTimeline.CurrentlyPlaying.splice(index, 1);
+ if (DocumentView.CurrentlyPlaying && docView) {
+ const index = DocumentView.CurrentlyPlaying.indexOf(docView);
+ index !== -1 && DocumentView.CurrentlyPlaying.splice(index, 1);
}
};
@@ -223,17 +229,17 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
@action
addCurrentlyPlaying = () => {
const docView = this.DocumentView?.();
- if (!CollectionStackedTimeline.CurrentlyPlaying) {
- CollectionStackedTimeline.CurrentlyPlaying = [];
+ if (!DocumentView.CurrentlyPlaying) {
+ DocumentView.CurrentlyPlaying = [];
}
- if (docView && CollectionStackedTimeline.CurrentlyPlaying.indexOf(docView) === -1) {
- CollectionStackedTimeline.CurrentlyPlaying.push(docView);
+ if (docView && DocumentView.CurrentlyPlaying.indexOf(docView) === -1) {
+ DocumentView.CurrentlyPlaying.push(docView);
}
};
// update the recording time
updateRecordTime = () => {
- if (this.mediaState === media_state.Recording) {
+ if (this.mediaState === mediaState.Recording) {
setTimeout(this.updateRecordTime, 30);
if (!this._paused) {
this.layoutDoc._layout_currentTimecode = (new Date().getTime() - this._recordStart - this._pausedTime) / 1000;
@@ -246,7 +252,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
this._stream = await navigator.mediaDevices.getUserMedia({ audio: true });
this._recorder = new MediaRecorder(this._stream);
this.dataDoc[this.fieldKey + '_recordingStart'] = new DateField();
- DocUtils.ActiveRecordings.push(this);
+ DocViewUtils.ActiveRecordings.push(this);
this._recorder.ondataavailable = async (e: any) => {
const [{ result }] = await Networking.UploadFilesToServer({ file: e.data });
if (!(result instanceof Error)) {
@@ -254,7 +260,9 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
}
};
this._recordStart = new Date().getTime();
- runInAction(() => (this.mediaState = media_state.Recording));
+ runInAction(() => {
+ this.mediaState = mediaState.Recording;
+ });
setTimeout(this.updateRecordTime);
this._recorder.start();
setTimeout(this.stopRecording, 60 * 60 * 1000); // stop after an hour
@@ -269,34 +277,34 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
const now = new Date().getTime();
this._paused && (this._pausedTime += now - this._pauseStart);
this.dataDoc[this.fieldKey + '_duration'] = (now - this._recordStart - this._pausedTime) / 1000;
- this.mediaState = media_state.Paused;
+ this.mediaState = mediaState.Paused;
this._stream?.getAudioTracks()[0].stop();
- const ind = DocUtils.ActiveRecordings.indexOf(this);
- ind !== -1 && DocUtils.ActiveRecordings.splice(ind, 1);
+ const ind = DocViewUtils.ActiveRecordings.indexOf(this);
+ ind !== -1 && DocViewUtils.ActiveRecordings.splice(ind, 1);
}
};
// context menu
- specificContextMenu = (e: React.MouseEvent): void => {
+ specificContextMenu = (): void => {
const funcs: ContextMenuProps[] = [];
funcs.push({
description: (this.layoutDoc.hideAnchors ? "Don't hide" : 'Hide') + ' anchors',
- event: e => (this.layoutDoc.hideAnchors = !this.layoutDoc.hideAnchors),
+ event: () => { this.layoutDoc.hideAnchors = !this.layoutDoc.hideAnchors; }, // prettier-ignore
icon: 'expand-arrows-alt',
});
funcs.push({
description: (this.layoutDoc.dontAutoFollowLinks ? '' : "Don't") + ' follow links when encountered',
- event: e => (this.layoutDoc.dontAutoFollowLinks = !this.layoutDoc.dontAutoFollowLinks),
+ event: () => { this.layoutDoc.dontAutoFollowLinks = !this.layoutDoc.dontAutoFollowLinks}, // prettier-ignore
icon: 'expand-arrows-alt',
});
funcs.push({
description: (this.layoutDoc.dontAutoPlayFollowedLinks ? '' : "Don't") + ' play when link is selected',
- event: e => (this.layoutDoc.dontAutoPlayFollowedLinks = !this.layoutDoc.dontAutoPlayFollowedLinks),
+ event: () => { this.layoutDoc.dontAutoPlayFollowedLinks = !this.layoutDoc.dontAutoPlayFollowedLinks; }, // prettier-ignore
icon: 'expand-arrows-alt',
});
funcs.push({
description: (this.layoutDoc.autoPlayAnchors ? "Don't auto" : 'Auto') + ' play anchors onClick',
- event: e => (this.layoutDoc.autoPlayAnchors = !this.layoutDoc.autoPlayAnchors),
+ event: () => { this.layoutDoc.autoPlayAnchors = !this.layoutDoc.autoPlayAnchors; }, // prettier-ignore
icon: 'expand-arrows-alt',
});
ContextMenu.Instance?.addItem({
@@ -342,9 +350,9 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
}
};
- IsPlaying = () => this.mediaState === media_state.Playing;
+ IsPlaying = () => this.mediaState === mediaState.Playing;
TogglePause = () => {
- if (this.mediaState === media_state.Paused) this.Play();
+ if (this.mediaState === mediaState.Paused) this.Play();
else this.pause();
};
// pause playback without removing from the playback list to allow user to play it again.
@@ -352,7 +360,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
pause = () => {
if (this._ele) {
this._ele.pause();
- this.mediaState = media_state.Paused;
+ this.mediaState = mediaState.Paused;
// if paused in the middle of playback, prevents restart on next play
if (!this._finished) clearTimeout(this._play);
@@ -434,7 +442,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
};
// plays link
- playLink = (link: Doc, options: FocusViewOptions) => {
+ playLink = (link: Doc /* , options: FocusViewOptions */) => {
if (link.annotationOn === this.Document) {
if (!this.layoutDoc.dontAutoPlayFollowedLinks) {
this.playFrom(this.timeline?.anchorStart(link) || 0, this.timeline?.anchorEnd(link));
@@ -460,13 +468,17 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
};
@action
- timelineWhenChildContentsActiveChanged = (isActive: boolean) => this._props.whenChildContentsActiveChanged((this._isAnyChildContentActive = isActive));
+ timelineWhenChildContentsActiveChanged = (isActive: boolean) => {
+ this._props.whenChildContentsActiveChanged((this._isAnyChildContentActive = isActive));
+ };
timelineScreenToLocal = () => this.ScreenToLocalBoxXf().translate(0, -AudioBox.topControlsHeight);
- setPlayheadTime = (time: number) => (this._ele!.currentTime /*= this.layoutDoc._layout_currentTimecode*/ = time);
+ setPlayheadTime = (time: number) => {
+ this._ele!.currentTime /* = this.layoutDoc._layout_currentTimecode */ = time;
+ };
- playing = () => this.mediaState === media_state.Playing;
+ playing = () => this.mediaState === mediaState.Playing;
isActiveChild = () => this._isAnyChildContentActive;
@@ -497,7 +509,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
e,
returnFalse,
returnFalse,
- action(e => {
+ action(() => {
if (this.timeline?.IsTrimming !== TrimScope.None) {
this.timeline?.CancelTrimming();
} else {
@@ -523,7 +535,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
e,
returnFalse,
returnFalse,
- action((e: PointerEvent, doubleTap?: boolean) => {
+ action((moveEv: PointerEvent, doubleTap?: boolean) => {
if (doubleTap) {
this.startTrim(TrimScope.All);
} else if (this.timeline) {
@@ -563,14 +575,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
setupTimelineDrop = (r: HTMLDivElement | null) => {
if (r && this.timeline) {
this._dropDisposer?.();
- this._dropDisposer = DragManager.MakeDropTarget(
- r,
- (e, de) => {
- const [xp, yp] = this.ScreenToLocalBoxXf().transformPoint(de.x, de.y);
- de.complete.docDragData && this.timeline?.internalDocDrop(e, de, de.complete.docDragData, xp);
- },
- this.layoutDoc
- );
+ this._dropDisposer = DragManager.MakeDropTarget(r, (e, de) => de.complete.docDragData && this.timeline?.internalDocDrop(e, de, de.complete.docDragData), this.layoutDoc);
}
};
@@ -581,7 +586,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
<div className="audiobox-dictation" onPointerDown={this.onFile}>
<FontAwesomeIcon size="2x" icon="file-alt" />
</div>
- {[media_state.Recording, media_state.Playing].includes(this.mediaState) ? (
+ {[mediaState.Recording, mediaState.Playing].includes(this.mediaState) ? (
<div className="recording-controls" onClick={e => e.stopPropagation()}>
<div className="record-button" onPointerDown={this.Record}>
<FontAwesomeIcon size="2x" icon="stop" />
@@ -614,31 +619,29 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
<div className="controls-left">
<div
className="audiobox-button"
- title={this.mediaState === media_state.Paused ? 'play' : 'pause'}
+ title={this.mediaState === mediaState.Paused ? 'play' : 'pause'}
onPointerDown={
- this.mediaState === media_state.Paused
+ this.mediaState === mediaState.Paused
? this.Play
: e => {
e.stopPropagation();
this.Pause();
}
}>
- <FontAwesomeIcon icon={this.mediaState === media_state.Paused ? 'play' : 'pause'} size={'1x'} />
+ <FontAwesomeIcon icon={this.mediaState === mediaState.Paused ? 'play' : 'pause'} size="1x" />
</div>
{!this.miniPlayer && (
<>
<Tooltip title={<>trim audio</>}>
<div className="audiobox-button" onPointerDown={this.onClipPointerDown}>
- <FontAwesomeIcon icon={this.timeline?.IsTrimming !== TrimScope.None ? 'check' : 'cut'} size={'1x'} />
+ <FontAwesomeIcon icon={this.timeline?.IsTrimming !== TrimScope.None ? 'check' : 'cut'} size="1x" />
</div>
</Tooltip>
- {this.timeline?.IsTrimming == TrimScope.None && !NumCast(this.layoutDoc.clipStart) && NumCast(this.layoutDoc.clipEnd) === this.rawDuration ? (
- <></>
- ) : (
- <Tooltip title={<>{this.timeline?.IsTrimming !== TrimScope.None ? 'Cancel trimming' : 'Edit original timeline'}</>}>
+ {this.timeline?.IsTrimming === TrimScope.None && !NumCast(this.layoutDoc.clipStart) && NumCast(this.layoutDoc.clipEnd) === this.rawDuration ? null : (
+ <Tooltip title={this.timeline?.IsTrimming !== TrimScope.None ? 'Cancel trimming' : 'Edit original timeline'}>
<div className="audiobox-button" onPointerDown={this.onResetPointerDown}>
- <FontAwesomeIcon icon={this.timeline?.IsTrimming !== TrimScope.None ? 'cancel' : 'arrows-left-right'} size={'1x'} />
+ <FontAwesomeIcon icon={this.timeline?.IsTrimming !== TrimScope.None ? 'cancel' : 'arrows-left-right'} size="1x" />
</div>
</Tooltip>
)}
@@ -705,9 +708,11 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
@computed get renderTimeline() {
return (
<CollectionStackedTimeline
- ref={action((r: CollectionStackedTimeline | null) => (this._stackedTimeline = r))}
+ ref={action((r: CollectionStackedTimeline | null) => {
+ this._stackedTimeline = r;
+ })}
+ // eslint-disable-next-line react/jsx-props-no-spreading
{...this._props}
- CollectionFreeFormDocumentView={undefined}
dataFieldKey={this.fieldKey}
fieldKey={this.annotationKey}
dictationKey={this.fieldKey + '_dictation'}
@@ -738,10 +743,13 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
// returns the html audio element
@computed get audio() {
return (
+ // eslint-disable-next-line jsx-a11y/media-has-caption
<audio
ref={this.setRef}
className={`audiobox-control${this._props.isContentActive() ? '-interactive' : ''}`}
- onLoadedData={action(e => this._ele?.duration && this._ele?.duration !== Infinity && (this.dataDoc[this.fieldKey + '_duration'] = this._ele.duration))}>
+ onLoadedData={action(() => {
+ this._ele?.duration && this._ele?.duration !== Infinity && (this.dataDoc[this.fieldKey + '_duration'] = this._ele.duration);
+ })}>
<source src={this.path} type="audio/mpeg" />
Not supported.
</audio>
@@ -756,3 +764,8 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
);
}
}
+
+Docs.Prototypes.TemplateMap.set(DocumentType.AUDIO, {
+ layout: { view: AudioBox, dataField: 'data' },
+ options: { acl: '', _height: 100, _layout_fitWidth: true, _layout_reflowHorizontal: true, _layout_reflowVertical: true, _layout_nativeDimEditable: true, systemIcon: 'BsFillVolumeUpFill' },
+});