author     bobzel <zzzman@gmail.com>    2024-09-02 09:26:37 -0400
committer  bobzel <zzzman@gmail.com>    2024-09-02 09:26:37 -0400
commit     cda69e48361fce8d71a4dc66edd9dd976a27f52d (patch)
tree       82b9a1a5967ae88a9534f89f7eaed3aeb289652f /src/client/views/nodes/AudioBox.tsx
parent     c01828308714874589d1f60c33ca59df4c656c0c (diff)
parent     a958577d4c27b276aa37484e3f895e196138b17c (diff)
Merge branch 'master' into alyssa-starter
Diffstat (limited to 'src/client/views/nodes/AudioBox.tsx')
-rw-r--r--  src/client/views/nodes/AudioBox.tsx | 57
1 file changed, 25 insertions, 32 deletions
diff --git a/src/client/views/nodes/AudioBox.tsx b/src/client/views/nodes/AudioBox.tsx
index 5d0ed7eab..63a126aec 100644
--- a/src/client/views/nodes/AudioBox.tsx
+++ b/src/client/views/nodes/AudioBox.tsx
@@ -1,5 +1,3 @@
-/* eslint-disable jsx-a11y/no-static-element-interactions */
-/* eslint-disable jsx-a11y/click-events-have-key-events */
 import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
 import { Tooltip } from '@mui/material';
 import { action, computed, IReactionDisposer, makeObservable, observable, runInAction } from 'mobx';
@@ -7,7 +5,7 @@ import { observer } from 'mobx-react';
 import * as React from 'react';
 import { returnFalse, setupMoveUpEvents } from '../../../ClientUtils';
 import { DateField } from '../../../fields/DateField';
-import { Doc } from '../../../fields/Doc';
+import { Doc, Opt } from '../../../fields/Doc';
 import { DocData } from '../../../fields/DocSymbols';
 import { ComputedField } from '../../../fields/ScriptField';
 import { Cast, DateCast, DocCast, NumCast } from '../../../fields/Types';
@@ -45,9 +43,9 @@ import axios from 'axios';
  */
 
 // used as a wrapper class for MediaStream from MediaDevices API
-declare class MediaRecorder {
-    constructor(e: any); // whatever MediaRecorder has
-}
+// declare class MediaRecorder {
+//     constructor(e: unknown); // whatever MediaRecorder has
+// }
 
 export enum mediaState {
     PendingRecording = 'pendingRecording',
@@ -62,9 +60,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
         return FieldView.LayoutString(AudioBox, fieldKey);
     }
 
-    public static Enabled = false;
-
-    constructor(props: any) {
+    constructor(props: FieldViewProps) {
         super(props);
         makeObservable(this);
     }
@@ -75,12 +71,12 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
     _dropDisposer?: DragManager.DragDropDisposer;
     _disposers: { [name: string]: IReactionDisposer } = {};
     _ele: HTMLAudioElement | null = null; // <audio> ref
-    _recorder: any; // MediaRecorder
+    _recorder: Opt<MediaRecorder>; // MediaRecorder
     _recordStart = 0;
     _pauseStart = 0; // time when recording is paused (used to keep track of recording timecodes)
     _pausedTime = 0;
     _stream: MediaStream | undefined; // passed to MediaRecorder, records device input audio
-    _play: any = null; // timeout for playback
+    _play: NodeJS.Timeout | null = null; // timeout for playback
     @observable _stackedTimeline: CollectionStackedTimeline | null | undefined = undefined; // CollectionStackedTimeline ref
     @observable _finished: boolean = false; // has playback reached end of clip
 
@@ -134,7 +130,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
             this.mediaState = mediaState.Paused;
             this.setPlayheadTime(NumCast(this.layoutDoc.clipStart));
         } else {
-            this.mediaState = undefined as any as mediaState;
+            this.mediaState = undefined as unknown as mediaState;
         }
     }
 
@@ -186,11 +182,11 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
     // play back the audio from seekTimeInSeconds, fullPlay tells whether clip is being played to end vs link range
     @action
     playFrom = (seekTimeInSeconds: number, endTime?: number, fullPlay: boolean = false) => {
-        clearTimeout(this._play); // abort any previous clip ending
+        this._play && clearTimeout(this._play); // abort any previous clip ending
         if (isNaN(this._ele?.duration ?? Number.NaN)) {
             // audio element isn't loaded yet... wait 1/2 second and try again
             setTimeout(() => this.playFrom(seekTimeInSeconds, endTime), 500);
-        } else if (this.timeline && this._ele && AudioBox.Enabled) {
+        } else if (this.timeline && this._ele) {
             // trimBounds override requested playback bounds
             const end = Math.min(this.timeline.trimEnd, endTime ?? this.timeline.trimEnd);
             const start = Math.max(this.timeline.trimStart, seekTimeInSeconds);
@@ -254,8 +250,12 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
         this._recorder = new MediaRecorder(this._stream);
         this.dataDoc[this.fieldKey + '_recordingStart'] = new DateField();
         DocViewUtils.ActiveRecordings.push(this);
-        this._recorder.ondataavailable = async (e: any) => {
-            const [{ result }] = await Networking.UploadFilesToServer({ file: e.data });
+        this._recorder.ondataavailable = async (e: BlobEvent) => {
+            const file: Blob & { name?: string; lastModified?: number; webkitRelativePath?: string } = e.data;
+            file.name = '';
+            file.lastModified = 0;
+            file.webkitRelativePath = '';
+            const [{ result }] = await Networking.UploadFilesToServer({ file: file as Blob & { name: string; lastModified: number; webkitRelativePath: string } });
             if (!(result instanceof Error)) {
                 this.Document[this.fieldKey] = new AudioField(result.accessPaths.agnostic.client);
                 this.Document.url = result.accessPaths.agnostic.client;
@@ -348,9 +348,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
     };
 
     // for play button
-    Play = (e?: any) => {
-        e?.stopPropagation?.();
-
+    Play = () => {
         if (this.timeline && this._ele) {
             const eleTime = this._ele.currentTime;
 
@@ -380,7 +378,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
             this.mediaState = mediaState.Paused;
 
             // if paused in the middle of playback, prevents restart on next play
-            if (!this._finished) clearTimeout(this._play);
+            if (!this._finished && this._play) clearTimeout(this._play);
         }
     };
     // pause playback and remove from playback list
@@ -391,7 +389,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
     };
 
     // for dictation button, creates a text document for dictation
-    onFile = (e: any) => {
+    onFile = (e: React.PointerEvent) => {
         setupMoveUpEvents(
             this,
             e,
@@ -436,7 +434,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
             action(() => {
                 this._pauseStart = new Date().getTime();
                 this._paused = true;
-                this._recorder.pause();
+                this._recorder?.pause();
             }),
             false
         );
@@ -452,7 +450,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
             action(() => {
                 this._paused = false;
                 this._pausedTime += new Date().getTime() - this._pauseStart;
-                this._recorder.resume();
+                this._recorder?.resume();
             }),
             false
         );
@@ -637,14 +635,10 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
                 <div
                     className="audiobox-button"
                     title={this.mediaState === mediaState.Paused ? 'play' : 'pause'}
-                    onPointerDown={
-                        this.mediaState === mediaState.Paused
-                            ? this.Play
-                            : e => {
-                                  e.stopPropagation();
-                                  this.Pause();
-                              }
-                    }>
+                    onPointerDown={e => {
+                        e.stopPropagation();
+                        this.mediaState === mediaState.Paused ? this.Play() : this.Pause();
+                    }}>
                     <FontAwesomeIcon icon={this.mediaState === mediaState.Paused ? 'play' : 'pause'} size="1x" />
                 </div>
 
@@ -760,7 +754,6 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
     // returns the html audio element
     @computed get audio() {
        return (
-            // eslint-disable-next-line jsx-a11y/media-has-caption
            <audio
                ref={this.setRef}
                className={`audiobox-control${this._props.isContentActive() ? '-interactive' : ''}`}
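
Note on the ondataavailable change above: e.data from MediaRecorder is a plain Blob, while the upload helper appears to expect File-like metadata (name, lastModified, webkitRelativePath), which is why the diff patches those properties onto the Blob before the cast. Below is a minimal sketch of the same idea using the standard File constructor instead of property patching; it is not part of this commit, and uploadFiles is a hypothetical stand-in for Networking.UploadFilesToServer, whose exact signature is not shown in this diff.

```typescript
// Sketch (assumption, not repo code): wrap a MediaRecorder chunk in a real File so the
// upload helper receives name/lastModified/webkitRelativePath without mutating the Blob.
// `uploadFiles` is a hypothetical stand-in for Networking.UploadFilesToServer.
async function recordAndUpload(stream: MediaStream, uploadFiles: (args: { file: File }) => Promise<unknown>) {
    const recorder = new MediaRecorder(stream);
    recorder.ondataavailable = async (e: BlobEvent) => {
        // e.data is a plain Blob; the File constructor supplies the file-like metadata.
        const file = new File([e.data], 'recording.webm', {
            type: e.data.type || 'audio/webm',
            lastModified: Date.now(),
        });
        await uploadFiles({ file });
    };
    recorder.start();
}
```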