import React = require("react"); import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; import { action, computed, IReactionDisposer, observable, runInAction } from "mobx"; import { observer } from "mobx-react"; import { DateField } from "../../../fields/DateField"; import { Doc, DocListCast } from "../../../fields/Doc"; import { documentSchema } from "../../../fields/documentSchemas"; import { makeInterface } from "../../../fields/Schema"; import { ComputedField } from "../../../fields/ScriptField"; import { Cast, DateCast, NumCast } from "../../../fields/Types"; import { AudioField, nullAudio } from "../../../fields/URLField"; import { emptyFunction, formatTime, OmitKeys, returnFalse, setupMoveUpEvents } from "../../../Utils"; import { DocUtils } from "../../documents/Documents"; import { Networking } from "../../Network"; import { CurrentUserUtils } from "../../util/CurrentUserUtils"; import { DragManager } from "../../util/DragManager"; import { undoBatch } from "../../util/UndoManager"; import { CollectionStackedTimeline, TrimScope } from "../collections/CollectionStackedTimeline"; import { ContextMenu } from "../ContextMenu"; import { ContextMenuProps } from "../ContextMenuItem"; import { ViewBoxAnnotatableComponent, ViewBoxAnnotatableProps } from "../DocComponent"; import "./AudioBox.scss"; import { FieldView, FieldViewProps } from "./FieldView"; import { timeStamp } from "console"; declare class MediaRecorder { constructor(e: any); // whatever MediaRecorder has } type AudioDocument = makeInterface<[typeof documentSchema]>; const AudioDocument = makeInterface(documentSchema); enum media_state { PendingRecording = "pendingRecording", Recording = "recording", Paused = "paused", Playing = "playing" } @observer export class AudioBox extends ViewBoxAnnotatableComponent(AudioDocument) { public static LayoutString(fieldKey: string) { return FieldView.LayoutString(AudioBox, fieldKey); } public static SetScrubTime = action((timeInMillisFrom1970: number) => { AudioBox._scrubTime = 0; AudioBox._scrubTime = timeInMillisFrom1970; }); public static Enabled = false; static topControlsHeight = 30; // width of playhead static bottomControlsHeight = 20; // height of timeline in percent of height of audioBox. static zoomInterval = 0.1; @observable static _scrubTime = 0; _dropDisposer?: DragManager.DragDropDisposer; _disposers: { [name: string]: IReactionDisposer } = {}; _ele: HTMLAudioElement | null = null; _stackedTimeline = React.createRef(); _recorder: any; _recordStart = 0; _pauseStart = 0; _pauseEnd = 0; _pausedTime = 0; _stream: MediaStream | undefined; _play: any = null; @observable _volume: number = 1; @observable _muted: boolean = false; @observable _paused: boolean = false; @computed get recordingStart() { return DateCast(this.dataDoc[this.fieldKey + "-recordingStart"])?.date.getTime(); } @computed get rawDuration() { return NumCast(this.dataDoc[`${this.fieldKey}-duration`]); } @computed get links() { return DocListCast(this.dataDoc.links); } @computed get pauseTime() { return this._pauseEnd - this._pauseStart; } // total time paused to update the correct time @computed get mediaState() { return this.layoutDoc.mediaState as media_state; } @computed get path() { // returns the path of the audio file const path = Cast(this.props.Document[this.fieldKey], AudioField, null)?.url.href || ""; return path === nullAudio ? "" : path; } set mediaState(value) { this.layoutDoc.mediaState = value; } get timeline() { return this._stackedTimeline.current; } // can't be computed since it's not observable componentWillUnmount() { this._dropDisposer?.(); Object.values(this._disposers).forEach((disposer) => disposer?.()); const ind = DocUtils.ActiveRecordings.indexOf(this); ind !== -1 && DocUtils.ActiveRecordings.splice(ind, 1); } @action componentDidMount() { this.props.setContentView?.(this); // this tells the DocumentView that this AudioBox is the "content" of the document. this allows the DocumentView to indirectly call getAnchor() on the AudioBox when making a link. if (this.path) { this.mediaState = media_state.Paused; this.setPlayheadTime(NumCast(this.layoutDoc.clipStart)); this.timecodeChanged(); } else { this.mediaState = undefined as any as media_state; } } getLinkData(l: Doc) { let la1 = l.anchor1 as Doc; let la2 = l.anchor2 as Doc; const linkTime = this.timeline?.anchorStart(la2) || this.timeline?.anchorStart(la1) || 0; if (Doc.AreProtosEqual(la1, this.dataDoc)) { la1 = l.anchor2 as Doc; la2 = l.anchor1 as Doc; } return { la1, la2, linkTime }; } getAnchor = () => { return CollectionStackedTimeline.createAnchor( this.rootDoc, this.dataDoc, this.annotationKey, "_timecodeToShow" /* audioStart */, "_timecodeToHide" /* audioEnd */, this._ele?.currentTime || Cast(this.props.Document._currentTimecode, "number", null) || (this.mediaState === media_state.Recording ? (Date.now() - (this.recordingStart || 0)) / 1000 : undefined) ) || this.rootDoc; } // for updating the timecode @action timecodeChanged = () => { if (this.mediaState !== media_state.Recording && this._ele) { this.links .map(l => this.getLinkData(l)) .forEach(({ la1, la2, linkTime }) => { if (linkTime > NumCast(this.layoutDoc._currentTimecode) && linkTime < this._ele!.currentTime) { Doc.linkFollowHighlight(la1); } }); this.layoutDoc._currentTimecode = this._ele.currentTime; } } // play back the audio from time @action playFrom = (seekTimeInSeconds: number, endTime?: number, fullPlay: boolean = false) => { clearTimeout(this._play); // abort any previous clip ending if (Number.isNaN(this._ele?.duration)) { // audio element isn't loaded yet... wait 1/2 second and try again setTimeout(() => this.playFrom(seekTimeInSeconds, endTime), 500); } else if (this.timeline && this._ele && AudioBox.Enabled) { const end = Math.min(this.timeline.trimEnd, endTime ?? this.timeline.trimEnd); const start = Math.max(this.timeline.trimStart, seekTimeInSeconds); if (seekTimeInSeconds >= 0 && this.timeline.trimStart <= end && seekTimeInSeconds <= this.timeline.trimEnd) { this._ele.currentTime = start; this._ele.play(); this.mediaState = media_state.Playing; this._play = setTimeout( () => { this.Pause(); if (fullPlay) this.setPlayheadTime(this.timeline!.trimStart); }, (end - start) * 1000); } else { this.Pause(); } } } // update the recording time updateRecordTime = () => { if (this.mediaState === media_state.Recording) { setTimeout(this.updateRecordTime, 30); if (this._paused) { this._pausedTime += (new Date().getTime() - this._recordStart) / 1000; } else { this.layoutDoc._currentTimecode = (new Date().getTime() - this._recordStart - this.pauseTime) / 1000; } } } // starts recording recordAudioAnnotation = async () => { this._stream = await navigator.mediaDevices.getUserMedia({ audio: true }); this._recorder = new MediaRecorder(this._stream); this.dataDoc[this.fieldKey + "-recordingStart"] = new DateField(); DocUtils.ActiveRecordings.push(this); this._recorder.ondataavailable = async (e: any) => { const [{ result }] = await Networking.UploadFilesToServer(e.data); if (!(result instanceof Error)) { this.props.Document[this.fieldKey] = new AudioField(result.accessPaths.agnostic.client); } }; this._recordStart = new Date().getTime(); runInAction(() => this.mediaState = media_state.Recording); setTimeout(this.updateRecordTime); this._recorder.start(); setTimeout(this.stopRecording, 60 * 60 * 1000); // stop after an hour } @action stopRecording = () => { if (this._recorder) { this._recorder.stop(); this._recorder = undefined; this.dataDoc[this.fieldKey + "-duration"] = (new Date().getTime() - this._recordStart - this.pauseTime) / 1000; this.mediaState = media_state.Paused; this._stream?.getAudioTracks()[0].stop(); const ind = DocUtils.ActiveRecordings.indexOf(this); ind !== -1 && DocUtils.ActiveRecordings.splice(ind, 1); } } // context menu specificContextMenu = (e: React.MouseEvent): void => { const funcs: ContextMenuProps[] = []; funcs.push({ description: (this.layoutDoc.hideAnchors ? "Don't hide" : "Hide") + " anchors", event: e => this.layoutDoc.hideAnchors = !this.layoutDoc.hideAnchors, icon: "expand-arrows-alt", }); funcs.push({ description: (this.layoutDoc.dontAutoFollowLinks ? "" : "Don't") + " follow links when encountered", event: e => this.layoutDoc.dontAutoFollowLinks = !this.layoutDoc.dontAutoFollowLinks, icon: "expand-arrows-alt", }); funcs.push({ description: (this.layoutDoc.dontAutoPlayFollowedLinks ? "" : "Don't") + " play when link is selected", event: e => this.layoutDoc.dontAutoPlayFollowedLinks = !this.layoutDoc.dontAutoPlayFollowedLinks, icon: "expand-arrows-alt", }); funcs.push({ description: (this.layoutDoc.autoPlayAnchors ? "Don't auto" : "Auto") + " play anchors onClick", event: e => this.layoutDoc.autoPlayAnchors = !this.layoutDoc.autoPlayAnchors, icon: "expand-arrows-alt", }); ContextMenu.Instance?.addItem({ description: "Options...", subitems: funcs, icon: "asterisk", }); } // button for starting and stopping the recording Record = (e: React.MouseEvent) => { if (e.button === 0 && !e.ctrlKey) { this._recorder ? this.stopRecording() : this.recordAudioAnnotation(); e.stopPropagation(); } } // for play button Play = (e?: any) => { const eleTime = this._ele?.currentTime || 0; const start = eleTime === this.timeline?.trimEnd ? this.timeline.trimStart : eleTime; this.playFrom(start, undefined, true); e?.stopPropagation?.(); } // pause play back @action Pause = () => { this._ele?.pause(); this.mediaState = media_state.Paused; // clearTimeout(this._play); // stops clip from jumping back to beginning } // creates a text document for dictation onFile = (e: any) => { const newDoc = CurrentUserUtils.GetNewTextDoc( "", NumCast(this.rootDoc.x), NumCast(this.rootDoc.y) + NumCast(this.layoutDoc._height) + 10, NumCast(this.layoutDoc._width), 2 * NumCast(this.layoutDoc._height) ); Doc.GetProto(newDoc).recordingSource = this.dataDoc; Doc.GetProto(newDoc).recordingStart = ComputedField.MakeFunction( `self.recordingSource["${this.fieldKey}-recordingStart"]` ); Doc.GetProto(newDoc).mediaState = ComputedField.MakeFunction( "self.recordingSource.mediaState" ); this.props.addDocument?.(newDoc); e.stopPropagation(); } // ref for updating time setRef = (e: HTMLAudioElement | null) => { e?.addEventListener("timeupdate", this.timecodeChanged); e?.addEventListener("ended", this.Pause); this._ele = e; } // pause the time during recording phase @action recordPause = (e: React.MouseEvent) => { this._pauseStart = new Date().getTime(); this._paused = true; this._recorder.pause(); e.stopPropagation(); } // continue the recording @action recordPlay = (e: React.MouseEvent) => { this._pauseEnd = new Date().getTime(); this._paused = false; this._recorder.resume(); e.stopPropagation(); } playLink = (link: Doc) => { if (link.annotationOn === this.rootDoc) { if (!this.layoutDoc.dontAutoPlayFollowedLinks) { this.playFrom(this.timeline?.anchorStart(link) || 0, this.timeline?.anchorEnd(link)); } else { this._ele!.currentTime = this.layoutDoc._currentTimecode = this.timeline?.anchorStart(link) || 0; } } else { this.links .filter((l) => l.anchor1 === link || l.anchor2 === link) .forEach((l) => { const { la1, la2 } = this.getLinkData(l); const startTime = this.timeline?.anchorStart(la1) || this.timeline?.anchorStart(la2); const endTime = this.timeline?.anchorEnd(la1) || this.timeline?.anchorEnd(la2); if (startTime !== undefined) { if (!this.layoutDoc.dontAutoPlayFollowedLinks) { this.playFrom(startTime, endTime); } else { this._ele!.currentTime = this.layoutDoc._currentTimecode = startTime; } } }); } } @action timelineWhenChildContentsActiveChanged = (isActive: boolean) => this.props.whenChildContentsActiveChanged(this._isAnyChildContentActive = isActive) timelineScreenToLocal = () => this.props.ScreenToLocalTransform().translate(0, -AudioBox.bottomControlsHeight) setPlayheadTime = (time: number) => this._ele!.currentTime = this.layoutDoc._currentTimecode = time; playing = () => this.mediaState === media_state.Playing; isActiveChild = () => this._isAnyChildContentActive; timelineWidth = () => this.props.PanelWidth(); timelineHeight = () => (this.props.PanelHeight() - (AudioBox.topControlsHeight + AudioBox.bottomControlsHeight)) @undoBatch finishTrim = () => { // hides trim controls and displays new clip this.Pause(); this.setPlayheadTime(Math.max(Math.min(this.timeline?.trimEnd || 0, this._ele!.currentTime), this.timeline?.trimStart || 0)); this.timeline?.StopTrimming(); } startTrim = (scope: TrimScope) => { this.Pause(); this.timeline?.StartTrimming(scope); } onClipPointerDown = (e: React.PointerEvent) => { e.stopPropagation(); this.timeline && setupMoveUpEvents(this, e, returnFalse, returnFalse, action((e: PointerEvent, doubleTap?: boolean) => { if (doubleTap) { this.startTrim(TrimScope.All); } else if (this.timeline) { this.Pause(); this.timeline.IsTrimming !== TrimScope.None ? this.finishTrim() : this.startTrim(TrimScope.Clip); } })); } zoom = (zoom: number) => { this.timeline?.setZoom(zoom); } @action setVolume = (volume: number) => { if (this._ele) { this._volume = volume; this._ele.volume = volume; } } @action toggleMute = (e: React.PointerEvent) => { e.stopPropagation(); if (this._ele) { this._muted = !this._muted; this._ele.muted = this._muted; } } setupTimelineDrop = (r: HTMLDivElement | null) => { if (r && this.timeline) { this._dropDisposer?.(); this._dropDisposer = DragManager.MakeDropTarget(r, (e, de) => { const [xp, yp] = this.props.ScreenToLocalTransform().transformPoint(de.x, de.y); de.complete.docDragData && this.timeline!.internalDocDrop(e, de, de.complete.docDragData, xp); }, this.layoutDoc, undefined); } } @computed get recordingControls() { return
{[media_state.Recording, media_state.Playing].includes(this.mediaState) ?
e.stopPropagation()}>
{formatTime(Math.round(NumCast(this.layoutDoc._currentTimecode)))}
:
RECORD
}
} @computed get playbackControls() { return
{ e.stopPropagation(); }} onChange={(e: React.ChangeEvent) => { this.setVolume(Number(e.target.value)) }} />
{this.renderTimeline}
{this.audio}
{this.timeline && formatTime(Math.round(NumCast(this.layoutDoc._currentTimecode) - NumCast(this.timeline.clipStart)))}
{ e.stopPropagation(); }} onChange={(e: React.ChangeEvent) => { this.zoom(Number(e.target.value)); }} />
{this.timeline && formatTime(Math.round(NumCast(this.timeline?.clipDuration)))}
} @computed get renderTimeline() { return ( ); } // returns the html audio element @computed get audio() { return ; } render() { return
{!this.path ? this.recordingControls : this.playbackControls}
; } }