import React = require("react");
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import { action, computed, IReactionDisposer, observable, reaction, runInAction } from "mobx";
import { observer } from "mobx-react";
import { DateField } from "../../../fields/DateField";
import { Doc, DocListCast, Opt } from "../../../fields/Doc";
import { documentSchema } from "../../../fields/documentSchemas";
import { makeInterface } from "../../../fields/Schema";
import { ComputedField } from "../../../fields/ScriptField";
import { Cast, NumCast } from "../../../fields/Types";
import { AudioField, nullAudio } from "../../../fields/URLField";
import { emptyFunction, formatTime, OmitKeys } from "../../../Utils";
import { DocUtils } from "../../documents/Documents";
import { Networking } from "../../Network";
import { CurrentUserUtils } from "../../util/CurrentUserUtils";
import { DragManager } from "../../util/DragManager";
import { SnappingManager } from "../../util/SnappingManager";
import { CollectionStackedTimeline } from "../collections/CollectionStackedTimeline";
import { ContextMenu } from "../ContextMenu";
import { ContextMenuProps } from "../ContextMenuItem";
import { ViewBoxAnnotatableComponent, ViewBoxAnnotatableProps } from "../DocComponent";
import { Colors } from "../global/globalEnums";
import "./AudioBox.scss";
import { FieldView, FieldViewProps } from "./FieldView";
import { LinkDocPreview } from "./LinkDocPreview";
import { undoBatch } from "../../util/UndoManager";

// minimal ambient declaration; the recorder instance is held in an `any`-typed field below
declare class MediaRecorder {
    constructor(e: any); // whatever MediaRecorder has
}

type AudioDocument = makeInterface<[typeof documentSchema]>;
const AudioDocument = makeInterface(documentSchema);

@observer
export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProps & FieldViewProps, AudioDocument>(AudioDocument) {
    public static LayoutString(fieldKey: string) { return FieldView.LayoutString(AudioBox, fieldKey); }
    public static Enabled = false;

    static playheadWidth = 40; // width of the playhead
    static heightPercent = 75; // height of the timeline as a percent of the height of the audioBox
    static Instance: AudioBox;

    _disposers: { [name: string]: IReactionDisposer } = {};
    _ele: HTMLAudioElement | null = null;
    _stackedTimeline = React.createRef<CollectionStackedTimeline>();
    _recorder: any;
    _recordStart = 0;
    _pauseStart = 0;
    _pauseEnd = 0;
    _pausedTime = 0;
    _stream: MediaStream | undefined;
    _start: number = 0;
    _play: any = null; // timeout handle used to stop playback at a requested end time
    _ended: boolean = false;

    @observable static _scrubTime = 0;
    @observable _markerEnd: number = 0;
    @observable _position: number = 0;
    @observable _waveHeight: Opt<number> = Cast(this.layoutDoc._height, "number");
    @observable _paused: boolean = false;
    @observable _trimming: boolean = false;
    @observable _trimStart: number = NumCast(this.layoutDoc.clipStart);
    @observable _trimEnd: number | undefined = Cast(this.layoutDoc.clipEnd, "number");

    @computed get clipStart() { return NumCast(this.layoutDoc.clipStart); }
    @computed get clipEnd() { return NumCast(this.layoutDoc.clipEnd, this.duration); }
    // while trimming is active, the trim bounds come from the local observables; otherwise from the layout doc
    @computed get trimStart() { return this._trimming ? this._trimStart : NumCast(this.layoutDoc.clipStart); }
    @computed get trimEnd() { return this._trimming && this._trimEnd !== undefined ? this._trimEnd : NumCast(this.layoutDoc.clipEnd, this.duration); }
    @computed get mediaState(): undefined | "pendingRecording" | "recording" | "paused" | "playing" {
        return this.layoutDoc.mediaState as undefined | "pendingRecording" | "recording" | "paused" | "playing";
    }
    set mediaState(value) { this.layoutDoc.mediaState = value; }

    public static SetScrubTime = action((timeInMillisFrom1970: number) => {
        AudioBox._scrubTime = 0; // reset first so observers fire even if the same time is set twice
        AudioBox._scrubTime = timeInMillisFrom1970;
    });

    @computed get recordingStart() {
        return Cast(this.dataDoc[this.props.fieldKey + "-recordingStart"], DateField)?.date.getTime();
    }
    @computed get rawDuration() { return NumCast(this.dataDoc[`${this.fieldKey}-duration`]); }
    @computed get duration() {
        return NumCast(this.layoutDoc.clipEnd, NumCast(this.layoutDoc.clipStart) + NumCast(this.dataDoc[`${this.fieldKey}-duration`])) - NumCast(this.layoutDoc.clipStart);
    }
    @computed get trimDuration() { return this.trimEnd - this.trimStart; }
    @computed get anchorDocs() { return DocListCast(this.dataDoc[this.annotationKey]); }
    @computed get links() { return DocListCast(this.dataDoc.links); }
    @computed get pauseTime() { return this._pauseEnd - this._pauseStart; } // total time paused, used to correct the elapsed recording time
    @computed get heightPercent() { return AudioBox.heightPercent; }

    constructor(props: Readonly<ViewBoxAnnotatableProps & FieldViewProps>) {
        super(props);
        AudioBox.Instance = this;
    }

    getLinkData(l: Doc) {
        let la1 = l.anchor1 as Doc;
        let la2 = l.anchor2 as Doc;
        const linkTime = this._stackedTimeline.current?.anchorStart(la2) || this._stackedTimeline.current?.anchorStart(la1) || 0;
        if (Doc.AreProtosEqual(la1, this.dataDoc)) {
            la1 = l.anchor2 as Doc;
            la2 = l.anchor1 as Doc;
        }
        return { la1, la2, linkTime };
    }

    getAnchor = () => {
        return CollectionStackedTimeline.createAnchor(
            this.rootDoc,
            this.dataDoc,
            this.annotationKey,
            "_timecodeToShow" /* audioStart */,
            "_timecodeToHide" /* audioEnd */,
            this._ele?.currentTime ||
            Cast(this.props.Document._currentTimecode, "number", null) ||
            (this.mediaState === "recording" ? (Date.now() - (this.recordingStart || 0)) / 1000 : undefined)
        ) || this.rootDoc;
    }

    componentWillUnmount() {
        this.dropDisposer?.();
        Object.values(this._disposers).forEach((disposer) => disposer?.());
        const ind = DocUtils.ActiveRecordings.indexOf(this);
        ind !== -1 && DocUtils.ActiveRecordings.splice(ind, 1);
    }

    private dropDisposer?: DragManager.DragDropDisposer;

    @action
    componentDidMount() {
        this.props.setContentView?.(this); // tells the DocumentView that this AudioBox is the "content" of the document, which lets the DocumentView indirectly call getAnchor() on the AudioBox when making a link
        this.mediaState = this.path ? "paused" : undefined;
        this.path && this.setAnchorTime(NumCast(this.layoutDoc.clipStart));
        this.path && this.timecodeChanged();
        this._disposers.triggerAudio = reaction(
            () => !LinkDocPreview.LinkInfo && this.props.renderDepth !== -1 ? NumCast(this.Document._triggerAudio, null) : undefined,
            (start) =>
                start !== undefined &&
                setTimeout(() => {
                    this.playFrom(start);
                    setTimeout(() => {
                        this.Document._currentTimecode = start;
                        this.Document._triggerAudio = undefined;
                    }, 10);
                }), // wait for mainCont and try again to play
            { fireImmediately: true }
        );
        this._disposers.audioStop = reaction(
            () => this.props.renderDepth !== -1 && !LinkDocPreview.LinkInfo ? Cast(this.Document._audioStop, "number", null) : undefined,
            (audioStop) =>
                audioStop !== undefined &&
                setTimeout(() => {
                    this.Pause();
                    setTimeout(() => (this.Document._audioStop = undefined), 10);
                }), // wait for mainCont and try again to pause
            { fireImmediately: true }
        );
    }
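    // Sketch of the trigger handshake above (hypothetical caller, not part of this file):
    // a link-following view requests playback by writing onto the audio document, and the
    // `triggerAudio` reaction seeks, plays, and then clears the flag shortly afterwards.
    //
    //   audioDoc._triggerAudio = 1.5; // seek to 1.5s and start playing
    //   audioDoc._audioStop = 4.0;    // later: the `audioStop` reaction pauses playback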
    // for updating the timecode
    @action
    timecodeChanged = () => {
        const htmlEle = this._ele;
        if (this.mediaState !== "recording" && htmlEle) {
            this.links
                .map((l) => this.getLinkData(l))
                .forEach(({ la1, la2, linkTime }) => {
                    if (linkTime > NumCast(this.layoutDoc._currentTimecode) && linkTime < htmlEle.currentTime) {
                        Doc.linkFollowHighlight(la1);
                    }
                });
            this.layoutDoc._currentTimecode = htmlEle.currentTime;
        }
    }

    // pause playback
    Pause = action(() => {
        this._ele!.pause();
        this.mediaState = "paused";
    });

    // play audio for documents created during recording
    playFromTime = (absoluteTime: number) => {
        this.recordingStart && this.playFrom((absoluteTime - this.recordingStart) / 1000);
    }

    // play back the audio from the given time
    @action
    playFrom = (seekTimeInSeconds: number, endTime: number = this.trimEnd, fullPlay: boolean = false) => {
        clearTimeout(this._play);
        if (Number.isNaN(this._ele?.duration)) {
            // the audio element isn't ready yet -- retry shortly with the same arguments
            setTimeout(() => this.playFrom(seekTimeInSeconds, endTime, fullPlay), 500);
        } else if (this._ele && AudioBox.Enabled) {
            if (seekTimeInSeconds < 0) {
                if (seekTimeInSeconds > -1) {
                    setTimeout(() => this.playFrom(0), -seekTimeInSeconds * 1000);
                } else {
                    this.Pause();
                }
            } else if (this.trimStart <= endTime && seekTimeInSeconds <= this.trimEnd) {
                const start = Math.max(this.trimStart, seekTimeInSeconds);
                const end = Math.min(this.trimEnd, endTime);
                this._ele.currentTime = start;
                this._ele.play();
                runInAction(() => (this.mediaState = "playing"));
                if (endTime !== this.duration) {
                    this._play = setTimeout(() => {
                        this._ended = fullPlay ? true : this._ended;
                        this.Pause();
                    }, (end - start) * 1000); // use setTimeout to play a specific duration
                }
            } else {
                this.Pause();
            }
        }
    }

    // update the recording time
    updateRecordTime = () => {
        if (this.mediaState === "recording") {
            setTimeout(this.updateRecordTime, 30);
            if (this._paused) {
                this._pausedTime += (new Date().getTime() - this._recordStart) / 1000;
            } else {
                this.layoutDoc._currentTimecode = (new Date().getTime() - this._recordStart - this.pauseTime) / 1000;
            }
        }
    }

    // starts recording
    recordAudioAnnotation = async () => {
        this._stream = await navigator.mediaDevices.getUserMedia({ audio: true });
        this._recorder = new MediaRecorder(this._stream);
        this.dataDoc[this.props.fieldKey + "-recordingStart"] = new DateField(new Date());
        DocUtils.ActiveRecordings.push(this);
        this._recorder.ondataavailable = async (e: any) => {
            const [{ result }] = await Networking.UploadFilesToServer(e.data);
            if (!(result instanceof Error)) {
                this.props.Document[this.props.fieldKey] = new AudioField(result.accessPaths.agnostic.client);
                if (this._trimEnd === undefined) this._trimEnd = this.duration;
            }
        };
        this._recordStart = new Date().getTime();
        runInAction(() => (this.mediaState = "recording"));
        setTimeout(this.updateRecordTime, 0);
        this._recorder.start();
        setTimeout(() => this._recorder && this.stopRecording(), 60 * 60 * 1000); // stop after an hour
    }
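    // Timing sketch for the pause bookkeeping above: the elapsed recording time is
    //   (Date.now() - _recordStart - pauseTime) / 1000
    // e.g. recording starts at t=0s, pauses at 10s, resumes at 15s, and is sampled at 20s:
    //   (20000 - 0 - 5000) / 1000 = 15 seconds of actual audio.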
"" : "Don't") + " play when link is selected", event: () => (this.layoutDoc.dontAutoPlayFollowedLinks = !this.layoutDoc.dontAutoPlayFollowedLinks), icon: "expand-arrows-alt", }); funcs.push({ description: (this.layoutDoc.autoPlayAnchors ? "Don't auto play" : "Auto play") + " anchors onClick", event: () => (this.layoutDoc.autoPlayAnchors = !this.layoutDoc.autoPlayAnchors), icon: "expand-arrows-alt", }); ContextMenu.Instance?.addItem({ description: "Options...", subitems: funcs, icon: "asterisk", }); } // stops the recording stopRecording = action(() => { this._recorder.stop(); this._recorder = undefined; this.dataDoc[this.fieldKey + "-duration"] = (new Date().getTime() - this._recordStart - this.pauseTime) / 1000; this.mediaState = "paused"; this._trimEnd = this.duration; this.layoutDoc.clipStart = 0; this.layoutDoc.clipEnd = this.duration; this._stream?.getAudioTracks()[0].stop(); const ind = DocUtils.ActiveRecordings.indexOf(this); ind !== -1 && DocUtils.ActiveRecordings.splice(ind, 1); }); // button for starting and stopping the recording recordClick = (e: React.MouseEvent) => { if (e.button === 0 && !e.ctrlKey) { this._recorder ? this.stopRecording() : this.recordAudioAnnotation(); e.stopPropagation(); } } // for play button Play = (e?: any) => { let start; if (this._ended || this._ele!.currentTime === this.duration) { start = NumCast(this.layoutDoc.clipStart); this._ended = false; } else { start = this._ele!.currentTime; } this.playFrom(start, this.trimEnd, true); e?.stopPropagation?.(); } // creates a text document for dictation onFile = (e: any) => { const newDoc = CurrentUserUtils.GetNewTextDoc( "", NumCast(this.props.Document.x), NumCast(this.props.Document.y) + NumCast(this.props.Document._height) + 10, NumCast(this.props.Document._width), 2 * NumCast(this.props.Document._height) ); Doc.GetProto(newDoc).recordingSource = this.dataDoc; Doc.GetProto(newDoc).recordingStart = ComputedField.MakeFunction( `self.recordingSource["${this.props.fieldKey}-recordingStart"]` ); Doc.GetProto(newDoc).mediaState = ComputedField.MakeFunction( "self.recordingSource.mediaState" ); this.props.addDocument?.(newDoc); e.stopPropagation(); } // ref for updating time setRef = (e: HTMLAudioElement | null) => { e?.addEventListener("timeupdate", this.timecodeChanged); e?.addEventListener("ended", this.Pause); this._ele = e; } // returns the path of the audio file @computed get path() { const field = Cast(this.props.Document[this.props.fieldKey], AudioField); const path = field instanceof AudioField ? field.url.href : ""; return path === nullAudio ? 
"" : path; } // returns the html audio element @computed get audio() { return ; } // pause the time during recording phase @action recordPause = (e: React.MouseEvent) => { this._pauseStart = new Date().getTime(); this._paused = true; this._recorder.pause(); e.stopPropagation(); } // continue the recording @action recordPlay = (e: React.MouseEvent) => { this._pauseEnd = new Date().getTime(); this._paused = false; this._recorder.resume(); e.stopPropagation(); } playing = () => this.mediaState === "playing"; playLink = (link: Doc) => { const stack = this._stackedTimeline.current; if (link.annotationOn === this.rootDoc) { if (!this.layoutDoc.dontAutoPlayFollowedLinks) { this.playFrom(stack?.anchorStart(link) || 0, stack?.anchorEnd(link)); } else { this._ele!.currentTime = this.layoutDoc._currentTimecode = stack?.anchorStart(link) || 0; } } else { this.links .filter((l) => l.anchor1 === link || l.anchor2 === link) .forEach((l) => { const { la1, la2 } = this.getLinkData(l); const startTime = stack?.anchorStart(la1) || stack?.anchorStart(la2); const endTime = stack?.anchorEnd(la1) || stack?.anchorEnd(la2); if (startTime !== undefined) { if (!this.layoutDoc.dontAutoPlayFollowedLinks) { endTime ? this.playFrom(startTime, endTime) : this.playFrom(startTime); } else { this._ele!.currentTime = this.layoutDoc._currentTimecode = startTime; } } }); } } // shows trim controls @action startTrim = () => { if (this.mediaState === "playing") { this.Pause(); } this._trimming = true; } // hides trim controls and displays new clip @undoBatch finishTrim = action(() => { if (this.mediaState === "playing") { this.Pause(); } this.layoutDoc.clipStart = this.trimStart; this.layoutDoc.clipEnd = this.trimEnd; this.setAnchorTime(Math.max(Math.min(this.trimEnd, this._ele!.currentTime), this.trimStart)); this._trimming = false; }); @action setStartTrim = (newStart: number) => { this._trimStart = newStart; } @action setEndTrim = (newEnd: number) => { this._trimEnd = newEnd; } isActiveChild = () => this._isAnyChildContentActive; timelineWhenChildContentsActiveChanged = (isActive: boolean) => this.props.whenChildContentsActiveChanged( runInAction(() => (this._isAnyChildContentActive = isActive)) ) timelineScreenToLocal = () => this.props .ScreenToLocalTransform() .translate( -AudioBox.playheadWidth, (-(100 - this.heightPercent) / 200) * this.props.PanelHeight() ) setAnchorTime = (time: number) => { (this._ele!.currentTime = this.layoutDoc._currentTimecode = time); } timelineHeight = () => (((this.props.PanelHeight() * this.heightPercent) / 100) * this.heightPercent) / 100 // panelHeight * heightPercent is player height. * heightPercent is timeline height (as per css inline) timelineWidth = () => this.props.PanelWidth() - AudioBox.playheadWidth; trimEndFunc = () => this.trimEnd; trimStartFunc = () => this.trimStart; trimDurationFunc = () => this.trimDuration; @computed get renderTimeline() { return ( ); } render() { const interactive = SnappingManager.GetIsDragging() || this.props.isContentActive() ? "-interactive" : ""; return (
    render() {
        const interactive = SnappingManager.GetIsDragging() || this.props.isContentActive() ? "-interactive" : "";
        return (
            <div
                ref={(r) => {
                    if (r && this._stackedTimeline.current) {
                        this.dropDisposer?.();
                        this.dropDisposer = DragManager.MakeDropTarget(
                            r,
                            (e, de) => {
                                const [xp, yp] = this.props.ScreenToLocalTransform().transformPoint(de.x, de.y);
                                de.complete.docDragData && this._stackedTimeline.current!.internalDocDrop(e, de, de.complete.docDragData, xp);
                            },
                            this.layoutDoc,
                            undefined
                        );
                    }
                }}
                className="audiobox-container"
                onContextMenu={this.specificContextMenu}
                onClick={!this.path && !this._recorder ? this.recordAudioAnnotation : undefined}
                style={{ pointerEvents: this.props.layerProvider?.(this.layoutDoc) === false ? "none" : undefined }}
            >
                {!this.path ? (
                    // recording UI -- NOTE: markup reconstructed after the original JSX was lost; class names and icons are assumptions
                    <div className="audiobox-buttons">
                        <div className="audiobox-dictation" onClick={this.onFile}>
                            <FontAwesomeIcon icon="file-alt" size="2x" />
                        </div>
                        {this.mediaState === "recording" || this.mediaState === "paused" ? (
                            <div className="recording" onClick={(e) => e.stopPropagation()}>
                                <div className="buttons" onClick={this.recordClick}>
                                    <FontAwesomeIcon icon="stop" size="1x" />
                                </div>
                                <div className="buttons" onClick={this._paused ? this.recordPlay : this.recordPause}>
                                    <FontAwesomeIcon icon={this._paused ? "play" : "pause"} size="1x" />
                                </div>
                                <div className="time">
                                    {formatTime(Math.round(NumCast(this.layoutDoc._currentTimecode)))}
                                </div>
                            </div>
                        ) : (
                            <button className={`audiobox-record${interactive}`} style={{ backgroundColor: "black" }}>
                                RECORD
                            </button>
                        )}
                    </div>
                ) : (
                    // playback UI -- NOTE: markup reconstructed after the original JSX was lost; class names and icons are assumptions
                    <div className="audiobox-controls">
                        <div className="audiobox-player" style={{ height: `${AudioBox.heightPercent}%` }}>
                            <div
                                className="audiobox-playhead"
                                style={{ width: AudioBox.playheadWidth }}
                                onClick={this.mediaState === "paused" ? this.Play : this.Pause}
                            >
                                <FontAwesomeIcon icon={this.mediaState === "paused" ? "play" : "pause"} size="1x" />
                            </div>
                            <div className="audiobox-timeline">{this.renderTimeline}</div>
                            {this.audio}
                            <div className="audiobox-current-time">
                                {this._trimming
                                    ? formatTime(Math.round(NumCast(this.layoutDoc._currentTimecode)))
                                    : formatTime(Math.round(NumCast(this.layoutDoc._currentTimecode) - NumCast(this.trimStart)))}
                            </div>
                            <div className="audiobox-total-time">
                                {this._trimming || !this._trimEnd
                                    ? formatTime(Math.round(NumCast(this.duration)))
                                    : formatTime(Math.round(NumCast(this.trimDuration)))}
                            </div>
                        </div>
                    </div>
                )}
            </div>
        );
    }
}
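// Usage sketch (hypothetical, not part of this file): an AudioBox renders a document's
// audio field through its layout string, e.g. when constructing an audio document:
//
//   const audioDoc = Docs.Create.AudioDocument(...); // assumed factory function
//   audioDoc.layout = AudioBox.LayoutString("data"); // render the "data" field with an AudioBox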