aboutsummaryrefslogtreecommitdiff
path: root/src/client/views/nodes/AudioBox.tsx
diff options
context:
space:
mode:
Diffstat (limited to 'src/client/views/nodes/AudioBox.tsx')
-rw-r--r--src/client/views/nodes/AudioBox.tsx269
1 files changed, 256 insertions, 13 deletions
diff --git a/src/client/views/nodes/AudioBox.tsx b/src/client/views/nodes/AudioBox.tsx
index be6ae630f..86bd23b67 100644
--- a/src/client/views/nodes/AudioBox.tsx
+++ b/src/client/views/nodes/AudioBox.tsx
@@ -2,24 +2,267 @@ import React = require("react");
import { FieldViewProps, FieldView } from './FieldView';
import { observer } from "mobx-react";
import "./AudioBox.scss";
-import { Cast } from "../../../new_fields/Types";
-import { AudioField } from "../../../new_fields/URLField";
+import { Cast, DateCast, NumCast } from "../../../new_fields/Types";
+import { AudioField, nullAudio } from "../../../new_fields/URLField";
+import { DocExtendableComponent } from "../DocComponent";
+import { makeInterface, createSchema } from "../../../new_fields/Schema";
+import { documentSchema } from "../../../new_fields/documentSchemas";
+import { Utils, returnTrue, emptyFunction, returnOne, returnTransparent } from "../../../Utils";
+import { RouteStore } from "../../../server/RouteStore";
+import { runInAction, observable, reaction, IReactionDisposer, computed, action } from "mobx";
+import { DateField } from "../../../new_fields/DateField";
+import { SelectionManager } from "../../util/SelectionManager";
+import { Doc, DocListCast } from "../../../new_fields/Doc";
+import { ContextMenuProps } from "../ContextMenuItem";
+import { ContextMenu } from "../ContextMenu";
+import { Id } from "../../../new_fields/FieldSymbols";
+import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
+import { DocumentView } from "./DocumentView";
+
+// Ambient declarations: the project's TS lib config evidently lacks typings for the
+// browser MediaRecorder API, so only the constructor is declared here and the
+// instance is otherwise used untyped (see _recorder: any below).
+interface Window {
+ MediaRecorder: MediaRecorder;
+}
+
+declare class MediaRecorder {
+ // whatever MediaRecorder has
+ constructor(e: any);
+}
+// Audio-specific document fields layered on top of the shared documentSchema.
+// playOnSelect: when true, selecting another document starts playback (see componentDidMount).
+export const audioSchema = createSchema({
+ playOnSelect: "boolean"
+});
+
+// Typed accessor interface combining the generic document schema with the audio schema.
+type AudioDocument = makeInterface<[typeof documentSchema, typeof audioSchema]>;
+const AudioDocument = makeInterface(documentSchema, audioSchema);
-const defaultField: AudioField = new AudioField(new URL("http://techslides.com/demos/samples/sample.mp3"));
+// AudioBox: a document view that either records audio from the microphone (when the
+// data field has no audio yet) or renders a playback UI with a timeline of link markers.
 @observer
-export class AudioBox extends React.Component<FieldViewProps> {
+export class AudioBox extends DocExtendableComponent<FieldViewProps, AudioDocument>(AudioDocument) {
+ public static LayoutString(fieldKey: string) { return FieldView.LayoutString(AudioBox, fieldKey); }
+ // Global gate: playFrom() is a no-op unless this is true (presumably toggled elsewhere — TODO confirm where).
+ public static Enabled = false;
+
+ // MobX reaction disposers, torn down in componentWillUnmount.
+ _linkPlayDisposer: IReactionDisposer | undefined;
+ _reactionDisposer: IReactionDisposer | undefined;
+ _scrubbingDisposer: IReactionDisposer | undefined;
+ // The rendered <audio> element (set via setRef); null until mounted.
+ _ele: HTMLAudioElement | null = null;
+ // MediaRecorder instance while recording; untyped because of the ambient declaration above.
+ _recorder: any;
+ // Wall-clock ms timestamp when recording started; used to compute elapsed time/duration.
+ _recordStart = 0;
+
+ // Shared scrub position (ms since 1970) that all AudioBox instances react to.
+ @observable private static _scrubTime = 0;
+ @observable private _audioState: "unrecorded" | "recording" | "recorded" = "unrecorded";
+ @observable private _playing = false;
+ public static SetScrubTime = action((timeInMillisFrom1970: number) => AudioBox._scrubTime = timeInMillisFrom1970);
+ // Documents currently being recorded into, so other views can find live recordings.
+ public static ActiveRecordings: Doc[] = [];
+
+ // Initializes playback state and wires up three reactions:
+ // 1) scrollToLinkID — when a link to this audio is followed, seek to the link's
+ // timecode (after a 250ms delay) and highlight the link, then clear the trigger.
+ // 2) selection — if playOnSelect is set, selecting a different document starts
+ // playback keyed off that document's creationTime.
+ // 3) static _scrubTime — converts a global scrub timestamp into an offset from
+ // this recording's start and seeks there.
+ componentDidMount() {
+ runInAction(() => this._audioState = this.path ? "recorded" : "unrecorded");
+ this._linkPlayDisposer = reaction(() => this.layoutDoc.scrollToLinkID,
+ scrollLinkId => {
+ scrollLinkId && DocListCast(this.dataDoc.links).filter(l => l[Id] === scrollLinkId).map(l => {
+ let la1 = l.anchor1 as Doc;
+ // Use the timecode stored on whichever anchor points at this audio document.
+ let linkTime = Doc.AreProtosEqual(la1, this.dataDoc) ? NumCast(l.anchor1Timecode) : NumCast(l.anchor2Timecode);
+ setTimeout(() => { this.playFrom(linkTime); Doc.linkFollowHighlight(l); }, 250);
+ });
+ // One-shot trigger: clear it so the same link can be followed again later.
+ scrollLinkId && Doc.SetInPlace(this.layoutDoc, "scrollToLinkID", undefined, false);
+ }, { fireImmediately: true });
+ this._reactionDisposer = reaction(() => SelectionManager.SelectedDocuments(),
+ selected => {
+ let sel = selected.length ? selected[0].props.Document : undefined;
+ // NOTE(review): this passes creationTime as epoch *milliseconds*, but playFrom
+ // compares its argument against the audio duration in *seconds* — looks
+ // inconsistent with the scrub reaction below, which subtracts recordingStart
+ // and divides by 1000. Verify intended units.
+ this.Document.playOnSelect && sel && !Doc.AreProtosEqual(sel, this.props.Document) && this.playFrom(DateCast(sel.creationTime).date.getTime());
+ });
+ this._scrubbingDisposer = reaction(() => AudioBox._scrubTime, timeInMillisecondsFrom1970 => {
+ let start = this.extensionDoc && DateCast(this.extensionDoc.recordingStart);
+ // Convert absolute scrub time to seconds since this recording began.
+ start && this.playFrom((timeInMillisecondsFrom1970 - start.date.getTime()) / 1000);
+ });
+ }
+
+ // "timeupdate" handler (registered in setRef). While playing a recorded clip:
+ // persists the element's duration onto the data doc (skipping the transient
+ // Infinity value some streams report), highlights the opposite anchor of any
+ // link whose timecode was just crossed since the last stored timecode, and
+ // mirrors the element's currentTime into Document.currentTimecode.
+ timecodeChanged = () => {
+ const htmlEle = this._ele;
+ if (this._audioState === "recorded" && htmlEle) {
+ htmlEle.duration && htmlEle.duration !== Infinity && runInAction(() => this.dataDoc.duration = htmlEle.duration);
+ DocListCast(this.dataDoc.links).map(l => {
+ let la1 = l.anchor1 as Doc;
+ let linkTime = NumCast(l.anchor2Timecode);
+ // Normalize so la1 is the *other* end of the link and linkTime is the
+ // timecode on the anchor that refers to this audio document.
+ if (Doc.AreProtosEqual(la1, this.dataDoc)) {
+ la1 = l.anchor2 as Doc;
+ linkTime = NumCast(l.anchor1Timecode);
+ }
+ // Crossed this link's timecode since the last update => flash-highlight it.
+ if (linkTime > NumCast(this.Document.currentTimecode) && linkTime < htmlEle.currentTime) {
+ Doc.linkFollowHighlight(la1);
+ }
+ });
+ this.Document.currentTimecode = htmlEle.currentTime;
+ }
+ }
+
+ // Pauses the <audio> element and clears the playing flag. Also used as the
+ // "ended" event handler (see setRef). Assumes _ele is set (non-null assertion).
+ pause = action(() => {
+ this._ele!.pause();
+ this._playing = false;
+ });
+
+ // Seeks to the given offset (seconds) and plays. No-op unless the element exists
+ // and the global AudioBox.Enabled gate is on. Out-of-range seeks (negative, or
+ // beyond the clip duration) pause instead of playing.
+ playFrom = (seekTimeInSeconds: number) => {
+ if (this._ele && AudioBox.Enabled) {
+ if (seekTimeInSeconds < 0) {
+ this.pause();
+ } else if (seekTimeInSeconds <= this._ele.duration) {
+ this._ele.currentTime = seekTimeInSeconds;
+ this._ele.play();
+ runInAction(() => this._playing = true);
+ } else {
+ this.pause();
+ }
+ }
+ }
+
+ // Disposes the three MobX reactions created in componentDidMount.
+ componentWillUnmount() {
+ this._reactionDisposer && this._reactionDisposer();
+ this._linkPlayDisposer && this._linkPlayDisposer();
+ this._scrubbingDisposer && this._scrubbingDisposer();
+ }
+
+
+ // Self-rescheduling ~30ms timer loop that keeps Document.currentTimecode updated
+ // (in seconds elapsed since _recordStart) while recording is in progress; stops
+ // rescheduling itself once _audioState leaves "recording".
+ updateRecordTime = () => {
+ if (this._audioState === "recording") {
+ setTimeout(this.updateRecordTime, 30);
+ this.Document.currentTimecode = (new Date().getTime() - this._recordStart) / 1000;
+ }
+ }
- public static LayoutString() { return FieldView.LayoutString(AudioBox); }
+ // Starts microphone capture via getUserMedia and a MediaRecorder. When the
+ // recorder emits its data, the blob is POSTed to the server's upload route and
+ // the returned URL is stored into this document's audio field. Recording is
+ // force-stopped after one hour as a safety timeout.
+ recordAudioAnnotation = () => {
+ let gumStream: any;
+ let self = this; // captured for the non-arrow callbacks below
+ const extensionDoc = this.extensionDoc;
+ extensionDoc && navigator.mediaDevices.getUserMedia({
+ audio: true
+ }).then(function (stream) {
+ gumStream = stream;
+ self._recorder = new MediaRecorder(stream);
+ // Remember the wall-clock start so scrubbing can map absolute time -> offset.
+ extensionDoc.recordingStart = new DateField(new Date());
+ AudioBox.ActiveRecordings.push(self.props.Document);
+ self._recorder.ondataavailable = async function (e: any) {
+ const formData = new FormData();
+ formData.append("file", e.data);
+ const res = await fetch(Utils.prepend(RouteStore.upload), {
+ method: 'POST',
+ body: formData
+ });
+ const files = await res.json();
+ const url = Utils.prepend(files[0].path);
+ // upload to server with known URL
+ self.props.Document[self.props.fieldKey] = new AudioField(url);
+ };
+ runInAction(() => self._audioState = "recording");
+ self._recordStart = new Date().getTime();
+ setTimeout(self.updateRecordTime, 0); // kick off the elapsed-time updater loop
+ self._recorder.start();
+ setTimeout(() => {
+ self.stopRecording();
+ gumStream.getAudioTracks()[0].stop();
+ }, 60 * 60 * 1000); // stop after an hour?
+ });
+ }
+
+ // Adds an "Audio Funcs..." submenu to the context menu with a single toggle
+ // for the playOnSelect behavior.
+ specificContextMenu = (e: React.MouseEvent): void => {
+ let funcs: ContextMenuProps[] = [];
+ funcs.push({ description: (this.Document.playOnSelect ? "Don't play" : "Play") + " when document selected", event: () => this.Document.playOnSelect = !this.Document.playOnSelect, icon: "expand-arrows-alt" });
+
+ ContextMenu.Instance.addItem({ description: "Audio Funcs...", subitems: funcs, icon: "asterisk" });
+ }
+
+ // Stops the MediaRecorder (triggering ondataavailable and the upload), records
+ // the final duration from wall-clock elapsed time, flips state to "recorded",
+ // and removes this document from the ActiveRecordings list.
+ // NOTE(review): _recorder is never reset to undefined here, so a subsequent
+ // recordClick would call stopRecording again rather than start a new recording —
+ // confirm whether re-recording is intended to be possible.
+ stopRecording = action(() => {
+ this._recorder.stop();
+ this.dataDoc.duration = (new Date().getTime() - this._recordStart) / 1000;
+ this._audioState = "recorded";
+ let ind = AudioBox.ActiveRecordings.indexOf(this.props.Document);
+ ind !== -1 && (AudioBox.ActiveRecordings.splice(ind, 1));
+ });
+
+ // Plain left-click (no ctrl) toggles recording: stop if a recorder exists,
+ // otherwise begin a new recording. Swallows the event either way.
+ recordClick = (e: React.MouseEvent) => {
+ if (e.button === 0 && !e.ctrlKey) {
+ this._recorder ? this.stopRecording() : this.recordAudioAnnotation();
+ e.stopPropagation();
+ }
+ }
+
+ // Toggle play/pause: if the element is paused, resume from its current position;
+ // otherwise playFrom(-1) routes to pause() (see playFrom's negative-seek branch).
+ onPlay = (e: any) => {
+ this.playFrom(this._ele!.paused ? this._ele!.currentTime : -1);
+ e.stopPropagation();
+ }
+ // Stop: pause playback and rewind to the beginning.
+ onStop = (e: any) => {
+ this.pause();
+ this._ele!.currentTime = 0;
+ e.stopPropagation();
+ }
+
+ // Ref callback for the <audio> element: attaches the timecode-sync and
+ // auto-pause-on-end listeners, then stores the element for later control.
+ setRef = (e: HTMLAudioElement | null) => {
+ e && e.addEventListener("timeupdate", this.timecodeChanged);
+ e && e.addEventListener("ended", this.pause);
+ this._ele = e;
+ }
+
+ // The audio URL stored in this document's field, or "" when absent.
+ // NOTE(review): this compares the href *string* against nullAudio — verify that
+ // nullAudio is the sentinel href string and not a URL object, or the comparison
+ // can never match.
+ @computed get path() {
+ let field = Cast(this.props.Document[this.props.fieldKey], AudioField);
+ let path = (field instanceof AudioField) ? field.url.href : "";
+ return path === nullAudio ? "" : path;
+ }
+
+ // The <audio> element for playback; the "-interactive" class suffix is applied
+ // when this view is active. Wired to setRef for event listeners and control.
+ @computed get audio() {
+ let interactive = this.active() ? "-interactive" : "";
+ return <audio ref={this.setRef} className={`audiobox-control${interactive}`}>
+ <source src={this.path} type="audio/mpeg" />
+ Not supported.
+ </audio>;
+ }
+ // Renders nothing without an extensionDoc. Otherwise: if no audio has been
+ // recorded yet (no path), shows a RECORD/STOP button (clicks handled by
+ // recordClick on the container); once recorded, shows play/stop controls plus a
+ // timeline with one marker per timecoded link and a moving current-position bar.
 render() {
- let field = Cast(this.props.Document[this.props.fieldKey], AudioField, defaultField);
- let path = field.url.href;
-
- return (
- <audio controls className="audiobox-cont" style={{ pointerEvents: "all" }}>
- <source src={path} type="audio/mpeg" />
- Not supported.
- </audio>
+ let interactive = this.active() ? "-interactive" : "";
+ return (!this.extensionDoc ? (null) :
+ <div className={`audiobox-container`} onContextMenu={this.specificContextMenu}
+ onClick={!this.path ? this.recordClick : undefined}>
+ <div className="audiobox-handle"></div>
+ {!this.path ?
+ <button className={`audiobox-record${interactive}`} style={{ backgroundColor: this._audioState === "recording" ? "red" : "black" }}>
+ {this._audioState === "recording" ? "STOP" : "RECORD"}
+ </button> :
+ <div className="audiobox-controls">
+ <div className="audiobox-player" onClick={this.onPlay}>
+ <div className="audiobox-playhead"> <FontAwesomeIcon style={{ width: "100%" }} icon={this._playing ? "pause" : "play"} size={this.props.PanelHeight() < 36 ? "1x" : "2x"} /></div>
+ <div className="audiobox-playhead" onClick={this.onStop}><FontAwesomeIcon style={{ width: "100%" }} icon="stop" size={this.props.PanelHeight() < 36 ? "1x" : "2x"} /></div>
+ {/* Clicking the timeline seeks: map click x-position to a fraction of duration. */}
+ <div className="audiobox-timeline" onClick={e => e.stopPropagation()}
+ onPointerDown={e => {
+ if (e.button === 0 && !e.ctrlKey) {
+ let rect = (e.target as any).getBoundingClientRect();
+ this._ele!.currentTime = this.Document.currentTimecode = (e.clientX - rect.x) / rect.width * NumCast(this.dataDoc.duration);
+ this.pause();
+ e.stopPropagation();
+ }
+ }} >
+ {DocListCast(this.dataDoc.links).map((l, i) => {
+ let la1 = l.anchor1 as Doc;
+ let la2 = l.anchor2 as Doc;
+ let linkTime = NumCast(l.anchor2Timecode);
+ {/* Normalize so la1/la2 are the far/near link endpoints and linkTime is this audio's timecode. */}
+ if (Doc.AreProtosEqual(la1, this.dataDoc)) {
+ la1 = l.anchor2 as Doc;
+ la2 = l.anchor1 as Doc;
+ linkTime = NumCast(l.anchor1Timecode);
+ }
+ return !linkTime ? (null) :
+ <div className={this.props.PanelHeight() < 32 ? "audiobox-marker-minicontainer" : "audiobox-marker-container"} key={l[Id]} style={{ left: `${linkTime / NumCast(this.dataDoc.duration, 1) * 100}%` }}>
+ <div className={this.props.PanelHeight() < 32 ? "audioBox-linker-mini" : "audioBox-linker"} key={"linker" + i}>
+ <DocumentView {...this.props} Document={l} layoutKey={Doc.LinkEndpoint(l, la2)}
+ parentActive={returnTrue} bringToFront={emptyFunction} zoomToScale={emptyFunction} getScale={returnOne}
+ backgroundColor={returnTransparent} />
+ </div>
+ <div key={i} className="audiobox-marker" onPointerEnter={() => Doc.linkFollowHighlight(la1)}
+ onPointerDown={e => { if (e.button === 0 && !e.ctrlKey) { this.playFrom(linkTime); e.stopPropagation(); } }}
+ onClick={e => { if (e.button === 0 && !e.ctrlKey) { this.pause(); e.stopPropagation(); } }} />
+ </div>;
+ })}
+ <div className="audiobox-current" style={{ left: `${NumCast(this.Document.currentTimecode) / NumCast(this.dataDoc.duration, 1) * 100}%` }} />
+ {this.audio}
+ </div>
+ </div>
+ </div>
+ }
+ </div>
 );
 }
} \ No newline at end of file