Diffstat (limited to 'src/client/views/nodes/AudioBox.tsx')
-rw-r--r--  src/client/views/nodes/AudioBox.tsx  837
1 file changed, 369 insertions(+), 468 deletions(-)
diff --git a/src/client/views/nodes/AudioBox.tsx b/src/client/views/nodes/AudioBox.tsx
index 93377f1dc..c2b4d0eee 100644
--- a/src/client/views/nodes/AudioBox.tsx
+++ b/src/client/views/nodes/AudioBox.tsx
@@ -1,49 +1,54 @@
import React = require("react");
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
-import {
- action,
- computed,
- IReactionDisposer,
- observable,
- reaction,
- runInAction
-} from "mobx";
+import { action, computed, IReactionDisposer, observable, runInAction } from "mobx";
import { observer } from "mobx-react";
import { DateField } from "../../../fields/DateField";
-import { Doc, DocListCast, Opt } from "../../../fields/Doc";
+import { Doc, DocListCast } from "../../../fields/Doc";
+import { documentSchema } from "../../../fields/documentSchemas";
+import { makeInterface } from "../../../fields/Schema";
import { ComputedField } from "../../../fields/ScriptField";
-import { Cast, NumCast } from "../../../fields/Types";
+import { Cast, DateCast, NumCast } from "../../../fields/Types";
import { AudioField, nullAudio } from "../../../fields/URLField";
-import { emptyFunction, formatTime } from "../../../Utils";
+import { emptyFunction, formatTime, OmitKeys, returnFalse, setupMoveUpEvents } from "../../../Utils";
import { DocUtils } from "../../documents/Documents";
import { Networking } from "../../Network";
import { CurrentUserUtils } from "../../util/CurrentUserUtils";
-import { SnappingManager } from "../../util/SnappingManager";
-import { CollectionStackedTimeline } from "../collections/CollectionStackedTimeline";
+import { DragManager } from "../../util/DragManager";
+import { undoBatch } from "../../util/UndoManager";
+import { CollectionStackedTimeline, TrimScope } from "../collections/CollectionStackedTimeline";
import { ContextMenu } from "../ContextMenu";
import { ContextMenuProps } from "../ContextMenuItem";
-import {
- ViewBoxAnnotatableComponent,
- ViewBoxAnnotatableProps
-} from "../DocComponent";
-import { Colors } from "../global/globalEnums";
+import { ViewBoxAnnotatableComponent, ViewBoxAnnotatableProps } from "../DocComponent";
import "./AudioBox.scss";
import { FieldView, FieldViewProps } from "./FieldView";
-import { LinkDocPreview } from "./LinkDocPreview";
declare class MediaRecorder {
constructor(e: any); // whatever MediaRecorder has
}
+
+type AudioDocument = makeInterface<[typeof documentSchema]>;
+const AudioDocument = makeInterface(documentSchema);
+
+enum media_state {
+ PendingRecording = "pendingRecording",
+ Recording = "recording",
+ Paused = "paused",
+ Playing = "playing"
+}
@observer
-export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProps & FieldViewProps>() {
- public static LayoutString(fieldKey: string) {
- return FieldView.LayoutString(AudioBox, fieldKey);
- }
+export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProps & FieldViewProps, AudioDocument>(AudioDocument) {
+
+ public static LayoutString(fieldKey: string) { return FieldView.LayoutString(AudioBox, fieldKey); }
+ public static SetScrubTime = action((timeInMillisFrom1970: number) => {
+ AudioBox._scrubTime = 0;
+ AudioBox._scrubTime = timeInMillisFrom1970;
+ });
public static Enabled = false;
- static playheadWidth = 40; // width of playhead
- static heightPercent = 75; // height of timeline in percent of height of audioBox.
- static Instance: AudioBox;
static topControlsHeight = 30; // height (px) of the top controls bar
static bottomControlsHeight = 20; // height (px) of the bottom controls bar
+ @observable static _scrubTime = 0;
+ _dropDisposer?: DragManager.DragDropDisposer;
_disposers: { [name: string]: IReactionDisposer } = {};
_ele: HTMLAudioElement | null = null;
_stackedTimeline = React.createRef<CollectionStackedTimeline>();
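The new media_state enum above is string-backed, so the value persisted on the document stays a stable, human-readable string, and documents written before this commit (with mediaState === "paused" and so on) keep working. A minimal TypeScript sketch of that property, with illustrative names only:

enum media_state { Paused = "paused", Playing = "playing" }
const doc: { mediaState?: string } = {};
doc.mediaState = media_state.Playing;                // stored as the plain string "playing"
console.log(doc.mediaState === media_state.Playing); // true: string-enum members compare as strings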
@@ -53,74 +58,47 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
_pauseEnd = 0;
_pausedTime = 0;
_stream: MediaStream | undefined;
- _start: number = 0;
_play: any = null;
- _ended: boolean = false;
- @observable static _scrubTime = 0;
- @observable _markerEnd: number = 0;
- @observable _position: number = 0;
- @observable _waveHeight: Opt<number> = NumCast(this.layoutDoc._height);
+ @observable _finished: boolean = false;
+ @observable _volume: number = 1;
+ @observable _muted: boolean = false;
@observable _paused: boolean = false;
- @observable _trimming: boolean = false;
- @observable _trimStart: number = NumCast(this.layoutDoc.clipStart) ? NumCast(this.layoutDoc.clipStart) : 0;
- @observable _trimEnd: number = NumCast(this.layoutDoc.clipEnd) ? NumCast(this.layoutDoc.clipEnd)
- : this.duration;
-
- @computed get mediaState():
- | undefined
- | "pendingRecording"
- | "recording"
- | "paused"
- | "playing" {
- return this.dataDoc.mediaState as
- | undefined
- | "pendingRecording"
- | "recording"
- | "paused"
- | "playing";
- }
- set mediaState(value) {
- this.dataDoc.mediaState = value;
- }
- public static SetScrubTime = action((timeInMillisFrom1970: number) => {
- AudioBox._scrubTime = 0;
- AudioBox._scrubTime = timeInMillisFrom1970;
- });
- @computed get recordingStart() {
- return Cast(
- this.dataDoc[this.props.fieldKey + "-recordingStart"],
- DateField
- )?.date.getTime();
- }
- @computed get duration() {
- return NumCast(this.dataDoc[`${this.fieldKey}-duration`]);
- }
- @computed get trimDuration() {
- return this._trimming && this._trimEnd ? this.duration : this._trimEnd - this._trimStart;
- }
- @computed get anchorDocs() {
- return DocListCast(this.dataDoc[this.annotationKey]);
- }
- @computed get links() {
- return DocListCast(this.dataDoc.links);
+ // @observable rawDuration: number = 0; // computed from the length of the audio element when loaded
+ @computed get recordingStart() { return DateCast(this.dataDoc[this.fieldKey + "-recordingStart"])?.date.getTime(); }
+ @computed get rawDuration() { return NumCast(this.dataDoc[`${this.fieldKey}-duration`]); } // bcz: shouldn't be needed since it's computed from the audio element
+ // mehek: not 100% sure, but due to the order in which things load this seems necessary;
+ // if you remove it and default the value to 0, the timeline and waveform set their bounds incorrectly
+
+ @computed get miniPlayer() { return this.props.PanelHeight() < 50; }
+ @computed get links() { return DocListCast(this.dataDoc.links); }
+ @computed get pauseTime() { return this._pauseEnd - this._pauseStart; } // total time paused to update the correct time
+ @computed get mediaState() { return this.layoutDoc.mediaState as media_state; }
+ @computed get path() { // returns the path of the audio file
+ const path = Cast(this.props.Document[this.fieldKey], AudioField, null)?.url.href || "";
+ return path === nullAudio ? "" : path;
}
- @computed get pauseTime() {
- return this._pauseEnd - this._pauseStart;
- } // total time paused to update the correct time
- @computed get heightPercent() {
- return AudioBox.heightPercent;
+ set mediaState(value) { this.layoutDoc.mediaState = value; }
+
+ get timeline() { return this._stackedTimeline.current; } // can't be computed since it's not observable
+
+ componentWillUnmount() {
+ this.removeCurrentlyPlaying();
+ this._dropDisposer?.();
+ Object.values(this._disposers).forEach((disposer) => disposer?.());
+ const ind = DocUtils.ActiveRecordings.indexOf(this);
+ ind !== -1 && DocUtils.ActiveRecordings.splice(ind, 1);
}
- constructor(props: Readonly<ViewBoxAnnotatableProps & FieldViewProps>) {
- super(props);
- AudioBox.Instance = this;
+ @action
+ componentDidMount() {
+ this.props.setContentView?.(this); // this tells the DocumentView that this AudioBox is the "content" of the document. this allows the DocumentView to indirectly call getAnchor() on the AudioBox when making a link.
- if (this.duration === undefined) {
- runInAction(
- () =>
- (this.Document[this.fieldKey + "-duration"] = this.Document.duration)
- );
+ if (this.path) {
+ this.mediaState = media_state.Paused;
+ this.setPlayheadTime(NumCast(this.layoutDoc.clipStart));
+ } else {
+ this.mediaState = undefined as any as media_state;
}
}
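componentWillUnmount above tears everything down through a name-keyed map of IReactionDisposer callbacks. A self-contained sketch of that MobX pattern, using illustrative names rather than this component's actual reactions:

import { IReactionDisposer, observable, reaction } from "mobx";

const time = observable.box(0);
const disposers: { [name: string]: IReactionDisposer } = {};
disposers.logTime = reaction(
  () => time.get(),
  t => console.log("time changed:", t),
);
// mirror of the unmount cleanup: dispose every registered reaction
Object.values(disposers).forEach(disposer => disposer?.());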
@@ -128,8 +106,8 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
let la1 = l.anchor1 as Doc;
let la2 = l.anchor2 as Doc;
const linkTime =
- this._stackedTimeline.current?.anchorStart(la2) ||
- this._stackedTimeline.current?.anchorStart(la1) ||
+ this.timeline?.anchorStart(la2) ||
+ this.timeline?.anchorStart(la1) ||
0;
if (Doc.AreProtosEqual(la1, this.dataDoc)) {
la1 = l.anchor2 as Doc;
@@ -139,154 +117,93 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
getAnchor = () => {
- return (
- CollectionStackedTimeline.createAnchor(
- this.rootDoc,
- this.dataDoc,
- this.annotationKey,
- "_timecodeToShow" /* audioStart */,
- "_timecodeToHide" /* audioEnd */,
- this._ele?.currentTime ||
- Cast(this.props.Document._currentTimecode, "number", null) ||
- (this.mediaState === "recording"
- ? (Date.now() - (this.recordingStart || 0)) / 1000
- : undefined)
- ) || this.rootDoc
- );
- }
-
- componentWillUnmount() {
- Object.values(this._disposers).forEach((disposer) => disposer?.());
- const ind = DocUtils.ActiveRecordings.indexOf(this);
- ind !== -1 && DocUtils.ActiveRecordings.splice(ind, 1);
- }
-
- @action
- componentDidMount() {
- this.props.setContentView?.(this); // this tells the DocumentView that this AudioBox is the "content" of the document. this allows the DocumentView to indirectly call getAnchor() on the AudioBox when making a link.
-
- this.mediaState = this.path ? "paused" : undefined;
-
- this.layoutDoc.clipStart = this.layoutDoc.clipStart ? this.layoutDoc.clipStart : 0;
- this.layoutDoc.clipEnd = this.layoutDoc.clipEnd ? this.layoutDoc.clipEnd : this.duration ? this.duration : undefined;
-
- this.path && this.setAnchorTime(NumCast(this.layoutDoc.clipStart));
- this.path && this.timecodeChanged();
-
- this._disposers.triggerAudio = reaction(
- () =>
- !LinkDocPreview.LinkInfo && this.props.renderDepth !== -1
- ? NumCast(this.Document._triggerAudio, null)
- : undefined,
- (start) =>
- start !== undefined &&
- setTimeout(() => {
- this.playFrom(start);
- setTimeout(() => {
- this.Document._currentTimecode = start;
- this.Document._triggerAudio = undefined;
- }, 10);
- }), // wait for mainCont and try again to play
- { fireImmediately: true }
- );
-
- this._disposers.audioStop = reaction(
- () =>
- this.props.renderDepth !== -1 && !LinkDocPreview.LinkInfo
- ? Cast(this.Document._audioStop, "number", null)
- : undefined,
- (audioStop) =>
- audioStop !== undefined &&
- setTimeout(() => {
- this.Pause();
- setTimeout(() => (this.Document._audioStop = undefined), 10);
- }), // wait for mainCont and try again to play
- { fireImmediately: true }
- );
+ return CollectionStackedTimeline.createAnchor(
+ this.rootDoc,
+ this.dataDoc,
+ this.annotationKey,
+ "_timecodeToShow" /* audioStart */,
+ "_timecodeToHide" /* audioEnd */,
+ this._ele?.currentTime ||
+ Cast(this.props.Document._currentTimecode, "number", null) ||
+ (this.mediaState === media_state.Recording
+ ? (Date.now() - (this.recordingStart || 0)) / 1000
+ : undefined)
+ ) || this.rootDoc;
}
// for updating the timecode
@action
timecodeChanged = () => {
- const htmlEle = this._ele;
- if (this.mediaState !== "recording" && htmlEle) {
- htmlEle.duration &&
- htmlEle.duration !== Infinity &&
- runInAction(
- () => (this.dataDoc[this.fieldKey + "-duration"] = htmlEle.duration)
- );
- this.layoutDoc.clipEnd = this.layoutDoc.clipEnd ? Math.min(this.duration, NumCast(this.layoutDoc.clipEnd)) : this.duration;
- this._trimEnd = this._trimEnd ? Math.min(this.duration, this._trimEnd) : this.duration;
+ if (this.mediaState !== media_state.Recording && this._ele) {
this.links
- .map((l) => this.getLinkData(l))
+ .map(l => this.getLinkData(l))
.forEach(({ la1, la2, linkTime }) => {
- if (
- linkTime > NumCast(this.layoutDoc._currentTimecode) &&
- linkTime < htmlEle.currentTime
- ) {
+ if (linkTime > NumCast(this.layoutDoc._currentTimecode) &&
+ linkTime < this._ele!.currentTime) {
Doc.linkFollowHighlight(la1);
}
});
- this.layoutDoc._currentTimecode = htmlEle.currentTime;
-
+ this.layoutDoc._currentTimecode = this._ele.currentTime;
+ this.timeline?.scrollToTime(NumCast(this.layoutDoc._currentTimecode));
}
}
- // pause play back
- Pause = action(() => {
- this._ele!.pause();
- this.mediaState = "paused";
- });
-
- // play audio for documents created during recording
- playFromTime = (absoluteTime: number) => {
- this.recordingStart &&
- this.playFrom((absoluteTime - this.recordingStart) / 1000);
- }
-
// play back the audio from time
@action
- playFrom = (seekTimeInSeconds: number, endTime: number = this._trimEnd, fullPlay: boolean = false) => {
- clearTimeout(this._play);
- if (Number.isNaN(this._ele?.duration)) {
+ playFrom = (seekTimeInSeconds: number, endTime?: number, fullPlay: boolean = false) => {
+ clearTimeout(this._play); // abort any previous clip ending
+ if (Number.isNaN(this._ele?.duration)) { // audio element isn't loaded yet... wait 1/2 second and try again
setTimeout(() => this.playFrom(seekTimeInSeconds, endTime), 500);
- } else if (this._ele && AudioBox.Enabled) {
- if (seekTimeInSeconds < 0) {
- if (seekTimeInSeconds > -1) {
- setTimeout(() => this.playFrom(0), -seekTimeInSeconds * 1000);
- } else {
- this.Pause();
- }
- } else if (this._trimStart <= endTime && seekTimeInSeconds <= this._trimEnd) {
- const start = Math.max(this._trimStart, seekTimeInSeconds);
- const end = Math.min(this._trimEnd, endTime);
+ }
+ else if (this.timeline && this._ele && AudioBox.Enabled) {
+ const end = Math.min(this.timeline.trimEnd, endTime ?? this.timeline.trimEnd);
+ const start = Math.max(this.timeline.trimStart, seekTimeInSeconds);
+ if (seekTimeInSeconds >= 0 && this.timeline.trimStart <= end && seekTimeInSeconds <= this.timeline.trimEnd) {
this._ele.currentTime = start;
this._ele.play();
- runInAction(() => (this.mediaState = "playing"));
- if (endTime !== this.duration) {
- this._play = setTimeout(
- () => {
- this._ended = fullPlay ? true : this._ended;
- this.Pause();
- },
- (end - start) * 1000
- ); // use setTimeout to play a specific duration
- }
+ this.mediaState = media_state.Playing;
+ this.addCurrentlyPlaying();
+ this._play = setTimeout(
+ () => {
+ if (fullPlay) this._finished = true;
+ // removes from currently playing if playback has reached end of range marker
+ else this.removeCurrentlyPlaying();
+ this.Pause();
+ },
+ (end - start) * 1000);
} else {
this.Pause();
}
}
}
+ // removes from currently playing display
+ @action
+ removeCurrentlyPlaying = () => {
+ if (CollectionStackedTimeline.CurrentlyPlaying) {
+ const index = CollectionStackedTimeline.CurrentlyPlaying.indexOf(this.layoutDoc.doc as Doc);
+ index !== -1 && CollectionStackedTimeline.CurrentlyPlaying.splice(index, 1);
+ }
+ }
+
+ @action
+ addCurrentlyPlaying = () => {
+ if (!CollectionStackedTimeline.CurrentlyPlaying) {
+ CollectionStackedTimeline.CurrentlyPlaying = [];
+ }
+ if (CollectionStackedTimeline.CurrentlyPlaying.indexOf(this.layoutDoc.doc as Doc) == -1) {
+ CollectionStackedTimeline.CurrentlyPlaying.push(this.layoutDoc.doc as Doc);
+ }
+ }
+
// update the recording time
updateRecordTime = () => {
- if (this.mediaState === "recording") {
+ if (this.mediaState === media_state.Recording) {
setTimeout(this.updateRecordTime, 30);
if (this._paused) {
this._pausedTime += (new Date().getTime() - this._recordStart) / 1000;
} else {
- this.layoutDoc._currentTimecode =
- (new Date().getTime() - this._recordStart - this.pauseTime) / 1000;
+ this.layoutDoc._currentTimecode = (new Date().getTime() - this._recordStart - this.pauseTime) / 1000;
}
}
}
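playFrom above ends a clip with a scheduled setTimeout rather than polling timeupdate. The technique in isolation, as a hedged standalone sketch (names are illustrative):

// Seek to `start`, play, and schedule a pause once the range elapses.
// Returning the timer handle lets a caller clearTimeout() when a new
// range is requested first, which is what the _play field does above.
function playRange(ele: HTMLAudioElement, start: number, end: number): number {
  ele.currentTime = start;
  ele.play();
  return window.setTimeout(() => ele.pause(), (end - start) * 1000);
}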
@@ -295,49 +212,57 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
recordAudioAnnotation = async () => {
this._stream = await navigator.mediaDevices.getUserMedia({ audio: true });
this._recorder = new MediaRecorder(this._stream);
- this.dataDoc[this.props.fieldKey + "-recordingStart"] = new DateField(
- new Date()
- );
+ this.dataDoc[this.fieldKey + "-recordingStart"] = new DateField();
DocUtils.ActiveRecordings.push(this);
this._recorder.ondataavailable = async (e: any) => {
console.log("Data available", e);
const [{ result }] = await Networking.UploadFilesToServer(e.data);
console.log("Data result", result);
if (!(result instanceof Error)) {
- this.props.Document[this.props.fieldKey] = new AudioField(result.accessPaths.agnostic.client);
+ this.props.Document[this.fieldKey] = new AudioField(result.accessPaths.agnostic.client);
}
};
this._recordStart = new Date().getTime();
- runInAction(() => (this.mediaState = "recording"));
- setTimeout(this.updateRecordTime, 0);
+ runInAction(() => this.mediaState = media_state.Recording);
+ setTimeout(this.updateRecordTime);
this._recorder.start();
- setTimeout(() => this._recorder && this.stopRecording(), 60 * 60 * 1000); // stop after an hour
+ setTimeout(this.stopRecording, 60 * 60 * 1000); // stop after an hour
+ }
+
+ @action
+ stopRecording = () => {
+ if (this._recorder) {
+ this._recorder.stop();
+ this._recorder = undefined;
+ this.dataDoc[this.fieldKey + "-duration"] = (new Date().getTime() - this._recordStart - this.pauseTime) / 1000;
+ this.mediaState = media_state.Paused;
+ this._stream?.getAudioTracks()[0].stop();
+ const ind = DocUtils.ActiveRecordings.indexOf(this);
+ ind !== -1 && DocUtils.ActiveRecordings.splice(ind, 1);
+ }
}
// context menu
specificContextMenu = (e: React.MouseEvent): void => {
const funcs: ContextMenuProps[] = [];
funcs.push({
- description:
- (this.layoutDoc.hideAnchors ? "Don't hide" : "Hide") + " anchors",
- event: () => (this.layoutDoc.hideAnchors = !this.layoutDoc.hideAnchors),
+ description: (this.layoutDoc.hideAnchors ? "Don't hide" : "Hide") + " anchors",
+ event: e => this.layoutDoc.hideAnchors = !this.layoutDoc.hideAnchors,
icon: "expand-arrows-alt",
});
funcs.push({
- description:
- (this.layoutDoc.dontAutoPlayFollowedLinks ? "" : "Don't") +
- " play when link is selected",
- event: () =>
- (this.layoutDoc.dontAutoPlayFollowedLinks =
- !this.layoutDoc.dontAutoPlayFollowedLinks),
+ description: (this.layoutDoc.dontAutoFollowLinks ? "" : "Don't") + " follow links when encountered",
+ event: e => this.layoutDoc.dontAutoFollowLinks = !this.layoutDoc.dontAutoFollowLinks,
icon: "expand-arrows-alt",
});
funcs.push({
- description:
- (this.layoutDoc.autoPlayAnchors ? "Don't auto play" : "Auto play") +
- " anchors onClick",
- event: () =>
- (this.layoutDoc.autoPlayAnchors = !this.layoutDoc.autoPlayAnchors),
+ description: (this.layoutDoc.dontAutoPlayFollowedLinks ? "" : "Don't") + " play when link is selected",
+ event: e => this.layoutDoc.dontAutoPlayFollowedLinks = !this.layoutDoc.dontAutoPlayFollowedLinks,
+ icon: "expand-arrows-alt",
+ });
+ funcs.push({
+ description: (this.layoutDoc.autoPlayAnchors ? "Don't auto" : "Auto") + " play anchors onClick",
+ event: e => this.layoutDoc.autoPlayAnchors = !this.layoutDoc.autoPlayAnchors,
icon: "expand-arrows-alt",
});
ContextMenu.Instance?.addItem({
@@ -347,23 +272,8 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
});
}
- // stops the recording
- stopRecording = action(() => {
- this._recorder.stop();
- this._recorder = undefined;
- this.dataDoc[this.fieldKey + "-duration"] =
- (new Date().getTime() - this._recordStart - this.pauseTime) / 1000;
- this.mediaState = "paused";
- this._trimEnd = this.duration;
- this.layoutDoc.clipStart = 0;
- this.layoutDoc.clipEnd = this.duration;
- this._stream?.getAudioTracks()[0].stop();
- const ind = DocUtils.ActiveRecordings.indexOf(this);
- ind !== -1 && DocUtils.ActiveRecordings.splice(ind, 1);
- });
-
// button for starting and stopping the recording
- recordClick = (e: React.MouseEvent) => {
+ Record = (e: React.MouseEvent) => {
if (e.button === 0 && !e.ctrlKey) {
this._recorder ? this.stopRecording() : this.recordAudioAnnotation();
e.stopPropagation();
@@ -372,33 +282,44 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
// for play button
Play = (e?: any) => {
- let start;
- if (this._ended || this._ele!.currentTime === this.duration) {
- start = this._trimStart;
- this._ended = false;
- }
- else {
- start = this._ele!.currentTime;
+ e?.stopPropagation?.();
+
+ if (this.timeline && this._ele) {
+ const eleTime = this._ele.currentTime;
+ let start = eleTime >= this.timeline.trimEnd || eleTime <= this.timeline.trimStart ? this.timeline.trimStart : eleTime;
+ if (this._finished) {
+ this._finished = false;
+ start = this.timeline.trimStart;
+ }
+ this.playFrom(start, this.timeline.trimEnd, true);
}
+ }
- this.playFrom(start, this._trimEnd, true);
- e?.stopPropagation?.();
+ // pause play back
+ @action
+ Pause = () => {
+ if (this._ele) {
+ this._ele.pause();
+ this.mediaState = media_state.Paused;
+ if (!this._finished) clearTimeout(this._play);
+ this.removeCurrentlyPlaying();
+ }
}
// creates a text document for dictation
onFile = (e: any) => {
const newDoc = CurrentUserUtils.GetNewTextDoc(
"",
- NumCast(this.props.Document.x),
- NumCast(this.props.Document.y) +
- NumCast(this.props.Document._height) +
+ NumCast(this.rootDoc.x),
+ NumCast(this.rootDoc.y) +
+ NumCast(this.layoutDoc._height) +
10,
- NumCast(this.props.Document._width),
- 2 * NumCast(this.props.Document._height)
+ NumCast(this.layoutDoc._width),
+ 2 * NumCast(this.layoutDoc._height)
);
Doc.GetProto(newDoc).recordingSource = this.dataDoc;
Doc.GetProto(newDoc).recordingStart = ComputedField.MakeFunction(
- `self.recordingSource["${this.props.fieldKey}-recordingStart"]`
+ `self.recordingSource["${this.fieldKey}-recordingStart"]`
);
Doc.GetProto(newDoc).mediaState = ComputedField.MakeFunction(
"self.recordingSource.mediaState"
@@ -410,25 +331,10 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
// ref for updating time
setRef = (e: HTMLAudioElement | null) => {
e?.addEventListener("timeupdate", this.timecodeChanged);
- e?.addEventListener("ended", this.Pause);
+ e?.addEventListener("ended", () => { this._finished = true; this.Pause() });
this._ele = e;
}
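setRef above is a React ref callback: it receives the element on mount and null on unmount, so the optional chaining guards both cases. In miniature:

const setRef = (ele: HTMLAudioElement | null) => {
  ele?.addEventListener("timeupdate", () => console.log(ele.currentTime));
  ele?.addEventListener("ended", () => console.log("playback finished"));
};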
- // returns the path of the audio file
- @computed get path() {
- const field = Cast(this.props.Document[this.props.fieldKey], AudioField);
- const path = field instanceof AudioField ? field.url.href : "";
- return path === nullAudio ? "" : path;
- }
-
- // returns the html audio element
- @computed get audio() {
- return <audio ref={this.setRef} className={`audiobox-control${this.props.isContentActive() ? "-interactive" : ""}`}>
- <source src={this.path} type="audio/mpeg" />
- Not supported.
- </audio>;
- }
-
// pause the time during recording phase
@action
recordPause = (e: React.MouseEvent) => {
@@ -447,97 +353,204 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
e.stopPropagation();
}
- playing = () => this.mediaState === "playing";
playLink = (link: Doc) => {
- const stack = this._stackedTimeline.current;
if (link.annotationOn === this.rootDoc) {
if (!this.layoutDoc.dontAutoPlayFollowedLinks) {
- this.playFrom(stack?.anchorStart(link) || 0, stack?.anchorEnd(link));
+ this.playFrom(this.timeline?.anchorStart(link) || 0, this.timeline?.anchorEnd(link));
} else {
- this._ele!.currentTime = this.layoutDoc._currentTimecode =
- stack?.anchorStart(link) || 0;
+ this._ele!.currentTime = this.layoutDoc._currentTimecode = this.timeline?.anchorStart(link) || 0;
}
} else {
this.links
.filter((l) => l.anchor1 === link || l.anchor2 === link)
.forEach((l) => {
const { la1, la2 } = this.getLinkData(l);
- const startTime = stack?.anchorStart(la1) || stack?.anchorStart(la2);
- const endTime = stack?.anchorEnd(la1) || stack?.anchorEnd(la2);
+ const startTime = this.timeline?.anchorStart(la1) || this.timeline?.anchorStart(la2);
+ const endTime = this.timeline?.anchorEnd(la1) || this.timeline?.anchorEnd(la2);
if (startTime !== undefined) {
if (!this.layoutDoc.dontAutoPlayFollowedLinks) {
- endTime
- ? this.playFrom(startTime, endTime)
- : this.playFrom(startTime);
+ this.playFrom(startTime, endTime);
} else {
- this._ele!.currentTime = this.layoutDoc._currentTimecode =
- startTime;
+ this._ele!.currentTime = this.layoutDoc._currentTimecode = startTime;
}
}
});
}
}
- // shows trim controls
@action
- startTrim = () => {
- if (!this.duration) {
- this.timecodeChanged();
- }
- if (this.mediaState === "playing") {
- this.Pause();
- }
- this._trimming = true;
+ timelineWhenChildContentsActiveChanged = (isActive: boolean) =>
+ this.props.whenChildContentsActiveChanged(this._isAnyChildContentActive = isActive)
+ timelineScreenToLocal = () =>
+ this.props.ScreenToLocalTransform().translate(0, -AudioBox.bottomControlsHeight)
+ setPlayheadTime = (time: number) => this._ele!.currentTime = this.layoutDoc._currentTimecode = time;
+ playing = () => this.mediaState === media_state.Playing;
+ isActiveChild = () => this._isAnyChildContentActive;
+
+ timelineWidth = () => this.props.PanelWidth();
+ timelineHeight = () => (this.props.PanelHeight() - (AudioBox.topControlsHeight + AudioBox.bottomControlsHeight))
+
+ @undoBatch
+ finishTrim = () => { // hides trim controls and displays new clip
+ this.Pause();
+ this.setPlayheadTime(Math.max(Math.min(this.timeline?.trimEnd || 0, this._ele!.currentTime), this.timeline?.trimStart || 0));
+ this.timeline?.StopTrimming();
+ }
+
+ startTrim = (scope: TrimScope) => {
+ this.Pause();
+ this.timeline?.StartTrimming(scope);
+ }
+
+ onClipPointerDown = (e: React.PointerEvent) => {
+ e.stopPropagation();
+ this.timeline && setupMoveUpEvents(this, e, returnFalse, returnFalse, action((e: PointerEvent, doubleTap?: boolean) => {
+ if (doubleTap) {
+ this.startTrim(TrimScope.All);
+ } else if (this.timeline) {
+ this.Pause();
+ this.timeline.IsTrimming !== TrimScope.None ? this.finishTrim() : this.startTrim(TrimScope.Clip);
+ }
+ }));
+ }
+
+ zoom = (zoom: number) => {
+ this.timeline?.setZoom(zoom);
}
- // hides trim controls and displays new clip
@action
- finishTrim = () => {
- if (this.mediaState === "playing") {
- this.Pause();
+ setVolume = (volume: number) => {
+ if (this._ele) {
+ this._volume = volume;
+ this._ele.volume = volume;
+ if (this._muted) {
+ this.toggleMute();
+ }
}
- this.layoutDoc.clipStart = this._trimStart;
- this.layoutDoc.clipEnd = this._trimEnd;
- this._trimming = false;
- this.setAnchorTime(Math.max(Math.min(this._trimEnd, this._ele!.currentTime), this._trimStart));
}
@action
- setStartTrim = (newStart: number) => {
- this._trimStart = newStart;
+ toggleMute = () => {
+ if (this._ele) {
+ this._muted = !this._muted;
+ this._ele.muted = this._muted;
+ }
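setVolume and toggleMute above keep the last chosen volume separate from the muted flag, so unmuting restores the previous level and moving the slider implicitly unmutes. The same interplay in a standalone sketch (an illustrative class, not the component itself):

class VolumeControl {
  private _volume = 1;
  private _muted = false;
  constructor(private readonly ele: HTMLAudioElement) {}
  setVolume(v: number) {
    this._volume = this.ele.volume = v; // v in [0, 1]
    if (this._muted) this.toggleMute(); // adjusting volume unmutes
  }
  toggleMute() {
    this._muted = !this._muted;
    this.ele.muted = this._muted;
  }
}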
}
- @action
- setEndTrim = (newEnd: number) => {
- this._trimEnd = newEnd;
+ setupTimelineDrop = (r: HTMLDivElement | null) => {
+ if (r && this.timeline) {
+ this._dropDisposer?.();
+ this._dropDisposer = DragManager.MakeDropTarget(r,
+ (e, de) => {
+ const [xp, yp] = this.props.ScreenToLocalTransform().transformPoint(de.x, de.y);
+ de.complete.docDragData && this.timeline!.internalDocDrop(e, de, de.complete.docDragData, xp);
+ },
+ this.layoutDoc, undefined);
+ }
+ }
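setupTimelineDrop follows a dispose-and-reregister pattern: each time React supplies a new element, the previous drop target is torn down before a new one is installed. Abstracting away DragManager, the pattern looks like this (register is a hypothetical stand-in that returns its own teardown):

function retargetableDrop(register: (el: HTMLElement) => () => void) {
  let dispose: (() => void) | undefined;
  return (el: HTMLElement | null) => {
    if (el) {
      dispose?.();            // tear down the previous target, if any
      dispose = register(el); // install on the new element
    }
  };
}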
+
+ @computed get recordingControls() {
+ return <div className="audiobox-recorder">
+ <div className="audiobox-dictation" onClick={this.onFile}>
+ <FontAwesomeIcon
+ size="2x"
+ icon="file-alt" />
+ </div>
+ {[media_state.Recording, media_state.Playing].includes(this.mediaState) ?
+ <div className="recording-controls" onClick={e => e.stopPropagation()}>
+ <div className="record-button" onClick={this.Record}>
+ <FontAwesomeIcon
+ size="2x"
+ icon="stop" />
+ </div>
+ <div className="record-button" onClick={this._paused ? this.recordPlay : this.recordPause}>
+ <FontAwesomeIcon
+ size="2x"
+ icon={this._paused ? "play" : "pause"} />
+ </div>
+ <div className="record-timecode">
+ {formatTime(Math.round(NumCast(this.layoutDoc._currentTimecode)))}
+ </div>
+ </div>
+ :
+ <div className="audiobox-start-record">
+ <FontAwesomeIcon icon="microphone" />
+ RECORD
+ </div>}
+ </div>
+ }
+
+ @computed get playbackControls() {
+ return <div className="audiobox-file" style={{
+ pointerEvents: this._isAnyChildContentActive || this.props.isContentActive() ? "all" : "none",
+ flexDirection: this.miniPlayer ? "row" : "column",
+ justifyContent: this.miniPlayer ? "flex-start" : "space-between"
+ }}>
+ <div className="audiobox-controls">
+ <div className="controls-left">
+ <div className="audiobox-button"
+ title={this.mediaState === media_state.Paused ? "play" : "pause"}
+ onPointerDown={this.mediaState === media_state.Paused ? this.Play : (e) => { e.stopPropagation(); this.Pause(); }}>
+ <FontAwesomeIcon icon={this.mediaState === media_state.Paused ? "play" : "pause"} size={"1x"} />
+ </div>
+
+ {!this.miniPlayer &&
+ <div className="audiobox-button"
+ title={this.timeline?.IsTrimming !== TrimScope.None ? "finish" : "trim"}
+ onPointerDown={this.onClipPointerDown}>
+ <FontAwesomeIcon icon={this.timeline?.IsTrimming !== TrimScope.None ? "check" : "cut"} size={"1x"} />
+ </div>}
+ </div>
+ <div className="controls-right">
+ <div className="audiobox-button"
+ title={this._muted ? "unmute" : "mute"}
+ onPointerDown={(e) => { e.stopPropagation(); this.toggleMute(); }}>
+ <FontAwesomeIcon icon={this._muted ? "volume-mute" : "volume-up"} />
+ </div>
+ <input type="range" step="0.1" min="0" max="1" value={this._muted ? 0 : this._volume}
+ className="toolbar-slider volume"
+ onPointerDown={(e: React.PointerEvent) => { e.stopPropagation(); }}
+ onChange={(e: React.ChangeEvent<HTMLInputElement>) => { this.setVolume(Number(e.target.value)) }}
+ />
+ </div>
+ </div>
+
+ <div className="audiobox-playback" style={{ width: this.miniPlayer ? 0 : "100%" }}>
+ <div className="audiobox-timeline">
+ {this.renderTimeline}
+ </div>
+ </div>
+
+ {this.audio}
+
+ <div className="audiobox-timecodes">
+ <div className="timecode-current">
+ {this.timeline && formatTime(Math.round(NumCast(this.layoutDoc._currentTimecode) - NumCast(this.timeline.clipStart)))}
+ </div>
+ {!this.miniPlayer &&
+ <div className="bottom-controls-middle">
+ <FontAwesomeIcon icon="search-plus" />
+ <input type="range" step="0.1" min="1" max="5" value={this.timeline?._zoomFactor}
+ className="toolbar-slider" id="zoom-slider"
+ onPointerDown={(e: React.PointerEvent) => { e.stopPropagation(); }}
+ onChange={(e: React.ChangeEvent<HTMLInputElement>) => { this.zoom(Number(e.target.value)); }}
+ />
+ </div>}
+
+ <div className="timecode-duration">
+ {this.timeline && formatTime(Math.round(this.timeline.clipDuration))}
+ </div>
+ </div>
+
+
+ </div>
}
- isActiveChild = () => this._isAnyChildContentActive;
- timelineWhenChildContentsActiveChanged = (isActive: boolean) =>
- this.props.whenChildContentsActiveChanged(
- runInAction(() => (this._isAnyChildContentActive = isActive))
- )
- timelineScreenToLocal = () =>
- this.props
- .ScreenToLocalTransform()
- .translate(
- -AudioBox.playheadWidth,
- (-(100 - this.heightPercent) / 200) * this.props.PanelHeight()
- )
- setAnchorTime = (time: number) => {
- (this._ele!.currentTime = this.layoutDoc._currentTimecode = time);
- }
-
- timelineHeight = () =>
- (((this.props.PanelHeight() * this.heightPercent) / 100) *
- this.heightPercent) /
- 100 // panelHeight * heightPercent is player height. * heightPercent is timeline height (as per css inline)
- timelineWidth = () => this.props.PanelWidth() - AudioBox.playheadWidth;
@computed get renderTimeline() {
return (
<CollectionStackedTimeline
ref={this._stackedTimeline}
- {...this.props}
+ {...OmitKeys(this.props, ["CollectionFreeFormDocumentView"]).omit}
fieldKey={this.annotationKey}
dictationKey={this.fieldKey + "-dictation"}
mediaPath={this.path}
@@ -547,13 +560,10 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
focus={DocUtils.DefaultFocus}
bringToFront={emptyFunction}
CollectionView={undefined}
- duration={this.duration}
playFrom={this.playFrom}
- setTime={this.setAnchorTime}
+ setTime={this.setPlayheadTime}
playing={this.playing}
- whenChildContentsActiveChanged={
- this.timelineWhenChildContentsActiveChanged
- }
+ whenChildContentsActiveChanged={this.timelineWhenChildContentsActiveChanged}
moveDocument={this.moveDocument}
addDocument={this.addDocument}
removeDocument={this.removeDocument}
@@ -565,142 +575,33 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
playLink={this.playLink}
PanelWidth={this.timelineWidth}
PanelHeight={this.timelineHeight}
- trimming={this._trimming}
- trimStart={this._trimStart}
- trimEnd={this._trimEnd}
- trimDuration={this.trimDuration}
- setStartTrim={this.setStartTrim}
- setEndTrim={this.setEndTrim}
+ rawDuration={this.rawDuration}
/>
);
}
+ // returns the html audio element
+ @computed get audio() {
+ return <audio ref={this.setRef}
+ className={`audiobox-control${this.props.isContentActive() ? "-interactive" : ""}`}
+ onLoadedData={action(e =>
+ (this._ele?.duration && this._ele?.duration !== Infinity) &&
+ (this.dataDoc[this.fieldKey + "-duration"] = this._ele.duration)
+ )}
+ >
+ <source src={this.path} type="audio/mpeg" />
+ Not supported.
+ </audio>;
+ }
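The onLoadedData handler above guards against duration === Infinity: blobs produced by MediaRecorder commonly report an infinite duration until the stream is fully buffered, so only a real, finite length gets persisted. The guard in isolation:

function captureDuration(ele: HTMLAudioElement, save: (seconds: number) => void) {
  ele.addEventListener("loadeddata", () => {
    if (ele.duration && ele.duration !== Infinity) {
      save(ele.duration); // persist only a finite, nonzero length
    }
  });
}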
render() {
- const interactive =
- SnappingManager.GetIsDragging() || this.props.isContentActive()
- ? "-interactive"
- : "";
- return (
- <div
- className="audiobox-container"
- onContextMenu={this.specificContextMenu}
- onClick={
- !this.path && !this._recorder ? this.recordAudioAnnotation : undefined
- }
- style={{
- pointerEvents:
- this.props.layerProvider?.(this.layoutDoc) === false
- ? "none"
- : undefined,
- }}
- >
- {!this.path ? (
- <div className="audiobox-buttons">
- <div className="audiobox-dictation" onClick={this.onFile}>
- <FontAwesomeIcon
- style={{
- width: "30px"
- }}
- icon="file-alt"
- size={this.props.PanelHeight() < 36 ? "1x" : "2x"}
- />
- </div>
- {this.mediaState === "recording" || this.mediaState === "paused" ? (
- <div className="recording" onClick={(e) => e.stopPropagation()}>
- <div className="recording-buttons" onClick={this.recordClick}>
- <FontAwesomeIcon
- icon={"stop"}
- size={this.props.PanelHeight() < 36 ? "1x" : "2x"}
- />
- </div>
- <div
- className="recording-buttons"
- onClick={this._paused ? this.recordPlay : this.recordPause}
- >
- <FontAwesomeIcon
- icon={this._paused ? "play" : "pause"}
- size={this.props.PanelHeight() < 36 ? "1x" : "2x"}
- />
- </div>
- <div className="time">
- {formatTime(
- Math.round(NumCast(this.layoutDoc._currentTimecode))
- )}
- </div>
- </div>
- ) : (
- <div
- className={`audiobox-record${interactive}`}
- style={{ backgroundColor: Colors.DARK_GRAY }}
- >
- <FontAwesomeIcon icon="microphone" />
- RECORD
- </div>
- )}
- </div>
- ) : (
- <div
- className="audiobox-controls"
- style={{
- pointerEvents:
- this._isAnyChildContentActive || this.props.isContentActive()
- ? "all"
- : "none",
- }}
- >
- <div className="audiobox-dictation" />
- <div
- className="audiobox-player"
- style={{ height: `${AudioBox.heightPercent}%` }}
- >
- <div
- className="audiobox-buttons"
- title={this.mediaState === "paused" ? "play" : "pause"}
- onClick={this.mediaState === "paused" ? this.Play : this.Pause}
- >
- {" "}
- <FontAwesomeIcon
- icon={this.mediaState === "paused" ? "play" : "pause"}
- size={"1x"}
- />
- </div>
- <div
- className="audiobox-buttons"
- title={this._trimming ? "finish" : "trim"}
- onClick={this._trimming ? this.finishTrim : this.startTrim}
- >
- <FontAwesomeIcon
- icon={this._trimming ? "check" : "cut"}
- size={"1x"}
- />
- </div>
- <div
- className="audiobox-timeline"
- style={{
- top: 0,
- height: `100%`,
- left: AudioBox.playheadWidth,
- width: `calc(100% - ${AudioBox.playheadWidth}px)`,
- background: "white",
- }}
- >
- {this.renderTimeline}
- </div>
- {this.audio}
- <div className="audioBox-current-time">
- {this._trimming ?
- formatTime(Math.round(NumCast(this.layoutDoc._currentTimecode)))
- : formatTime(Math.round(NumCast(this.layoutDoc._currentTimecode) - NumCast(this._trimStart)))}
- </div>
- <div className="audioBox-total-time">
- {this._trimming || !this._trimEnd ?
- formatTime(Math.round(NumCast(this.duration)))
- : formatTime(Math.round(NumCast(this.trimDuration)))}
- </div>
- </div>
- </div>
- )}
- </div>
- );
+ return <div
+ ref={this.setupTimelineDrop}
+ className="audiobox-container"
+ onContextMenu={this.specificContextMenu}
+ onClick={!this.path && !this._recorder ? this.recordAudioAnnotation : undefined}
+ style={{ pointerEvents: this.props.layerProvider?.(this.layoutDoc) === false ? "none" : undefined }}
+ >
+ {!this.path ? this.recordingControls : this.playbackControls}
+ </div>;
}
}