path: root/src/client/views/nodes/AudioBox.tsx
Diffstat (limited to 'src/client/views/nodes/AudioBox.tsx')
-rw-r--r--  src/client/views/nodes/AudioBox.tsx  291
1 file changed, 136 insertions(+), 155 deletions(-)
diff --git a/src/client/views/nodes/AudioBox.tsx b/src/client/views/nodes/AudioBox.tsx
index 6e6558030..fa78d2301 100644
--- a/src/client/views/nodes/AudioBox.tsx
+++ b/src/client/views/nodes/AudioBox.tsx
@@ -1,6 +1,6 @@
import React = require("react");
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
-import { action, computed, IReactionDisposer, observable, reaction, runInAction } from "mobx";
+import { action, computed, IReactionDisposer, observable, runInAction } from "mobx";
import { observer } from "mobx-react";
import { DateField } from "../../../fields/DateField";
import { Doc, DocListCast } from "../../../fields/Doc";
@@ -19,10 +19,8 @@ import { CollectionStackedTimeline, TrimScope } from "../collections/CollectionS
import { ContextMenu } from "../ContextMenu";
import { ContextMenuProps } from "../ContextMenuItem";
import { ViewBoxAnnotatableComponent, ViewBoxAnnotatableProps } from "../DocComponent";
-import { Colors } from "../global/globalEnums";
import "./AudioBox.scss";
import { FieldView, FieldViewProps } from "./FieldView";
-import { LinkDocPreview } from "./LinkDocPreview";
declare class MediaRecorder {
constructor(e: any); // whatever MediaRecorder has
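
For context, a minimal sketch of the browser MediaRecorder flow that this ambient declaration stands in for (assuming only the standard DOM API; recordOnce, stream, and chunks are illustrative names, not identifiers from AudioBox.tsx):

    // Capture microphone input and resolve with the recorded audio as a Blob.
    async function recordOnce(): Promise<Blob> {
        const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
        const recorder = new MediaRecorder(stream);
        const chunks: Blob[] = [];
        recorder.ondataavailable = (e: BlobEvent) => chunks.push(e.data);
        recorder.start();
        return new Promise(resolve => {
            recorder.onstop = () => {
                stream.getAudioTracks().forEach(t => t.stop()); // release the mic, as stopRecording does below
                resolve(new Blob(chunks, { type: "audio/mpeg" }));
            };
            setTimeout(() => recorder.stop(), 5000); // stop after 5s, just for the sketch
        });
    }
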
@@ -36,10 +34,14 @@ enum media_state {
Recording = "recording",
Paused = "paused",
Playing = "playing"
-};
+}
@observer
export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProps & FieldViewProps, AudioDocument>(AudioDocument) {
public static LayoutString(fieldKey: string) { return FieldView.LayoutString(AudioBox, fieldKey); }
+ public static SetScrubTime = action((timeInMillisFrom1970: number) => {
+ AudioBox._scrubTime = 0;
+ AudioBox._scrubTime = timeInMillisFrom1970;
+ });
public static Enabled = false;
static playheadWidth = 40; // width of playhead
static heightPercent = 75; // height of timeline in percent of height of audioBox.
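
The double assignment in SetScrubTime is a MobX idiom: rewriting an observable with its current value produces no change notification, so resetting to 0 inside the same action guarantees downstream observers fire even when the same timestamp is scrubbed twice in a row. A hedged sketch of the pattern in isolation (not code from this commit):

    import { action, autorun, observable } from "mobx";

    const scrubTime = observable.box(0);
    const setScrubTime = action((t: number) => {
        scrubTime.set(0); // register a change...
        scrubTime.set(t); // ...so observers run even if t equals the previous value
    });
    autorun(() => console.log("scrub to", scrubTime.get())); // re-runs after every setScrubTime call
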
@@ -63,13 +65,30 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
@computed get links() { return DocListCast(this.dataDoc.links); }
@computed get pauseTime() { return this._pauseEnd - this._pauseStart; } // total time paused to update the correct time
@computed get mediaState() { return this.layoutDoc.mediaState as media_state; }
+ @computed get path() { // returns the path of the audio file
+ const path = Cast(this.props.Document[this.fieldKey], AudioField, null)?.url.href || "";
+ return path === nullAudio ? "" : path;
+ }
set mediaState(value) { this.layoutDoc.mediaState = value; }
- get timeline() { return this._stackedTimeline.current; }
- public static SetScrubTime = action((timeInMillisFrom1970: number) => {
- AudioBox._scrubTime = 0;
- AudioBox._scrubTime = timeInMillisFrom1970;
- });
+ get timeline() { return this._stackedTimeline.current; } // can't be computed since it's not observable
+
+ componentWillUnmount() {
+ this._dropDisposer?.();
+ Object.values(this._disposers).forEach((disposer) => disposer?.());
+ const ind = DocUtils.ActiveRecordings.indexOf(this);
+ ind !== -1 && DocUtils.ActiveRecordings.splice(ind, 1);
+ }
+
+ @action
+ componentDidMount() {
+ this.props.setContentView?.(this); // this tells the DocumentView that this AudioBox is the "content" of the document. this allows the DocumentView to indirectly call getAnchor() on the AudioBox when making a link.
+
+ this.mediaState = this.path ? media_state.Paused : undefined as any as media_state;
+
+ this.path && this.setAnchorTime(NumCast(this.layoutDoc.clipStart));
+ this.path && this.timecodeChanged();
+ }
getLinkData(l: Doc) {
let la1 = l.anchor1 as Doc;
@@ -100,34 +119,15 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
) || this.rootDoc;
}
- componentWillUnmount() {
- this._dropDisposer?.();
- Object.values(this._disposers).forEach((disposer) => disposer?.());
- const ind = DocUtils.ActiveRecordings.indexOf(this);
- ind !== -1 && DocUtils.ActiveRecordings.splice(ind, 1);
- }
-
- @action
- componentDidMount() {
- this.props.setContentView?.(this); // this tells the DocumentView that this AudioBox is the "content" of the document. this allows the DocumentView to indirectly call getAnchor() on the AudioBox when making a link.
-
- this.mediaState = this.path ? media_state.Paused : undefined as any as media_state;
-
- this.path && this.setAnchorTime(NumCast(this.layoutDoc.clipStart));
- this.path && this.timecodeChanged();
- }
-
// for updating the timecode
@action
timecodeChanged = () => {
if (this.mediaState !== media_state.Recording && this._ele) {
this.links
- .map((l) => this.getLinkData(l))
+ .map(l => this.getLinkData(l))
.forEach(({ la1, la2, linkTime }) => {
- if (
- linkTime > NumCast(this.layoutDoc._currentTimecode) &&
- linkTime < this._ele!.currentTime
- ) {
+ if (linkTime > NumCast(this.layoutDoc._currentTimecode) &&
+ linkTime < this._ele!.currentTime) {
Doc.linkFollowHighlight(la1);
}
});
@@ -135,23 +135,11 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
}
- // pause play back
- Pause = action(() => {
- this._ele!.pause();
- this.mediaState = media_state.Paused;
- });
-
- // play audio for documents created during recording
- playFromTime = (absoluteTime: number) => {
- this.recordingStart &&
- this.playFrom((absoluteTime - this.recordingStart) / 1000);
- }
-
// play back the audio from time
@action
- playFrom = (seekTimeInSeconds: number, endTime?: number, fullPlay: boolean = false): any => {
- clearTimeout(this._play);
- if (Number.isNaN(this._ele?.duration)) {
+ playFrom = (seekTimeInSeconds: number, endTime?: number, fullPlay: boolean = false) => {
+ clearTimeout(this._play); // abort any previous clip ending
+ if (Number.isNaN(this._ele?.duration)) { // audio element isn't loaded yet... wait 1/2 second and try again
setTimeout(() => this.playFrom(seekTimeInSeconds, endTime), 500);
}
else if (this.timeline && this._ele && AudioBox.Enabled) {
@@ -160,18 +148,13 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
if (seekTimeInSeconds >= 0 && this.timeline.trimStart <= end && seekTimeInSeconds <= this.timeline.trimEnd) {
this._ele.currentTime = start;
this._ele.play();
- runInAction(() => this.mediaState = media_state.Playing);
- if (end !== this.timeline.clipDuration) {
- return this._play = setTimeout(
- () => {
- if (fullPlay) this.setAnchorTime(this.timeline!.trimStart);
- this.Pause();
- },
- (end - start) * 1000
- ); // use setTimeout to play a specific duration
- }
- } else if (seekTimeInSeconds < 0 && seekTimeInSeconds > -1) {
- setTimeout(() => this.playFrom(0), -seekTimeInSeconds * 1000);
+ this.mediaState = media_state.Playing;
+ this._play = setTimeout(
+ () => {
+ if (fullPlay) this.setAnchorTime(this.timeline!.trimStart);
+ this.Pause();
+ },
+ (end - start) * 1000);
} else {
this.Pause();
}
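
The rewritten playFrom relies on a single timer to end a clip: seek, play, and schedule a Pause after the clip's duration, clearing any previously scheduled stop on re-entry. A hedged sketch of that timer pattern, with illustrative names (playClip, stopTimer are not identifiers from this file):

    let stopTimer: number | undefined;

    function playClip(ele: HTMLAudioElement, start: number, end: number, onEnded: () => void) {
        if (Number.isNaN(ele.duration)) { // metadata not loaded yet; retry shortly, as playFrom does
            setTimeout(() => playClip(ele, start, end, onEnded), 500);
            return;
        }
        clearTimeout(stopTimer); // abort any previously scheduled clip ending
        ele.currentTime = start;
        ele.play();
        stopTimer = window.setTimeout(() => { ele.pause(); onEnded(); }, (end - start) * 1000);
    }
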
@@ -185,8 +168,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
if (this._paused) {
this._pausedTime += (new Date().getTime() - this._recordStart) / 1000;
} else {
- this.layoutDoc._currentTimecode =
- (new Date().getTime() - this._recordStart - this.pauseTime) / 1000;
+ this.layoutDoc._currentTimecode = (new Date().getTime() - this._recordStart - this.pauseTime) / 1000;
}
}
}
@@ -205,9 +187,22 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
};
this._recordStart = new Date().getTime();
runInAction(() => this.mediaState = media_state.Recording);
- setTimeout(this.updateRecordTime, 0);
+ setTimeout(this.updateRecordTime);
this._recorder.start();
- setTimeout(() => this.stopRecording(), 60 * 60 * 1000); // stop after an hour
+ setTimeout(this.stopRecording, 60 * 60 * 1000); // stop after an hour
+ }
+
+ @action
+ stopRecording = () => {
+ if (this._recorder) {
+ this._recorder.stop();
+ this._recorder = undefined;
+ this.dataDoc[this.fieldKey + "-duration"] = (new Date().getTime() - this._recordStart - this.pauseTime) / 1000;
+ this.mediaState = media_state.Paused;
+ this._stream?.getAudioTracks()[0].stop();
+ const ind = DocUtils.ActiveRecordings.indexOf(this);
+ ind !== -1 && DocUtils.ActiveRecordings.splice(ind, 1);
+ }
}
// context menu
@@ -243,22 +238,8 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
});
}
- // stops the recording
- stopRecording = action(() => {
- if (this._recorder) {
- this._recorder.stop();
- this._recorder = undefined;
- this.dataDoc[this.fieldKey + "-duration"] =
- (new Date().getTime() - this._recordStart - this.pauseTime) / 1000;
- this.mediaState = media_state.Paused;
- this._stream?.getAudioTracks()[0].stop();
- const ind = DocUtils.ActiveRecordings.indexOf(this);
- ind !== -1 && DocUtils.ActiveRecordings.splice(ind, 1);
- }
- });
-
// button for starting and stopping the recording
- recordClick = (e: React.MouseEvent) => {
+ Record = (e: React.MouseEvent) => {
if (e.button === 0 && !e.ctrlKey) {
this._recorder ? this.stopRecording() : this.recordAudioAnnotation();
e.stopPropagation();
@@ -267,12 +248,19 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
// for play button
Play = (e?: any) => {
- const eleTime = this._ele!.currentTime;
- const start = eleTime === this.timeline?.trimDuration ? this.timeline.trimStart : eleTime;
+ const eleTime = this._ele?.currentTime || 0;
+ const start = eleTime === this.timeline?.trimEnd ? this.timeline.trimStart : eleTime;
this.playFrom(start, undefined, true);
e?.stopPropagation?.();
}
+ // pause play back
+ @action
+ Pause = () => {
+ this._ele?.pause();
+ this.mediaState = media_state.Paused;
+ }
+
// creates a text document for dictation
onFile = (e: any) => {
const newDoc = CurrentUserUtils.GetNewTextDoc(
@@ -302,27 +290,6 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
this._ele = e;
}
- // returns the path of the audio file
- @computed get path() {
- const path = Cast(this.props.Document[this.fieldKey], AudioField, null)?.url.href || "";
- return path === nullAudio ? "" : path;
- }
-
- // returns the html audio element
- @computed get audio() {
- return <audio ref={this.setRef}
- onLoadedData={action(e => {
- const duration = this._ele?.duration;
- if (duration && duration !== Infinity) {
- this.dataDoc[this.fieldKey + "-duration"] = duration;
- }
- })}
- className={`audiobox-control${this.props.isContentActive() ? "-interactive" : ""}`}>
- <source src={this.path} type="audio/mpeg" />
- Not supported.
- </audio>;
- }
-
// pause the time during recording phase
@action
recordPause = (e: React.MouseEvent) => {
@@ -341,14 +308,12 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
e.stopPropagation();
}
- playing = () => this.mediaState === media_state.Playing;
playLink = (link: Doc) => {
if (link.annotationOn === this.rootDoc) {
if (!this.layoutDoc.dontAutoPlayFollowedLinks) {
this.playFrom(this.timeline?.anchorStart(link) || 0, this.timeline?.anchorEnd(link));
} else {
- this._ele!.currentTime = this.layoutDoc._currentTimecode =
- this.timeline?.anchorStart(link) || 0;
+ this._ele!.currentTime = this.layoutDoc._currentTimecode = this.timeline?.anchorStart(link) || 0;
}
} else {
this.links
@@ -368,6 +333,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
}
+ playing = () => this.mediaState === media_state.Playing;
isActiveChild = () => this._isAnyChildContentActive;
timelineWhenChildContentsActiveChanged = (isActive: boolean) =>
this.props.whenChildContentsActiveChanged(
@@ -380,54 +346,21 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
-AudioBox.playheadWidth,
(-(100 - AudioBox.heightPercent) / 200) * this.props.PanelHeight()
)
- setAnchorTime = (time: number) => {
- (this._ele!.currentTime = this.layoutDoc._currentTimecode = time);
- }
+ setAnchorTime = (time: number) => this._ele!.currentTime = this.layoutDoc._currentTimecode = time;
+ timelineWidth = () => this.props.PanelWidth() - AudioBox.playheadWidth;
timelineHeight = () =>
(((this.props.PanelHeight() * AudioBox.heightPercent) / 100) *
AudioBox.heightPercent) / 100 // panelHeight * heightPercent is player height. * heightPercent is timeline height (as per css inline)
- timelineWidth = () => this.props.PanelWidth() - AudioBox.playheadWidth;
- @computed get renderTimeline() {
- return (
- <CollectionStackedTimeline
- ref={this._stackedTimeline}
- {...OmitKeys(this.props, ["CollectionFreeFormDocumentView"]).omit}
- fieldKey={this.annotationKey}
- dictationKey={this.fieldKey + "-dictation"}
- mediaPath={this.path}
- renderDepth={this.props.renderDepth + 1}
- startTag={"_timecodeToShow" /* audioStart */}
- endTag={"_timecodeToHide" /* audioEnd */}
- focus={DocUtils.DefaultFocus}
- bringToFront={emptyFunction}
- CollectionView={undefined}
- playFrom={this.playFrom}
- setTime={this.setAnchorTime}
- playing={this.playing}
- whenChildContentsActiveChanged={this.timelineWhenChildContentsActiveChanged}
- moveDocument={this.moveDocument}
- addDocument={this.addDocument}
- removeDocument={this.removeDocument}
- ScreenToLocalTransform={this.timelineScreenToLocal}
- Play={this.Play}
- Pause={this.Pause}
- isContentActive={this.props.isContentActive}
- isAnyChildContentActive={this.isAnyChildContentActive}
- playLink={this.playLink}
- PanelWidth={this.timelineWidth}
- PanelHeight={this.timelineHeight}
- rawDuration={this.rawDuration}
- />
- );
- }
- // hides trim controls and displays new clip
+
+
@undoBatch
- finishTrim = action(() => {
+ finishTrim = () => { // hides trim controls and displays new clip
this.Pause();
this.setAnchorTime(Math.max(Math.min(this.timeline?.trimEnd || 0, this._ele!.currentTime), this.timeline?.trimStart || 0));
this.timeline?.StopTrimming();
- });
+ }
+
startTrim = (scope: TrimScope) => {
this.Pause();
this.timeline?.StartTrimming(scope);
@@ -444,6 +377,18 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}));
}
+ setupTimelineDrop = (r: HTMLDivElement | null) => {
+ if (r && this.timeline) {
+ this._dropDisposer?.();
+ this._dropDisposer = DragManager.MakeDropTarget(r,
+ (e, de) => {
+ const [xp, yp] = this.props.ScreenToLocalTransform().transformPoint(de.x, de.y);
+ de.complete.docDragData && this.timeline!.internalDocDrop(e, de, de.complete.docDragData, xp);
+ },
+ this.layoutDoc, undefined);
+ }
+ }
+
@computed get recordingControls() {
return <div className="audiobox-buttons">
<div className="audiobox-dictation" onClick={this.onFile}>
@@ -453,8 +398,8 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
size={this.props.PanelHeight() < 36 ? "1x" : "2x"} />
</div>
{[media_state.Recording, media_state.Playing].includes(this.mediaState) ?
- <div className="recording" onClick={(e) => e.stopPropagation()}>
- <div className="recording-buttons" onClick={this.recordClick}>
+ <div className="recording" onClick={e => e.stopPropagation()}>
+ <div className="recording-buttons" onClick={this.Record}>
<FontAwesomeIcon
icon="stop"
size={this.props.PanelHeight() < 36 ? "1x" : "2x"} />
@@ -522,16 +467,52 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
</div>;
}
- setupTimelineDrop = (r: HTMLDivElement | null) => {
- if (r && this.timeline) {
- this._dropDisposer?.();
- this._dropDisposer = DragManager.MakeDropTarget(r,
- (e, de) => {
- const [xp, yp] = this.props.ScreenToLocalTransform().transformPoint(de.x, de.y);
- de.complete.docDragData && this.timeline!.internalDocDrop(e, de, de.complete.docDragData, xp);
- },
- this.layoutDoc, undefined);
- }
+ @computed get renderTimeline() {
+ return (
+ <CollectionStackedTimeline
+ ref={this._stackedTimeline}
+ {...OmitKeys(this.props, ["CollectionFreeFormDocumentView"]).omit}
+ fieldKey={this.annotationKey}
+ dictationKey={this.fieldKey + "-dictation"}
+ mediaPath={this.path}
+ renderDepth={this.props.renderDepth + 1}
+ startTag={"_timecodeToShow" /* audioStart */}
+ endTag={"_timecodeToHide" /* audioEnd */}
+ focus={DocUtils.DefaultFocus}
+ bringToFront={emptyFunction}
+ CollectionView={undefined}
+ playFrom={this.playFrom}
+ setTime={this.setAnchorTime}
+ playing={this.playing}
+ whenChildContentsActiveChanged={this.timelineWhenChildContentsActiveChanged}
+ moveDocument={this.moveDocument}
+ addDocument={this.addDocument}
+ removeDocument={this.removeDocument}
+ ScreenToLocalTransform={this.timelineScreenToLocal}
+ Play={this.Play}
+ Pause={this.Pause}
+ isContentActive={this.props.isContentActive}
+ isAnyChildContentActive={this.isAnyChildContentActive}
+ playLink={this.playLink}
+ PanelWidth={this.timelineWidth}
+ PanelHeight={this.timelineHeight}
+ rawDuration={this.rawDuration}
+ />
+ );
+ }
+ // returns the html audio element
+ @computed get audio() {
+ return <audio ref={this.setRef}
+ onLoadedData={action(e => {
+ const duration = this._ele?.duration;
+ if (duration && duration !== Infinity) {
+ this.dataDoc[this.fieldKey + "-duration"] = duration;
+ }
+ })}
+ className={`audiobox-control${this.props.isContentActive() ? "-interactive" : ""}`}>
+ <source src={this.path} type="audio/mpeg" />
+ Not supported.
+ </audio>;
}
render() {