Diffstat (limited to 'src/client/views/nodes/AudioBox.tsx')
 -rw-r--r--  src/client/views/nodes/AudioBox.tsx | 89
 1 file changed, 43 insertions(+), 46 deletions(-)
diff --git a/src/client/views/nodes/AudioBox.tsx b/src/client/views/nodes/AudioBox.tsx
index d97cb6f84..669622455 100644
--- a/src/client/views/nodes/AudioBox.tsx
+++ b/src/client/views/nodes/AudioBox.tsx
@@ -4,8 +4,6 @@ import { action, computed, IReactionDisposer, observable, runInAction } from "mo
import { observer } from "mobx-react";
import { DateField } from "../../../fields/DateField";
import { Doc, DocListCast } from "../../../fields/Doc";
-import { documentSchema } from "../../../fields/documentSchemas";
-import { makeInterface } from "../../../fields/Schema";
import { ComputedField } from "../../../fields/ScriptField";
import { Cast, DateCast, NumCast } from "../../../fields/Types";
import { AudioField, nullAudio } from "../../../fields/URLField";
@@ -84,12 +82,12 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
@computed get miniPlayer() { return this.props.PanelHeight() < 50; } // used to collapse timeline when node is shrunk
@computed get links() { return DocListCast(this.dataDoc.links); }
@computed get pauseTime() { return this._pauseEnd - this._pauseStart; } // total time paused to update the correct recording time
- @computed get mediaState() { return this.layoutDoc.mediaState as media_state; }
+ @computed get mediaState() { return this.dataDoc.mediaState as media_state; }
@computed get path() { // returns the path of the audio file
const path = Cast(this.props.Document[this.fieldKey], AudioField, null)?.url.href || "";
return path === nullAudio ? "" : path;
}
- set mediaState(value) { this.layoutDoc.mediaState = value; }
+ set mediaState(value) { this.dataDoc.mediaState = value; }
@computed get timeline() { return this._stackedTimeline; } // returns CollectionStackedTimeline ref
@@ -237,9 +235,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
this.dataDoc[this.fieldKey + "-recordingStart"] = new DateField();
DocUtils.ActiveRecordings.push(this);
this._recorder.ondataavailable = async (e: any) => {
- console.log("Data available", e);
const [{ result }] = await Networking.UploadFilesToServer(e.data);
- console.log("Data result", result);
if (!(result instanceof Error)) {
this.props.Document[this.fieldKey] = new AudioField(result.accessPaths.agnostic.client);
}
@@ -298,11 +294,10 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
// button for starting and stopping the recording
- Record = (e: React.MouseEvent) => {
- if (e.button === 0 && !e.ctrlKey) {
+ Record = (e: React.PointerEvent) => {
+ e.button === 0 && !e.ctrlKey && setupMoveUpEvents(this, e, returnFalse, returnFalse, action(() => {
this._recorder ? this.stopRecording() : this.recordAudioAnnotation();
- e.stopPropagation();
- }
+ }), false);
}
// for play button
@@ -340,24 +335,28 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
// for dictation button, creates a text document for dictation
onFile = (e: any) => {
- const newDoc = CurrentUserUtils.GetNewTextDoc(
- "",
- NumCast(this.rootDoc.x),
- NumCast(this.rootDoc.y) +
- NumCast(this.layoutDoc._height) +
- 10,
- NumCast(this.layoutDoc._width),
- 2 * NumCast(this.layoutDoc._height)
- );
- Doc.GetProto(newDoc).recordingSource = this.dataDoc;
- Doc.GetProto(newDoc).recordingStart = ComputedField.MakeFunction(
- `self.recordingSource["${this.fieldKey}-recordingStart"]`
- );
- Doc.GetProto(newDoc).mediaState = ComputedField.MakeFunction(
- "self.recordingSource.mediaState"
- );
- this.props.addDocument?.(newDoc);
- e.stopPropagation();
+ setupMoveUpEvents(this, e, returnFalse, returnFalse, action(() => {
+ const newDoc = CurrentUserUtils.GetNewTextDoc(
+ "",
+ NumCast(this.rootDoc.x),
+ NumCast(this.rootDoc.y) +
+ NumCast(this.layoutDoc._height) +
+ 10,
+ NumCast(this.layoutDoc._width),
+ 2 * NumCast(this.layoutDoc._height)
+ );
+ Doc.GetProto(newDoc).recordingSource = this.dataDoc;
+ Doc.GetProto(newDoc).recordingStart = ComputedField.MakeFunction(`self.recordingSource["${this.fieldKey}-recordingStart"]`);
+ Doc.GetProto(newDoc).mediaState = ComputedField.MakeFunction("self.recordingSource.mediaState");
+ const overlayDoc = Doc.UserDoc().myOverlayDocs as Doc;
+ if (DocListCast(overlayDoc[Doc.LayoutFieldKey(overlayDoc)]).includes(this.rootDoc)) {
+ newDoc.x = this.rootDoc.x;
+ newDoc.y = NumCast(this.rootDoc.y) + NumCast(this.rootDoc._height);
+ Doc.AddDocToList(overlayDoc, undefined, newDoc);
+ } else {
+ this.props.addDocument?.(newDoc);
+ }
+ }), false);
}
@@ -370,21 +369,21 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
// pause the time during recording phase
- @action
- recordPause = (e: React.MouseEvent) => {
- this._pauseStart = new Date().getTime();
- this._paused = true;
- this._recorder.pause();
- e.stopPropagation();
+ recordPause = (e: React.PointerEvent) => {
+ setupMoveUpEvents(this, e, returnFalse, returnFalse, action(() => {
+ this._pauseStart = new Date().getTime();
+ this._paused = true;
+ this._recorder.pause();
+ }), false);
}
// continue the recording
- @action
- recordPlay = (e: React.MouseEvent) => {
- this._pauseEnd = new Date().getTime();
- this._paused = false;
- this._recorder.resume();
- e.stopPropagation();
+ recordPlay = (e: React.PointerEvent) => {
+ setupMoveUpEvents(this, e, returnFalse, returnFalse, action(() => {
+ this._pauseEnd = new Date().getTime();
+ this._paused = false;
+ this._recorder.resume();
+ }), false);
}
@@ -503,19 +502,19 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
// UI for recording, initially displayed when new audio created in Dash
@computed get recordingControls() {
return <div className="audiobox-recorder">
- <div className="audiobox-dictation" onClick={this.onFile}>
+ <div className="audiobox-dictation" onPointerDown={this.onFile}>
<FontAwesomeIcon
size="2x"
icon="file-alt" />
</div>
{[media_state.Recording, media_state.Playing].includes(this.mediaState) ?
<div className="recording-controls" onClick={e => e.stopPropagation()}>
- <div className="record-button" onClick={this.Record}>
+ <div className="record-button" onPointerDown={this.Record}>
<FontAwesomeIcon
size="2x"
icon="stop" />
</div>
- <div className="record-button" onClick={this._paused ? this.recordPlay : this.recordPause}>
+ <div className="record-button" onPointerDown={this._paused ? this.recordPlay : this.recordPause}>
<FontAwesomeIcon
size="2x"
icon={this._paused ? "play" : "pause"} />
@@ -525,7 +524,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
</div>
</div>
:
- <div className="audiobox-start-record">
+ <div className="audiobox-start-record" onPointerDown={this.Record}>
<FontAwesomeIcon icon="microphone" />
RECORD
</div>}
@@ -611,7 +610,6 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
renderDepth={this.props.renderDepth + 1}
startTag={"_timecodeToShow" /* audioStart */}
endTag={"_timecodeToHide" /* audioEnd */}
- focus={DocUtils.DefaultFocus}
bringToFront={emptyFunction}
CollectionView={undefined}
playFrom={this.playFrom}
@@ -653,7 +651,6 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
ref={this.setupTimelineDrop}
className="audiobox-container"
onContextMenu={this.specificContextMenu}
- onClick={!this.path && !this._recorder ? this.recordAudioAnnotation : undefined}
style={{ pointerEvents: this.layoutDoc._lockedPosition ? "none" : undefined }}
>
{!this.path ? this.recordingControls : this.playbackControls}
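
Note on the pattern introduced above: the new pointer handlers (Record, onFile, recordPause, recordPlay) all defer to setupMoveUpEvents(this, e, returnFalse, returnFalse, action(cb), false), which fires the callback on pointer-up only when no drag occurred, replacing the earlier onClick handlers that relied on e.stopPropagation(). A minimal standalone sketch of that click-versus-drag gating follows; the helper name runOnClickWithoutDrag, its signature, and the 4px movement threshold are illustrative assumptions, not Dash's actual setupMoveUpEvents implementation.

import * as React from "react";

// Illustrative sketch only: runs `onClick` on pointer-up unless the pointer
// moved more than `moveThreshold` pixels, so a press can double as the start
// of a drag without needing stopPropagation() in the button handler.
function runOnClickWithoutDrag(e: React.PointerEvent, onClick: () => void, moveThreshold = 4) {
    if (e.button !== 0 || e.ctrlKey) return; // left button only, mirroring Record()
    const startX = e.clientX;
    const startY = e.clientY;
    let dragged = false;

    const onMove = (move: PointerEvent) => {
        if (Math.abs(move.clientX - startX) > moveThreshold ||
            Math.abs(move.clientY - startY) > moveThreshold) {
            dragged = true; // pointer moved: treat as a drag, not a click
        }
    };
    const onUp = () => {
        document.removeEventListener("pointermove", onMove);
        document.removeEventListener("pointerup", onUp);
        if (!dragged) onClick(); // only a stationary press triggers the action
    };
    document.addEventListener("pointermove", onMove);
    document.addEventListener("pointerup", onUp);
}

// Hypothetical usage in a render method:
//   <div className="record-button" onPointerDown={e => runOnClickWithoutDrag(e, () => this.stopRecording())} />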