aboutsummaryrefslogtreecommitdiff
path: root/src
diff options
context:
space:
mode:
authorbobzel <zzzman@gmail.com>2021-03-23 16:05:52 -0400
committerbobzel <zzzman@gmail.com>2021-03-23 16:05:52 -0400
commit4b699bd0ded39983227d11e75c8d187546190f00 (patch)
treeef1525b88303e00fa1b8562466f45a2756e6a428 /src
parentb6dc6e1a6219e73fcabe54d68b5bb08209ffa021 (diff)
changed audio/video to use playFrom() viewbox method instead of audioTrigger/videoTrigger doc field. added a preliminary form of transcription to screenshot boxes.
Diffstat (limited to 'src')
-rw-r--r--src/client/documents/Documents.ts10
-rw-r--r--src/client/util/DocumentManager.ts1
-rw-r--r--src/client/views/SidebarAnnos.tsx10
-rw-r--r--src/client/views/nodes/DocumentView.tsx1
-rw-r--r--src/client/views/nodes/PresBox.tsx23
-rw-r--r--src/client/views/nodes/ScreenshotBox.tsx21
-rw-r--r--src/client/views/nodes/formattedText/FormattedTextBox.tsx37
7 files changed, 70 insertions, 33 deletions
diff --git a/src/client/documents/Documents.ts b/src/client/documents/Documents.ts
index da434ab77..cdbf43cd7 100644
--- a/src/client/documents/Documents.ts
+++ b/src/client/documents/Documents.ts
@@ -1048,12 +1048,10 @@ export namespace DocUtils {
export let ActiveRecordings: { props: FieldViewProps, getAnchor: () => Doc }[] = [];
- export function MakeLinkToActiveAudio(doc: Doc) {
- let lastLink: Doc | undefined;
- DocUtils.ActiveRecordings.map(audio => {
- lastLink = DocUtils.MakeLink({ doc: doc }, { doc: audio.getAnchor() || audio.props.Document }, "recording link", "recording timeline");
- });
- return lastLink;
+ export function MakeLinkToActiveAudio(doc: Doc, broadcastEvent = true) {
+ broadcastEvent && runInAction(() => DocumentManager.Instance.RecordingEvent = DocumentManager.Instance.RecordingEvent + 1);
+ return DocUtils.ActiveRecordings.map(audio =>
+ DocUtils.MakeLink({ doc: doc }, { doc: audio.getAnchor() || audio.props.Document }, "recording link", "recording timeline")).lastElement();
}
export function MakeLink(source: { doc: Doc }, target: { doc: Doc }, linkRelationship: string = "", description: string = "", id?: string, allowParCollectionLink?: boolean, showPopup?: number[]) {
diff --git a/src/client/util/DocumentManager.ts b/src/client/util/DocumentManager.ts
index 34ff03335..d4623be28 100644
--- a/src/client/util/DocumentManager.ts
+++ b/src/client/util/DocumentManager.ts
@@ -14,6 +14,7 @@ export class DocumentManager {
//global holds all of the nodes (regardless of which collection they're in)
@observable public DocumentViews: DocumentView[] = [];
+ @observable public RecordingEvent = 0;
@observable public LinkedDocumentViews: { a: DocumentView, b: DocumentView, l: Doc }[] = [];
private static _instance: DocumentManager;
diff --git a/src/client/views/SidebarAnnos.tsx b/src/client/views/SidebarAnnos.tsx
index a859dcab4..87887483f 100644
--- a/src/client/views/SidebarAnnos.tsx
+++ b/src/client/views/SidebarAnnos.tsx
@@ -71,7 +71,7 @@ export class SidebarAnnos extends React.Component<FieldViewProps & ExtraProps> {
filtersHeight = () => 50;
screenToLocalTransform = () => this.props.ScreenToLocalTransform().translate(Doc.NativeWidth(this.props.dataDoc), 0).scale(this.props.scaling?.() || 1);
panelWidth = () => !this.props.layoutDoc._showSidebar ? 0 : (NumCast(this.props.layoutDoc.nativeWidth) - Doc.NativeWidth(this.props.dataDoc)) * this.props.PanelWidth() / NumCast(this.props.layoutDoc.nativeWidth);
- panelHeight = () => this.props.PanelHeight() - this.filtersHeight() - 20;
+ panelHeight = () => this.props.PanelHeight() - this.filtersHeight();
addDocument = (doc: Doc | Doc[]) => this.props.sidebarAddDocument(doc, this.sidebarKey());
moveDocument = (doc: Doc | Doc[], targetCollection: Doc | undefined, addDocument: (doc: Doc | Doc[]) => boolean) => this.props.moveDocument(doc, targetCollection, addDocument, this.sidebarKey());
removeDocument = (doc: Doc | Doc[]) => this.props.removeDocument(doc, this.sidebarKey());
@@ -103,6 +103,10 @@ export class SidebarAnnos extends React.Component<FieldViewProps & ExtraProps> {
width: `${this.panelWidth()}px`,
height: "100%"
}}>
+ <div className="sidebarAnnos-tagList" style={{ height: this.filtersHeight(), width: this.panelWidth() }}>
+ {this.allUsers.map(renderUsers)}
+ {this.allHashtags.map(renderTag)}
+ </div>
<div style={{ width: "100%", height: this.panelHeight(), position: "relative" }}>
<CollectionStackingView {...OmitKeys(this.props, ["NativeWidth", "NativeHeight", "setContentView"]).omit} ref={this._stackRef}
NativeWidth={returnZero}
@@ -131,10 +135,6 @@ export class SidebarAnnos extends React.Component<FieldViewProps & ExtraProps> {
pointerEvents={"all"}
/>
</div>
- <div className="sidebarAnnos-tagList" style={{ height: this.filtersHeight(), width: this.panelWidth() }}>
- {this.allUsers.map(renderUsers)}
- {this.allHashtags.map(renderTag)}
- </div>
</div>;
}
} \ No newline at end of file
diff --git a/src/client/views/nodes/DocumentView.tsx b/src/client/views/nodes/DocumentView.tsx
index 830da9dc0..3f76db46d 100644
--- a/src/client/views/nodes/DocumentView.tsx
+++ b/src/client/views/nodes/DocumentView.tsx
@@ -85,6 +85,7 @@ export interface DocComponentView {
menuControls?: () => JSX.Element; // controls to display in the top menu bar when the document is selected.
getKeyFrameEditing?: () => boolean; // whether the document is in keyframe editing mode (if it is, then all hidden documents that are not active at the keyframe time will still be shown)
setKeyFrameEditing?: (set: boolean) => void; // whether the document is in keyframe editing mode (if it is, then all hidden documents that are not active at the keyframe time will still be shown)
+ playFrom?: (time: number, endTime?: number) => void;
}
export interface DocumentViewSharedProps {
renderDepth: number;
diff --git a/src/client/views/nodes/PresBox.tsx b/src/client/views/nodes/PresBox.tsx
index 6da215c5f..2aba461e0 100644
--- a/src/client/views/nodes/PresBox.tsx
+++ b/src/client/views/nodes/PresBox.tsx
@@ -230,17 +230,20 @@ export class PresBox extends ViewBoxBaseComponent<FieldViewProps, PresBoxSchema>
// 'Play on next' for audio or video therefore first navigate to the audio/video before it should be played
startTempMedia = (targetDoc: Doc, activeItem: Doc) => {
const duration: number = NumCast(activeItem.presEndTime) - NumCast(activeItem.presStartTime);
- if (targetDoc.type === DocumentType.AUDIO) {
- if (this._mediaTimer && this._mediaTimer[1] === targetDoc) clearTimeout(this._mediaTimer[0]);
- targetDoc._triggerAudio = NumCast(activeItem.presStartTime);
- this._mediaTimer = [setTimeout(() => targetDoc._audioStop = true, duration * 1000), targetDoc];
- } else if (targetDoc.type === DocumentType.VID) {
- if (this._mediaTimer && this._mediaTimer[1] === targetDoc) clearTimeout(this._mediaTimer[0]);
- targetDoc._triggerVideoStop = true;
- setTimeout(() => targetDoc._currentTimecode = NumCast(activeItem.presStartTime), 10);
- setTimeout(() => targetDoc._triggerVideo = true, 20);
- this._mediaTimer = [setTimeout(() => targetDoc._triggerVideoStop = true, (duration * 1000) + 20), targetDoc];
+ if ([DocumentType.VID, DocumentType.AUDIO].includes(targetDoc.type as any)) {
+ const targMedia = DocumentManager.Instance.getDocumentView(targetDoc);
+ targMedia?.ComponentView?.playFrom?.(NumCast(activeItem.presStartTime), NumCast(activeItem.presStartTime) + duration);
}
+ // if (targetDoc.type === DocumentType.AUDIO) {
+ // if (this._mediaTimer && this._mediaTimer[1] === targetDoc) clearTimeout(this._mediaTimer[0]);
+ // targetDoc._triggerAudio = NumCast(activeItem.presStartTime);
+ // this._mediaTimer = [setTimeout(() => targetDoc._audioStop = true, duration * 1000), targetDoc];
+ // } else if (targetDoc.type === DocumentType.VID) {
+ // targetDoc._triggerVideoStop = true;
+ // setTimeout(() => targetDoc._currentTimecode = NumCast(activeItem.presStartTime), 10);
+ // setTimeout(() => targetDoc._triggerVideo = true, 20);
+ // this._mediaTimer = [setTimeout(() => targetDoc._triggerVideoStop = true, (duration * 1000) + 20), targetDoc];
+ // }
}
stopTempMedia = (targetDoc: Doc) => {
diff --git a/src/client/views/nodes/ScreenshotBox.tsx b/src/client/views/nodes/ScreenshotBox.tsx
index 0e69725ca..a481cbbc5 100644
--- a/src/client/views/nodes/ScreenshotBox.tsx
+++ b/src/client/views/nodes/ScreenshotBox.tsx
@@ -24,6 +24,9 @@ import { VideoBox } from "./VideoBox";
import { Id } from "../../../fields/FieldSymbols";
import { CollectionStackedTimeline } from "../collections/CollectionStackedTimeline";
import { DateField } from "../../../fields/DateField";
+import { ComputedField } from "../../../fields/ScriptField";
+import { DocumentManager } from "../../util/DocumentManager";
+import { DocumentView } from "./DocumentView";
const path = require('path');
declare class MediaRecorder {
constructor(e: any, options?: any); // whatever MediaRecorder has
@@ -161,6 +164,8 @@ export class ScreenshotBox extends ViewBoxAnnotatableComponent<FieldViewProps, S
_achunks: any;
_vrecorder: any;
_arecorder: any;
+ _dictation: Doc | undefined;
+ _dictationView: DocumentView | undefined;
toggleRecording = action(async () => {
this._screenCapture = !this._screenCapture;
@@ -190,17 +195,33 @@ export class ScreenshotBox extends ViewBoxAnnotatableComponent<FieldViewProps, S
this.dataDoc[this.props.fieldKey] = new VideoField(Utils.prepend(result.accessPaths.agnostic.client));
} else alert("video conversion failed");
};
+ this._dictation = this.setupDictation();
+ setTimeout(() => this._dictationView = DocumentManager.Instance.getDocumentView(this._dictation!));
this._arecorder.start();
this._vrecorder.start();
+ this.dataDoc.audioState = "recording";
DocUtils.ActiveRecordings.push(this);
} else {
this._arecorder.stop();
this._vrecorder.stop();
+ this.dataDoc.audioState = "paused";
const ind = DocUtils.ActiveRecordings.indexOf(this);
ind !== -1 && (DocUtils.ActiveRecordings.splice(ind, 1));
}
});
+ setupDictation = () => {
+ const dictationText = CurrentUserUtils.GetNewTextDoc("",
+ NumCast(this.rootDoc.x), NumCast(this.rootDoc.y) + NumCast(this.layoutDoc._height) + 10,
+ NumCast(this.layoutDoc._width), 2 * NumCast(this.layoutDoc._height));
+ const dictationTextProto = Doc.GetProto(dictationText);
+ dictationTextProto.recordingSource = this.dataDoc;
+ dictationTextProto.recordingStart = ComputedField.MakeFunction(`self.recordingSource["${this.props.fieldKey}-recordingStart"]`);
+ dictationTextProto.audioState = ComputedField.MakeFunction("self.recordingSource.audioState");
+ this.props.addDocument?.(dictationText);
+ return dictationText;
+ }
+
private get uIButtons() {
return (<div className="screenshotBox-uiButtons">
<div className="screenshotBox-recorder" key="snap" onPointerDown={this.toggleRecording} >
diff --git a/src/client/views/nodes/formattedText/FormattedTextBox.tsx b/src/client/views/nodes/formattedText/FormattedTextBox.tsx
index 0c0b09b9a..47fad9112 100644
--- a/src/client/views/nodes/formattedText/FormattedTextBox.tsx
+++ b/src/client/views/nodes/formattedText/FormattedTextBox.tsx
@@ -61,7 +61,6 @@ import { RichTextRules } from "./RichTextRules";
import { schema } from "./schema_rts";
import { SummaryView } from "./SummaryView";
import applyDevTools = require("prosemirror-dev-tools");
-
import React = require("react");
const translateGoogleApi = require("translate-google-api");
@@ -636,6 +635,17 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<(FieldViewProp
this._downX = this._downY = Number.NaN;
}
+ breakupDictation = () => {
+ if (this._editorView) {
+ this.stopDictation(true);
+ this._break = true;
+ const state = this._editorView.state;
+ const to = state.selection.to;
+ const updated = TextSelection.create(state.doc, to, to);
+ this._editorView!.dispatch(this._editorView!.state.tr.setSelection(updated).insertText("\n", to));
+ if (this._recording) setTimeout(() => this.recordDictation(), 500);
+ }
+ }
recordDictation = () => {
DictationManager.Controls.listen({
interimHandler: this.setDictationContent,
@@ -654,7 +664,7 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<(FieldViewProp
if (this._recordingStart) {
let from = this._editorView.state.selection.from;
if (this._break) {
- const l = DocUtils.MakeLinkToActiveAudio(this.rootDoc);
+ const l = DocUtils.MakeLinkToActiveAudio(this.rootDoc, false);
if (!l) return;
const anchor = (l.anchor1 as Doc).annotationOn ? l.anchor1 as Doc : (l.anchor2 as Doc).annotationOn ? (l.anchor2 as Doc) : undefined;
if (!anchor) return;
@@ -758,6 +768,7 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<(FieldViewProp
this.props.setContentView?.(this); // this tells the DocumentView that this AudioBox is the "content" of the document. this allows the DocumentView to indirectly call getAnchor() on the AudioBox when making a link.
this.props.contentsActive?.(this.active);
this._cachedLinks = DocListCast(this.Document.links);
+ this._disposers.breakupDictation = reaction(() => DocumentManager.Instance.RecordingEvent, () => this.breakupDictation());
this._disposers.autoHeight = reaction(() => this.autoHeight, autoHeight => autoHeight && this.tryUpdateScrollHeight());
this._disposers.scrollHeight = reaction(() => ({ scrollHeight: this.scrollHeight, autoHeight: this.autoHeight, width: NumCast(this.layoutDoc._width) }),
({ width, scrollHeight, autoHeight }) => width && autoHeight && this.resetNativeHeight(scrollHeight)
@@ -842,7 +853,8 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<(FieldViewProp
setTimeout(() => this.recordDictation(), 500);
}), 500);
} else setTimeout(() => this.stopDictation(true), 0);
- }
+ },
+ { fireImmediately: true }
);
}
var quickScroll: string | undefined = "";
@@ -1122,21 +1134,22 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<(FieldViewProp
DocServer.GetRefField(audioid).then(anchor => {
if (anchor instanceof Doc) {
const audiodoc = anchor.annotationOn as Doc;
- audiodoc._triggerAudio = Number(time);
- !DocumentManager.Instance.getDocumentView(audiodoc) && this.props.addDocTab(audiodoc, "add:bottom");
+ const func = () => {
+ const docView = DocumentManager.Instance.getDocumentView(audiodoc);
+ if (!docView) {
+ this.props.addDocTab(audiodoc, "add:bottom");
+ setTimeout(() => func());
+ }
+ else docView.ComponentView?.playFrom?.(Number(time), Number(time) + 3); // bcz: would be nice to find the next audio tag in the doc and play until that
+ }
+ func();
}
});
}
if (this._recording && !e.ctrlKey && e.button === 0) {
- this.stopDictation(true);
- this._break = true;
- const state = this._editorView!.state;
- const to = state.selection.to;
- const updated = TextSelection.create(state.doc, to, to);
- this._editorView!.dispatch(this._editorView!.state.tr.setSelection(updated).insertText("\n", to));
+ this.breakupDictation();
e.preventDefault();
e.stopPropagation();
- if (this._recording) setTimeout(() => this.recordDictation(), 500);
}
this._downX = e.clientX;
this._downY = e.clientY;