aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorbobzel <zzzman@gmail.com>2021-03-24 03:50:41 -0400
committerbobzel <zzzman@gmail.com>2021-03-24 03:50:41 -0400
commit8f8a12f6e81482d6cbc4789c3b7f74015f33f423 (patch)
treed4c04e95018c1cc1fa4ae66b2b568742d00ae3e9
parentccdef5784dd963b71f493116d87786c0b0a787d2 (diff)
fixes for audiotags so that they can be navigated to when focusing. fixes to dictation to start/stop properly. added "RecordingEvents" to trigger dictation waypoints.
-rw-r--r--src/client/util/DictationManager.ts28
-rw-r--r--src/client/views/GlobalKeyHandler.ts2
-rw-r--r--src/client/views/LightboxView.tsx3
-rw-r--r--src/client/views/nodes/ScreenshotBox.tsx2
-rw-r--r--src/client/views/nodes/formattedText/FormattedTextBox.tsx84
-rw-r--r--src/client/views/nodes/formattedText/nodes_rts.ts8
6 files changed, 73 insertions, 54 deletions
diff --git a/src/client/util/DictationManager.ts b/src/client/util/DictationManager.ts
index c6b654dda..f00cdce1e 100644
--- a/src/client/util/DictationManager.ts
+++ b/src/client/util/DictationManager.ts
@@ -84,7 +84,13 @@ export namespace DictationManager {
terminators: string[];
}
+ let pendingListen: Promise<string> | string | undefined;
+
export const listen = async (options?: Partial<ListeningOptions>) => {
+ if (pendingListen instanceof Promise) return pendingListen.then(pl => innerListen(options));
+ return innerListen(options);
+ }
+ const innerListen = async (options?: Partial<ListeningOptions>) => {
let results: string | undefined;
const overlay = options?.useOverlay;
@@ -94,7 +100,8 @@ export namespace DictationManager {
}
try {
- results = await listenImpl(options);
+ results = await (pendingListen = listenImpl(options));
+ pendingListen = undefined;
if (results) {
Utils.CopyText(results);
if (overlay) {
@@ -106,6 +113,7 @@ export namespace DictationManager {
options?.tryExecute && await DictationManager.Commands.execute(results);
}
} catch (e) {
+ console.log(e);
if (overlay) {
DictationOverlay.Instance.isListening = false;
DictationOverlay.Instance.dictatedPhrase = results = `dictation error: ${"error" in e ? e.error : "unknown error"}`;
@@ -146,7 +154,8 @@ export namespace DictationManager {
recognizer.onerror = (e: any) => { // e is SpeechRecognitionError but where is that defined?
if (!(indefinite && e.error === "no-speech")) {
recognizer.stop();
- reject(e);
+ resolve(e);
+ //reject(e);
}
};
@@ -158,8 +167,8 @@ export namespace DictationManager {
recognizer.abort();
return complete();
}
- handler && handler(current);
- isManuallyStopped && complete();
+ !isManuallyStopped && handler?.(current);
+ //isManuallyStopped && complete();
};
recognizer.onend = (e: Event) => {
@@ -168,7 +177,7 @@ export namespace DictationManager {
}
if (current) {
- sessionResults.push(current);
+ !isManuallyStopped && sessionResults.push(current);
current = undefined;
}
recognizer.start();
@@ -199,14 +208,7 @@ export namespace DictationManager {
}
isListening = false;
isManuallyStopped = true;
- salvageSession ? recognizer.stop() : recognizer.abort();
- // let main = MainView.Instance;
- // if (main.dictationOverlayVisible) {
- // main.cancelDictationFade();
- // main.dictationOverlayVisible = false;
- // main.dictationSuccess = undefined;
- // setTimeout(() => main.dictatedPhrase = placeholder, 500);
- // }
+ recognizer.stop(); // salvageSession ? recognizer.stop() : recognizer.abort();
};
const synthesize = (e: SpeechRecognitionEvent, delimiter?: string) => {
diff --git a/src/client/views/GlobalKeyHandler.ts b/src/client/views/GlobalKeyHandler.ts
index f387546af..671c0c507 100644
--- a/src/client/views/GlobalKeyHandler.ts
+++ b/src/client/views/GlobalKeyHandler.ts
@@ -132,7 +132,7 @@ export class KeyManager {
SelectionManager.DeselectAll();
LightboxView.SetLightboxDoc(undefined);
}
- DictationManager.Controls.stop();
+ // DictationManager.Controls.stop();
GoogleAuthenticationManager.Instance.cancel();
SharingManager.Instance.close();
GroupManager.Instance.close();
diff --git a/src/client/views/LightboxView.tsx b/src/client/views/LightboxView.tsx
index 48b8ca341..5715b62b0 100644
--- a/src/client/views/LightboxView.tsx
+++ b/src/client/views/LightboxView.tsx
@@ -116,7 +116,8 @@ export class LightboxView extends React.Component<LightboxViewProps> {
const target = LightboxView._docTarget = LightboxView._future?.pop();
const docView = target && DocumentManager.Instance.getLightboxDocumentView(target);
if (docView && target) {
- DocUtils.MakeLinkToActiveAudio(target);
+ const l = DocUtils.MakeLinkToActiveAudio(target);
+ l && (Cast(l.anchor2, Doc, null).backgroundColor = "lightgreen");
docView.focus(target, { originalTarget: target, willZoom: true, scale: 0.9 });
if (LightboxView._history?.lastElement().target !== target) LightboxView._history?.push({ doc, target });
} else {
diff --git a/src/client/views/nodes/ScreenshotBox.tsx b/src/client/views/nodes/ScreenshotBox.tsx
index a481cbbc5..8e1a43fd1 100644
--- a/src/client/views/nodes/ScreenshotBox.tsx
+++ b/src/client/views/nodes/ScreenshotBox.tsx
@@ -218,7 +218,7 @@ export class ScreenshotBox extends ViewBoxAnnotatableComponent<FieldViewProps, S
dictationTextProto.recordingSource = this.dataDoc;
dictationTextProto.recordingStart = ComputedField.MakeFunction(`self.recordingSource["${this.props.fieldKey}-recordingStart"]`);
dictationTextProto.audioState = ComputedField.MakeFunction("self.recordingSource.audioState");
- this.props.addDocument?.(dictationText);
+ this.props.addDocument?.(dictationText) || this.props.addDocTab(dictationText, "add:bottom");
return dictationText;
}
diff --git a/src/client/views/nodes/formattedText/FormattedTextBox.tsx b/src/client/views/nodes/formattedText/FormattedTextBox.tsx
index 5ea24d185..85c2a7cb3 100644
--- a/src/client/views/nodes/formattedText/FormattedTextBox.tsx
+++ b/src/client/views/nodes/formattedText/FormattedTextBox.tsx
@@ -304,11 +304,13 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<(FieldViewProp
insertTime = () => {
let linkTime;
let linkAnchor;
+ let link;
DocListCast(this.dataDoc.links).forEach((l, i) => {
const anchor = (l.anchor1 as Doc).annotationOn ? l.anchor1 as Doc : (l.anchor2 as Doc).annotationOn ? (l.anchor2 as Doc) : undefined;
if (anchor && (anchor.annotationOn as Doc).audioState === "recording") {
linkTime = NumCast(anchor._timecodeToShow /* audioStart */);
linkAnchor = anchor;
+ link = l;
}
});
if (this._editorView && linkTime) {
@@ -636,14 +638,16 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<(FieldViewProp
}
breakupDictation = () => {
- if (this._editorView) {
+ if (this._editorView && this._recording) {
this.stopDictation(true);
this._break = true;
const state = this._editorView.state;
const to = state.selection.to;
const updated = TextSelection.create(state.doc, to, to);
- this._editorView!.dispatch(this._editorView!.state.tr.setSelection(updated).insertText("\n", to));
- if (this._recording) setTimeout(() => this.recordDictation(), 500);
+ this._editorView.dispatch(state.tr.setSelection(updated).insertText("\n", to));
+ if (this._recording) {
+ this.recordDictation();
+ }
}
}
recordDictation = () => {
@@ -659,25 +663,29 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<(FieldViewProp
stopDictation = (abort: boolean) => DictationManager.Controls.stop(!abort);
setDictationContent = (value: string) => {
- if (this._editorView) {
- const state = this._editorView.state;
- if (this._recordingStart) {
- let from = this._editorView.state.selection.from;
- if (this._break) {
- const l = DocUtils.MakeLinkToActiveAudio(this.rootDoc, false);
- if (!l) return;
- const anchor = (l.anchor1 as Doc).annotationOn ? l.anchor1 as Doc : (l.anchor2 as Doc).annotationOn ? (l.anchor2 as Doc) : undefined;
- if (!anchor) return;
- const timeCode = NumCast(anchor._timecodeToShow /* audioStart */);
- const audiotag = this._editorView.state.schema.nodes.audiotag.create({ timeCode, audioId: anchor[Id] });
- this._editorView.dispatch(this._editorView.state.tr.insert(state.selection.from, audiotag));
- from = this._editorView.state.doc.content.size;
- this._editorView.dispatch(this._editorView.state.tr.setSelection(TextSelection.create(this._editorView.state.tr.doc, from)));
- }
- this._break = false;
- const tr = this._editorView.state.tr.insertText(value);
- this._editorView.dispatch(tr.setSelection(TextSelection.create(tr.doc, from, tr.doc.content.size)).scrollIntoView());
+ if (this._editorView && this._recordingStart) {
+ if (this._break) {
+ const textanchor = Docs.Create.TextanchorDocument({ title: "dictation anchor" });
+ this.addDocument(textanchor);
+ const link = DocUtils.MakeLinkToActiveAudio(textanchor, false);
+ if (!link) return;
+ const audioanchor = Cast(link.anchor2, Doc, null);
+ if (!audioanchor) return;
+ audioanchor.backgroundColor = "tan";
+ const audiotag = this._editorView.state.schema.nodes.audiotag.create({
+ timeCode: NumCast(audioanchor._timecodeToShow),
+ audioId: audioanchor[Id],
+ textId: textanchor[Id]
+ });
+ Doc.GetProto(textanchor).title = "dictation:" + audiotag.attrs.timeCode;
+ const tr = this._editorView.state.tr.insert(this._editorView.state.doc.content.size, audiotag);
+ const tr2 = tr.setSelection(TextSelection.create(tr.doc, tr.doc.content.size));
+ this._editorView.dispatch(tr.setSelection(TextSelection.create(tr2.doc, tr2.doc.content.size)));
}
+ const from = this._editorView.state.selection.from;
+ this._break = false;
+ const tr = this._editorView.state.tr.insertText(value);
+ this._editorView.dispatch(tr.setSelection(TextSelection.create(tr.doc, from, tr.doc.content.size)).scrollIntoView());
}
}
@@ -709,14 +717,14 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<(FieldViewProp
return anchorDoc ?? this.rootDoc;
}
- scrollFocus = (doc: Doc, smooth: boolean) => {
- const anchorId = doc[Id];
+ scrollFocus = (textAnchor: Doc, smooth: boolean) => {
+ const textAnchorId = textAnchor[Id];
const findAnchorFrag = (frag: Fragment, editor: EditorView) => {
const nodes: Node[] = [];
let hadStart = start !== 0;
frag.forEach((node, index) => {
const examinedNode = findAnchorNode(node, editor);
- if (examinedNode?.node.textContent) {
+ if (examinedNode?.node && (examinedNode.node.textContent || examinedNode.node.type === this._editorView?.state.schema.nodes.audiotag)) {
nodes.push(examinedNode.node);
!hadStart && (start = index + examinedNode.start);
hadStart = true;
@@ -725,28 +733,35 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<(FieldViewProp
return { frag: Fragment.fromArray(nodes), start };
};
const findAnchorNode = (node: Node, editor: EditorView) => {
+ if (node.type === this._editorView?.state.schema.nodes.audiotag) {
+ if (node.attrs.textId === textAnchorId) {
+ return { node, start: 0 };
+ }
+ return undefined;
+ }
if (!node.isText) {
const content = findAnchorFrag(node.content, editor);
return { node: node.copy(content.frag), start: content.start };
}
const marks = [...node.marks];
const linkIndex = marks.findIndex(mark => mark.type === editor.state.schema.marks.linkAnchor);
- return linkIndex !== -1 && marks[linkIndex].attrs.allAnchors.find((item: { href: string }) => anchorId === item.href.replace(/.*\/doc\//, "")) ? { node, start: 0 } : undefined;
+ return linkIndex !== -1 && marks[linkIndex].attrs.allAnchors.find((item: { href: string }) => textAnchorId === item.href.replace(/.*\/doc\//, "")) ? { node, start: 0 } : undefined;
};
let start = 0;
- if (this._editorView && anchorId) {
+ if (this._editorView && textAnchorId) {
const editor = this._editorView;
const ret = findAnchorFrag(editor.state.doc.content, editor);
- if (ret.frag.size > 2 && ret.start >= 0) {
+ const content = (ret.frag as any)?.content;
+ if ((ret.frag.size > 2 || (content?.length && content[0].type === this._editorView.state.schema.nodes.audiotag)) && ret.start >= 0) {
smooth && (this._focusSpeed = 500);
let selection = TextSelection.near(editor.state.doc.resolve(ret.start)); // default to near the start
if (ret.frag.firstChild) {
selection = TextSelection.between(editor.state.doc.resolve(ret.start), editor.state.doc.resolve(ret.start + ret.frag.firstChild.nodeSize)); // bcz: looks better to not have the target selected
}
editor.dispatch(editor.state.tr.setSelection(new TextSelection(selection.$from, selection.$from)).scrollIntoView());
- const escAnchorId = anchorId[0] >= '0' && anchorId[0] <= '9' ? `\\3${anchorId[0]} ${anchorId.substr(1)}` : anchorId;
+ const escAnchorId = textAnchorId[0] >= '0' && textAnchorId[0] <= '9' ? `\\3${textAnchorId[0]} ${textAnchorId.substr(1)}` : textAnchorId;
addStyleSheetRule(FormattedTextBox._highlightStyleSheet, `${escAnchorId}`, { background: "yellow" });
setTimeout(() => this._focusSpeed = undefined, this._focusSpeed);
setTimeout(() => clearStyleSheetRules(FormattedTextBox._highlightStyleSheet), Math.max(this._focusSpeed || 0, 1500));
@@ -768,7 +783,7 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<(FieldViewProp
this.props.setContentView?.(this); // this tells the DocumentView that this AudioBox is the "content" of the document. this allows the DocumentView to indirectly call getAnchor() on the AudioBox when making a link.
this.props.contentsActive?.(this.active);
this._cachedLinks = DocListCast(this.Document.links);
- this._disposers.breakupDictation = reaction(() => DocumentManager.Instance.RecordingEvent, () => this.breakupDictation());
+ this._disposers.breakupDictation = reaction(() => DocumentManager.Instance.RecordingEvent, this.breakupDictation);
this._disposers.autoHeight = reaction(() => this.autoHeight, autoHeight => autoHeight && this.tryUpdateScrollHeight());
this._disposers.scrollHeight = reaction(() => ({ scrollHeight: this.scrollHeight, autoHeight: this.autoHeight, width: NumCast(this.layoutDoc._width) }),
({ width, scrollHeight, autoHeight }) => width && autoHeight && this.resetNativeHeight(scrollHeight)
@@ -838,7 +853,6 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<(FieldViewProp
this._disposers.selected = reaction(() => this.props.isSelected(),
action((selected) => {
- this._recording = false;
if (RichTextMenu.Instance?.view === this._editorView && !selected) {
RichTextMenu.Instance?.updateMenu(undefined, undefined, undefined);
}
@@ -847,15 +861,13 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<(FieldViewProp
if (!this.props.dontRegisterView) {
this._disposers.record = reaction(() => this._recording,
() => {
+ this.stopDictation(true);
if (this._recording) {
- setTimeout(action(() => {
- this.stopDictation(true);
- setTimeout(() => this.recordDictation(), 500);
- }), 500);
- } else setTimeout(() => this.stopDictation(true), 0);
+ this.recordDictation();
+ }
},
- { fireImmediately: true }
);
+ if (this._recording) setTimeout(() => this.recordDictation());
}
var quickScroll: string | undefined = "";
this._disposers.scroll = reaction(() => NumCast(this.layoutDoc._scrollTop),
diff --git a/src/client/views/nodes/formattedText/nodes_rts.ts b/src/client/views/nodes/formattedText/nodes_rts.ts
index df93fc117..2fe0a67cb 100644
--- a/src/client/views/nodes/formattedText/nodes_rts.ts
+++ b/src/client/views/nodes/formattedText/nodes_rts.ts
@@ -28,14 +28,17 @@ export const nodes: { [index: string]: NodeSpec } = {
group: "block",
attrs: {
timeCode: { default: 0 },
- audioId: { default: "" }
+ audioId: { default: "" },
+ textId: { default: "" }
},
toDOM(node) {
return ['audiotag',
{
+ class: node.attrs.textId,
// style: see FormattedTextBox.scss
"data-timecode": node.attrs.timeCode,
"data-audioid": node.attrs.audioId,
+ "data-textid": node.attrs.textId,
},
formatAudioTime(node.attrs.timeCode.toString())
];
@@ -45,7 +48,8 @@ export const nodes: { [index: string]: NodeSpec } = {
tag: "audiotag", getAttrs(dom: any) {
return {
timeCode: dom.getAttribute("data-timecode"),
- audioId: dom.getAttribute("data-audioid")
+ audioId: dom.getAttribute("data-audioid"),
+ textId: dom.getAttribute("data-textid")
};
}
},