From 4b2f131d91e71f4514642b5846713e9c3b68210b Mon Sep 17 00:00:00 2001 From: bobzel Date: Mon, 8 Aug 2022 12:54:06 -0400 Subject: fixed autolinking to not match partial word substrings. restored textbox dictation. --- src/client/util/DictationManager.ts | 439 ++++++++++++++++++------------------ 1 file changed, 221 insertions(+), 218 deletions(-) (limited to 'src/client/util/DictationManager.ts') diff --git a/src/client/util/DictationManager.ts b/src/client/util/DictationManager.ts index a6dcda4bc..13f036838 100644 --- a/src/client/util/DictationManager.ts +++ b/src/client/util/DictationManager.ts @@ -1,37 +1,35 @@ -import * as interpreter from "words-to-numbers"; +import * as interpreter from 'words-to-numbers'; // @ts-ignore bcz: how are you supposed to include these definitions since dom-speech-recognition isn't a module? -import type { } from "@types/dom-speech-recognition"; -import { Doc, Opt } from "../../fields/Doc"; -import { List } from "../../fields/List"; -import { RichTextField } from "../../fields/RichTextField"; -import { listSpec } from "../../fields/Schema"; -import { Cast, CastCtor } from "../../fields/Types"; -import { AudioField, ImageField } from "../../fields/URLField"; -import { Utils } from "../../Utils"; -import { Docs } from "../documents/Documents"; -import { DocumentType } from "../documents/DocumentTypes"; -import { DictationOverlay } from "../views/DictationOverlay"; -import { DocumentView } from "../views/nodes/DocumentView"; -import { SelectionManager } from "./SelectionManager"; -import { UndoManager } from "./UndoManager"; - +import type {} from '@types/dom-speech-recognition'; +import { Doc, Opt } from '../../fields/Doc'; +import { List } from '../../fields/List'; +import { RichTextField } from '../../fields/RichTextField'; +import { listSpec } from '../../fields/Schema'; +import { Cast, CastCtor } from '../../fields/Types'; +import { AudioField, ImageField } from '../../fields/URLField'; +import { Utils } from '../../Utils'; +import { Docs } from '../documents/Documents'; +import { DocumentType } from '../documents/DocumentTypes'; +import { DictationOverlay } from '../views/DictationOverlay'; +import { DocumentView } from '../views/nodes/DocumentView'; +import { SelectionManager } from './SelectionManager'; +import { UndoManager } from './UndoManager'; /** * This namespace provides a singleton instance of a manager that * handles the listening and text-conversion of user speech. - * + * * The basic manager functionality can be attained by the DictationManager.Controls namespace, which provide * a simple recording operation that returns the interpreted text as a string. - * + * * Additionally, however, the DictationManager also exposes the ability to execute voice commands within Dash. * It stores a default library of registered commands that can be triggered by listen()'ing for a phrase and then * passing the results into the execute() function. - * + * * In addition to compile-time default commands, you can invoke DictationManager.Commands.Register(Independent|Dependent) * to add new commands as classes or components are constructed. */ export namespace DictationManager { - /** * Some type maneuvering to access Webkit's built-in * speech recognizer. 
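A minimal usage sketch of the Controls/Commands API described in the comment above (illustrative only, not part of this patch): listen() records a phrase, the transcript is handed to Commands.execute(), and dependent commands can be registered as components are constructed. The "rename to ..." command, its regular expression, and the import paths are hypothetical examples, not defaults shipped by this commit.

import { DictationManager } from './DictationManager';
import { DocumentView } from '../views/nodes/DocumentView';

// Hypothetical dependent (regex-driven) command, registered at construction time.
DictationManager.Commands.RegisterDependent({
    expression: /rename to (\w+)/g,
    action: (target: DocumentView, matches: RegExpExecArray) => {
        target.props.Document.title = matches[1];
    },
});

// Record one utterance, then pass the transcript to the command executor.
async function dictateCommand() {
    const phrase = await DictationManager.Controls.listen();
    if (phrase && phrase !== DictationManager.Controls.Infringed) {
        const handled = await DictationManager.Commands.execute(phrase);
        console.log(handled ? `executed: ${phrase}` : `no command matched: ${phrase}`);
    }
}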
@@ -42,27 +40,26 @@ export namespace DictationManager { } } const { webkitSpeechRecognition }: CORE.IWindow = window as any as CORE.IWindow; - export const placeholder = "Listening..."; + export const placeholder = 'Listening...'; export namespace Controls { - - export const Infringed = "unable to process: dictation manager still involved in previous session"; + export const Infringed = 'unable to process: dictation manager still involved in previous session'; const browser = (() => { const identifier = navigator.userAgent.toLowerCase(); - if (identifier.indexOf("safari") >= 0) { - return "Safari"; + if (identifier.indexOf('safari') >= 0) { + return 'Safari'; } - if (identifier.indexOf("chrome") >= 0) { - return "Chrome"; + if (identifier.indexOf('chrome') >= 0) { + return 'Chrome'; } - if (identifier.indexOf("firefox") >= 0) { - return "Firefox"; + if (identifier.indexOf('firefox') >= 0) { + return 'Firefox'; } - return "Unidentified Browser"; + return 'Unidentified Browser'; })(); const unsupported = `listening is not supported in ${browser}`; - const intraSession = ". "; - const interSession = " ... "; + const intraSession = '. '; + const interSession = ' ... '; export let isListening = false; let isManuallyStopped = false; @@ -74,7 +71,7 @@ export namespace DictationManager { export type InterimResultHandler = (results: string) => any; export type ContinuityArgs = { indefinite: boolean } | false; - export type DelimiterArgs = { inter: string, intra: string }; + export type DelimiterArgs = { inter: string; intra: string }; export type ListeningUIStatus = { interim: boolean } | false; export interface ListeningOptions { @@ -105,21 +102,21 @@ export namespace DictationManager { try { results = await (pendingListen = listenImpl(options)); pendingListen = undefined; - // if (results) { - // Utils.CopyText(results); - // if (overlay) { - // DictationOverlay.Instance.isListening = false; - // const execute = options?.tryExecute; - // DictationOverlay.Instance.dictatedPhrase = execute ? results.toLowerCase() : results; - // DictationOverlay.Instance.dictationSuccess = execute ? await DictationManager.Commands.execute(results) : true; - // } - // options?.tryExecute && await DictationManager.Commands.execute(results); - // } + if (results) { + Utils.CopyText(results); + if (overlay) { + DictationOverlay.Instance.isListening = false; + const execute = options?.tryExecute; + DictationOverlay.Instance.dictatedPhrase = execute ? results.toLowerCase() : results; + DictationOverlay.Instance.dictationSuccess = execute ? await DictationManager.Commands.execute(results) : true; + } + options?.tryExecute && (await DictationManager.Commands.execute(results)); + } } catch (e: any) { console.log(e); if (overlay) { DictationOverlay.Instance.isListening = false; - DictationOverlay.Instance.dictatedPhrase = results = `dictation error: ${"error" in e ? e.error : "unknown error"}`; + DictationOverlay.Instance.dictatedPhrase = results = `dictation error: ${'error' in e ? 
e.error : 'unknown error'}`; DictationOverlay.Instance.dictationSuccess = false; } } finally { @@ -131,7 +128,7 @@ export namespace DictationManager { const listenImpl = (options?: Partial) => { if (!recognizer) { - console.log("DictationManager:" + unsupported); + console.log('DictationManager:' + unsupported); return unsupported; } if (isListening) { @@ -146,16 +143,18 @@ export namespace DictationManager { const intra = options?.delimiters?.intra; const inter = options?.delimiters?.inter; - recognizer.onstart = () => console.log("initiating speech recognition session..."); + recognizer.onstart = () => console.log('initiating speech recognition session...'); recognizer.interimResults = handler !== undefined; recognizer.continuous = continuous === undefined ? false : continuous !== false; - recognizer.lang = language === undefined ? "en-US" : language; + recognizer.lang = language === undefined ? 'en-US' : language; recognizer.start(); return new Promise((resolve, reject) => { - recognizer.onerror = (e: any) => { // e is SpeechRecognitionError but where is that defined? - if (!(indefinite && e.error === "no-speech")) { + recognizer.onerror = (e: any) => { + console.log('SPEECH error:', encodeURIComponent); + // e is SpeechRecognitionError but where is that defined? + if (!(indefinite && e.error === 'no-speech')) { recognizer.stop(); resolve(e); //reject(e); @@ -163,9 +162,10 @@ export namespace DictationManager { }; recognizer.onresult = (e: SpeechRecognitionEvent) => { + console.log('RESULT: ', e); current = synthesize(e, intra); let matchedTerminator: string | undefined; - if (options?.terminators && (matchedTerminator = options.terminators.find(end => current ? current.trim().toLowerCase().endsWith(end.toLowerCase()) : false))) { + if (options?.terminators && (matchedTerminator = options.terminators.find(end => (current ? 
current.trim().toLowerCase().endsWith(end.toLowerCase()) : false)))) { current = matchedTerminator; recognizer.abort(); return complete(); @@ -175,6 +175,7 @@ export namespace DictationManager { }; recognizer.onend = (e: Event) => { + console.log('END: ', e); if (!indefinite || isManuallyStopped) { return complete(); } @@ -187,11 +188,12 @@ export namespace DictationManager { }; const complete = () => { + console.log('COMPLETE:'); if (indefinite) { current && sessionResults.push(current); sessionResults.length && resolve(sessionResults.join(inter || interSession)); } else { - resolve(current || ""); + resolve(current || ''); } current = undefined; sessionResults = []; @@ -201,7 +203,6 @@ export namespace DictationManager { recognizer.onerror = null; recognizer.onend = null; }; - }); }; @@ -222,171 +223,173 @@ export namespace DictationManager { } return transcripts.join(delimiter || intraSession); }; - } - // export namespace Commands { - - // export const dictationFadeDuration = 2000; - - // export type IndependentAction = (target: DocumentView) => any | Promise; - // export type IndependentEntry = { action: IndependentAction, restrictTo?: DocumentType[] }; - - // export type DependentAction = (target: DocumentView, matches: RegExpExecArray) => any | Promise; - // export type DependentEntry = { expression: RegExp, action: DependentAction, restrictTo?: DocumentType[] }; - - // export const RegisterIndependent = (key: string, value: IndependentEntry) => Independent.set(key, value); - // export const RegisterDependent = (entry: DependentEntry) => Dependent.push(entry); - - // export const execute = async (phrase: string) => { - // return UndoManager.RunInBatch(async () => { - // const targets = SelectionManager.Views(); - // if (!targets || !targets.length) { - // return; - // } - - // phrase = phrase.toLowerCase(); - // const entry = Independent.get(phrase); - - // if (entry) { - // let success = false; - // const restrictTo = entry.restrictTo; - // for (const target of targets) { - // if (!restrictTo || validate(target, restrictTo)) { - // await entry.action(target); - // success = true; - // } - // } - // return success; - // } - - // for (const entry of Dependent) { - // const regex = entry.expression; - // const matches = regex.exec(phrase); - // regex.lastIndex = 0; - // if (matches !== null) { - // let success = false; - // const restrictTo = entry.restrictTo; - // for (const target of targets) { - // if (!restrictTo || validate(target, restrictTo)) { - // await entry.action(target, matches); - // success = true; - // } - // } - // return success; - // } - // } - - // return false; - // }, "Execute Command"); - // }; - - // const ConstructorMap = new Map([ - // [DocumentType.COL, listSpec(Doc)], - // [DocumentType.AUDIO, AudioField], - // [DocumentType.IMG, ImageField], - // [DocumentType.IMPORT, listSpec(Doc)], - // [DocumentType.RTF, "string"] - // ]); - - // const tryCast = (view: DocumentView, type: DocumentType) => { - // const ctor = ConstructorMap.get(type); - // if (!ctor) { - // return false; - // } - // return Cast(Doc.GetProto(view.props.Document).data, ctor) !== undefined; - // }; - - // const validate = (target: DocumentView, types: DocumentType[]) => { - // for (const type of types) { - // if (tryCast(target, type)) { - // return true; - // } - // } - // return false; - // }; - - // const interpretNumber = (number: string) => { - // const initial = parseInt(number); - // if (!isNaN(initial)) { - // return initial; - // } - // const converted = 
interpreter.wordsToNumbers(number, { fuzzy: true }); - // if (converted === null) { - // return NaN; - // } - // return typeof converted === "string" ? parseInt(converted) : converted; - // }; - - // const Independent = new Map([ - - // ["clear", { - // action: (target: DocumentView) => Doc.GetProto(target.props.Document).data = new List(), - // restrictTo: [DocumentType.COL] - // }], - - // ["open fields", { - // action: (target: DocumentView) => { - // const kvp = Docs.Create.KVPDocument(target.props.Document, { _width: 300, _height: 300 }); - // target.props.addDocTab(kvp, "add:right"); - // } - // }], - - // ["new outline", { - // action: (target: DocumentView) => { - // const newBox = Docs.Create.TextDocument("", { _width: 400, _height: 200, title: "My Outline", _autoHeight: true }); - // const proto = newBox.proto!; - // const prompt = "Press alt + r to start dictating here..."; - // const head = 3; - // const anchor = head + prompt.length; - // const proseMirrorState = `{"doc":{"type":"doc","content":[{"type":"ordered_list","content":[{"type":"list_item","content":[{"type":"paragraph","content":[{"type":"text","text":"${prompt}"}]}]}]}]},"selection":{"type":"text","anchor":${anchor},"head":${head}}}`; - // proto.data = new RichTextField(proseMirrorState); - // proto.backgroundColor = "#eeffff"; - // target.props.addDocTab(newBox, "add:right"); - // } - // }] - - // ]); - - // const Dependent = new Array( - - // { - // expression: /create (\w+) documents of type (image|nested collection)/g, - // action: (target: DocumentView, matches: RegExpExecArray) => { - // const count = interpretNumber(matches[1]); - // const what = matches[2]; - // const dataDoc = Doc.GetProto(target.props.Document); - // const fieldKey = "data"; - // if (isNaN(count)) { - // return; - // } - // for (let i = 0; i < count; i++) { - // let created: Doc | undefined; - // switch (what) { - // case "image": - // created = Docs.Create.ImageDocument("https://upload.wikimedia.org/wikipedia/commons/thumb/3/3a/Cat03.jpg/1200px-Cat03.jpg"); - // break; - // case "nested collection": - // created = Docs.Create.FreeformDocument([], {}); - // break; - // } - // created && Doc.AddDocToList(dataDoc, fieldKey, created); - // } - // }, - // restrictTo: [DocumentType.COL] - // }, - - // { - // expression: /view as (freeform|stacking|masonry|schema|tree)/g, - // action: (target: DocumentView, matches: RegExpExecArray) => { - // const mode = matches[1]; - // mode && (target.props.Document._viewType = mode); - // }, - // restrictTo: [DocumentType.COL] - // } - - // ); - - // } - -} \ No newline at end of file + export namespace Commands { + export const dictationFadeDuration = 2000; + + export type IndependentAction = (target: DocumentView) => any | Promise; + export type IndependentEntry = { action: IndependentAction; restrictTo?: DocumentType[] }; + + export type DependentAction = (target: DocumentView, matches: RegExpExecArray) => any | Promise; + export type DependentEntry = { expression: RegExp; action: DependentAction; restrictTo?: DocumentType[] }; + + export const RegisterIndependent = (key: string, value: IndependentEntry) => Independent.set(key, value); + export const RegisterDependent = (entry: DependentEntry) => Dependent.push(entry); + + export const execute = async (phrase: string) => { + return UndoManager.RunInBatch(async () => { + console.log('PHRASE: ' + phrase); + const targets = SelectionManager.Views(); + if (!targets || !targets.length) { + return; + } + + phrase = phrase.toLowerCase(); + const entry = 
Independent.get(phrase); + + if (entry) { + let success = false; + const restrictTo = entry.restrictTo; + for (const target of targets) { + if (!restrictTo || validate(target, restrictTo)) { + await entry.action(target); + success = true; + } + } + return success; + } + + for (const entry of Dependent) { + const regex = entry.expression; + const matches = regex.exec(phrase); + regex.lastIndex = 0; + if (matches !== null) { + let success = false; + const restrictTo = entry.restrictTo; + for (const target of targets) { + if (!restrictTo || validate(target, restrictTo)) { + await entry.action(target, matches); + success = true; + } + } + return success; + } + } + + return false; + }, 'Execute Command'); + }; + + const ConstructorMap = new Map([ + [DocumentType.COL, listSpec(Doc)], + [DocumentType.AUDIO, AudioField], + [DocumentType.IMG, ImageField], + [DocumentType.IMPORT, listSpec(Doc)], + [DocumentType.RTF, 'string'], + ]); + + const tryCast = (view: DocumentView, type: DocumentType) => { + const ctor = ConstructorMap.get(type); + if (!ctor) { + return false; + } + return Cast(Doc.GetProto(view.props.Document).data, ctor) !== undefined; + }; + + const validate = (target: DocumentView, types: DocumentType[]) => { + for (const type of types) { + if (tryCast(target, type)) { + return true; + } + } + return false; + }; + + const interpretNumber = (number: string) => { + const initial = parseInt(number); + if (!isNaN(initial)) { + return initial; + } + const converted = interpreter.wordsToNumbers(number, { fuzzy: true }); + if (converted === null) { + return NaN; + } + return typeof converted === 'string' ? parseInt(converted) : converted; + }; + + const Independent = new Map([ + [ + 'clear', + { + action: (target: DocumentView) => (Doc.GetProto(target.props.Document).data = new List()), + restrictTo: [DocumentType.COL], + }, + ], + + [ + 'open fields', + { + action: (target: DocumentView) => { + const kvp = Docs.Create.KVPDocument(target.props.Document, { _width: 300, _height: 300 }); + target.props.addDocTab(kvp, 'add:right'); + }, + }, + ], + + [ + 'new outline', + { + action: (target: DocumentView) => { + const newBox = Docs.Create.TextDocument('', { _width: 400, _height: 200, title: 'My Outline', _autoHeight: true }); + const proto = newBox.proto!; + const prompt = 'Press alt + r to start dictating here...'; + const head = 3; + const anchor = head + prompt.length; + const proseMirrorState = `{"doc":{"type":"doc","content":[{"type":"ordered_list","content":[{"type":"list_item","content":[{"type":"paragraph","content":[{"type":"text","text":"${prompt}"}]}]}]}]},"selection":{"type":"text","anchor":${anchor},"head":${head}}}`; + proto.data = new RichTextField(proseMirrorState); + proto.backgroundColor = '#eeffff'; + target.props.addDocTab(newBox, 'add:right'); + }, + }, + ], + ]); + + const Dependent = new Array( + { + expression: /create (\w+) documents of type (image|nested collection)/g, + action: (target: DocumentView, matches: RegExpExecArray) => { + const count = interpretNumber(matches[1]); + const what = matches[2]; + const dataDoc = Doc.GetProto(target.props.Document); + const fieldKey = 'data'; + if (isNaN(count)) { + return; + } + for (let i = 0; i < count; i++) { + let created: Doc | undefined; + switch (what) { + case 'image': + created = Docs.Create.ImageDocument('https://upload.wikimedia.org/wikipedia/commons/thumb/3/3a/Cat03.jpg/1200px-Cat03.jpg'); + break; + case 'nested collection': + created = Docs.Create.FreeformDocument([], {}); + break; + } + created && 
Doc.AddDocToList(dataDoc, fieldKey, created); + } + }, + restrictTo: [DocumentType.COL], + }, + + { + expression: /view as (freeform|stacking|masonry|schema|tree)/g, + action: (target: DocumentView, matches: RegExpExecArray) => { + const mode = matches[1]; + mode && (target.props.Document._viewType = mode); + }, + restrictTo: [DocumentType.COL], + } + ); + } +} -- cgit v1.2.3-70-g09d2 From f9cded6755ee236d88e21b3b2d4c6da357d059b2 Mon Sep 17 00:00:00 2001 From: bobzel Date: Mon, 8 Aug 2022 15:47:35 -0400 Subject: moved 5 sec anno recording button to documentButtonsBar. made recording button appear only when there is one & click to play recording. --- src/client/util/CurrentUserUtils.ts | 3 +- src/client/util/DictationManager.ts | 4 -- src/client/views/DocumentButtonBar.tsx | 24 ++++++- src/client/views/PropertiesButtons.tsx | 9 --- src/client/views/nodes/DocumentView.scss | 6 +- src/client/views/nodes/DocumentView.tsx | 82 +++++++++++++--------- src/client/views/nodes/button/FontIconBox.tsx | 9 ++- .../views/nodes/formattedText/FormattedTextBox.tsx | 12 +--- 8 files changed, 90 insertions(+), 59 deletions(-) (limited to 'src/client/util/DictationManager.ts') diff --git a/src/client/util/CurrentUserUtils.ts b/src/client/util/CurrentUserUtils.ts index 7856c913b..d19874720 100644 --- a/src/client/util/CurrentUserUtils.ts +++ b/src/client/util/CurrentUserUtils.ts @@ -107,7 +107,7 @@ export class CurrentUserUtils { const reqdClickList = reqdTempOpts.map(opts => { const allOpts = {...reqdClickOpts, ...opts.opts}; const clickDoc = tempClicks ? DocListCast(tempClicks.data).find(doc => doc.title === opts.opts.title): undefined; - return DocUtils.AssignOpts(clickDoc, allOpts) ?? Docs.Create.ScriptingDocument(ScriptField.MakeScript(opts.script,allOpts)); + return DocUtils.AssignOpts(clickDoc, allOpts) ?? Docs.Create.ScriptingDocument(ScriptField.MakeScript(opts.script, allOpts),allOpts); }); const reqdOpts:DocumentOptions = { title: "child click editors", _height:75, system: true}; @@ -633,6 +633,7 @@ export class CurrentUserUtils { { title: "Center", toolTip: "Center align", btnType: ButtonType.ToggleButton, icon: "align-center", scripts: {onClick:'{ return setAlignment("center", _readOnly_);}'} }, { title: "Right", toolTip: "Right align", btnType: ButtonType.ToggleButton, icon: "align-right", scripts: {onClick:'{ return setAlignment("right", _readOnly_);}'} }, { title: "NoLink", toolTip: "Auto Link", btnType: ButtonType.ToggleButton, icon: "link", scripts: {onClick:'{ return toggleNoAutoLinkAnchor(_readOnly_);}'}}, + { title: "Dictate",toolTip: "Dictate", btnType: ButtonType.ToggleButton, icon: "microphone", scripts: {onClick:'{ return toggleDictation(_readOnly_);}'}}, ]; } diff --git a/src/client/util/DictationManager.ts b/src/client/util/DictationManager.ts index 13f036838..0a61f3478 100644 --- a/src/client/util/DictationManager.ts +++ b/src/client/util/DictationManager.ts @@ -152,7 +152,6 @@ export namespace DictationManager { return new Promise((resolve, reject) => { recognizer.onerror = (e: any) => { - console.log('SPEECH error:', encodeURIComponent); // e is SpeechRecognitionError but where is that defined? if (!(indefinite && e.error === 'no-speech')) { recognizer.stop(); @@ -162,7 +161,6 @@ export namespace DictationManager { }; recognizer.onresult = (e: SpeechRecognitionEvent) => { - console.log('RESULT: ', e); current = synthesize(e, intra); let matchedTerminator: string | undefined; if (options?.terminators && (matchedTerminator = options.terminators.find(end => (current ? 
current.trim().toLowerCase().endsWith(end.toLowerCase()) : false)))) { @@ -175,7 +173,6 @@ export namespace DictationManager { }; recognizer.onend = (e: Event) => { - console.log('END: ', e); if (!indefinite || isManuallyStopped) { return complete(); } @@ -188,7 +185,6 @@ export namespace DictationManager { }; const complete = () => { - console.log('COMPLETE:'); if (indefinite) { current && sessionResults.push(current); sessionResults.length && resolve(sessionResults.join(inter || interSession)); diff --git a/src/client/views/DocumentButtonBar.tsx b/src/client/views/DocumentButtonBar.tsx index 1d4056759..265df3abc 100644 --- a/src/client/views/DocumentButtonBar.tsx +++ b/src/client/views/DocumentButtonBar.tsx @@ -6,7 +6,7 @@ import { observer } from 'mobx-react'; import { Doc } from '../../fields/Doc'; import { RichTextField } from '../../fields/RichTextField'; import { Cast, NumCast } from '../../fields/Types'; -import { emptyFunction, setupMoveUpEvents, simulateMouseClick } from '../../Utils'; +import { emptyFunction, returnFalse, setupMoveUpEvents, simulateMouseClick } from '../../Utils'; import { GoogleAuthenticationManager } from '../apis/GoogleAuthenticationManager'; import { Pulls, Pushes } from '../apis/google_docs/GoogleApiClientUtils'; import { Docs } from '../documents/Documents'; @@ -334,6 +334,27 @@ export class DocumentButtonBar extends React.Component<{ views: () => (DocumentV ); } + + @observable _isRecording = false; + @computed + get recordButton() { + const targetDoc = this.view0?.props.Document; + return !targetDoc ? null : ( + {'Click to record 5 second annotation'}}> +
{ + this._isRecording = true; + this.props.views().map(view => view?.docView?.recordAudioAnnotation(action(() => (this._isRecording = false)))); + }) + )}> + +
+
+ ); + } @observable _aliasDown = false; onTemplateButton = action((e: React.PointerEvent): void => { this._tooltipOpen = false; @@ -411,6 +432,7 @@ export class DocumentButtonBar extends React.Component<{ views: () => (DocumentV ) : null} +
{this.recordButton}
{ Doc.noviceMode ? null :
{this.templateButton}
/*
diff --git a/src/client/views/PropertiesButtons.tsx b/src/client/views/PropertiesButtons.tsx index 39e7b89c1..80c2c7705 100644 --- a/src/client/views/PropertiesButtons.tsx +++ b/src/client/views/PropertiesButtons.tsx @@ -68,14 +68,6 @@ export class PropertiesButtons extends React.Component<{}, {}> { on => 'thumbtack' ); } - @computed get dictationButton() { - return this.propertyToggleBtn( - 'Dictate', - '_showAudio', - on => `${on ? 'Hide' : 'Show'} dictation/recording controls`, - on => 'microphone' - ); - } @computed get maskButton() { return this.propertyToggleBtn( 'Mask', @@ -365,7 +357,6 @@ export class PropertiesButtons extends React.Component<{}, {}> { {toggle(this.titleButton)} {toggle(this.captionButton)} {toggle(this.lockButton)} - {toggle(this.dictationButton, { display: isNovice ? 'none' : '' })} {toggle(this.onClickButton)} {toggle(this.fitWidthButton)} {toggle(this.freezeThumb)} diff --git a/src/client/views/nodes/DocumentView.scss b/src/client/views/nodes/DocumentView.scss index c02692bfb..6ea697a2f 100644 --- a/src/client/views/nodes/DocumentView.scss +++ b/src/client/views/nodes/DocumentView.scss @@ -65,11 +65,13 @@ width: 25px; height: 25; position: absolute; - top: 10px; - left: 10px; + top: 0; + left: 50%; border-radius: 25px; background: white; opacity: 0.3; + pointer-events: all; + cursor: default; svg { width: 90% !important; diff --git a/src/client/views/nodes/DocumentView.tsx b/src/client/views/nodes/DocumentView.tsx index 2524d66dd..4b2bd07ef 100644 --- a/src/client/views/nodes/DocumentView.tsx +++ b/src/client/views/nodes/DocumentView.tsx @@ -10,7 +10,7 @@ import { List } from '../../../fields/List'; import { ObjectField } from '../../../fields/ObjectField'; import { listSpec } from '../../../fields/Schema'; import { ScriptField } from '../../../fields/ScriptField'; -import { BoolCast, Cast, ImageCast, NumCast, ScriptCast, StrCast } from '../../../fields/Types'; +import { BoolCast, Cast, DocCast, ImageCast, NumCast, ScriptCast, StrCast } from '../../../fields/Types'; import { AudioField } from '../../../fields/URLField'; import { GetEffectiveAcl, SharingPermissions, TraceMobx } from '../../../fields/util'; import { MobileInterface } from '../../../mobile/MobileInterface'; @@ -52,6 +52,8 @@ import { RadialMenu } from './RadialMenu'; import { ScriptingBox } from './ScriptingBox'; import { PresBox } from './trails/PresBox'; import React = require('react'); +import { DictationManager } from '../../util/DictationManager'; +import { Tooltip } from '@material-ui/core'; const { Howl } = require('howler'); interface Window { @@ -1005,16 +1007,16 @@ export class DocumentViewInternal extends DocComponent (this.props.NativeDimScaling?.() || 1) * this.props.DocumentView().screenToLocalTransform().Scale; @computed get contents() { TraceMobx(); - const audioView = !this.layoutDoc._showAudio ? null : ( -
- -
- ); + const audioAnnosCount = Cast(this.dataDoc[this.LayoutFieldKey + '-audioAnnotations'], listSpec(AudioField), null)?.length; + const audioTextAnnos = Cast(this.dataDoc[this.LayoutFieldKey + '-audioAnnotations-text'], listSpec('string'), null); + const audioView = + (!this.props.isSelected() && !this._isHovering) || this.props.renderDepth === -1 || SnappingManager.GetIsDragging() || (!audioAnnosCount && !this._mediaState) ? null : ( + {audioTextAnnos?.lastElement()}
}> +
+ +
+ + ); return (
{ + playAnnotation = () => { const self = this; - const audioAnnos = DocListCast(this.dataDoc[this.LayoutFieldKey + '-audioAnnotations']); - if (audioAnnos && audioAnnos.length && this._mediaState === 0) { - const anno = audioAnnos[Math.floor(Math.random() * audioAnnos.length)]; - anno.data instanceof AudioField && - new Howl({ - src: [anno.data.url.href], - format: ['mp3'], - autoplay: true, - loop: false, - volume: 0.5, - onend: function () { - runInAction(() => (self._mediaState = 0)); - }, - }); + const audioAnnos = Cast(this.dataDoc[this.LayoutFieldKey + '-audioAnnotations'], listSpec(AudioField), null); + const anno = audioAnnos.lastElement(); + if (anno instanceof AudioField && this._mediaState === 0) { + new Howl({ + src: [anno.url.href], + format: ['mp3'], + autoplay: true, + loop: false, + volume: 0.5, + onend: function () { + runInAction(() => { + console.log('PLAYED'); + self._mediaState = 0; + }); + }, + }); this._mediaState = 1; } }; - recordAudioAnnotation = () => { + + recordAudioAnnotation = (onEnd?: () => void) => { let gumStream: any; let recorder: any; const self = this; @@ -1176,18 +1181,30 @@ export class DocumentViewInternal extends DocComponent(['']); + DictationManager.Controls.listen({ + interimHandler: value => (audioTextAnnos[audioTextAnnos.length - 1] = value), + continuous: { indefinite: false }, + }).then(results => { + if (results && [DictationManager.Controls.Infringed].includes(results)) { + DictationManager.Controls.stop(); + } + onEnd?.(); + }); + gumStream = stream; recorder = new MediaRecorder(stream); recorder.ondataavailable = async (e: any) => { const [{ result }] = await Networking.UploadFilesToServer(e.data); if (!(result instanceof Error)) { - const audioDoc = Docs.Create.AudioDocument(result.accessPaths.agnostic.client, { title: 'audio test', _width: 200, _height: 32 }); - audioDoc.treeViewExpandedView = 'layout'; - const audioAnnos = Cast(self.dataDoc[self.LayoutFieldKey + '-audioAnnotations'], listSpec(Doc)); + const audioField = new AudioField(result.accessPaths.agnostic.client); + const audioAnnos = Cast(self.dataDoc[self.LayoutFieldKey + '-audioAnnotations'], listSpec(AudioField), null); if (audioAnnos === undefined) { - self.dataDoc[self.LayoutFieldKey + '-audioAnnotations'] = new List([audioDoc]); + self.dataDoc[self.LayoutFieldKey + '-audioAnnotations'] = new List([audioField]); } else { - audioAnnos.push(audioDoc); + audioAnnos.push(audioField); } } }; @@ -1195,6 +1212,7 @@ export class DocumentViewInternal extends DocComponent { recorder.stop(); + DictationManager.Controls.stop(false); runInAction(() => (self._mediaState = 0)); gumStream.getAudioTracks()[0].stop(); }, 5000); diff --git a/src/client/views/nodes/button/FontIconBox.tsx b/src/client/views/nodes/button/FontIconBox.tsx index 78ef85ec2..d3b95e25a 100644 --- a/src/client/views/nodes/button/FontIconBox.tsx +++ b/src/client/views/nodes/button/FontIconBox.tsx @@ -1,7 +1,7 @@ import { IconProp } from '@fortawesome/fontawesome-svg-core'; import { FontAwesomeIcon } from '@fortawesome/react-fontawesome'; import { Tooltip } from '@material-ui/core'; -import { action, computed, observable } from 'mobx'; +import { action, computed, observable, runInAction } from 'mobx'; import { observer } from 'mobx-react'; import * as React from 'react'; import { ColorState, SketchPicker } from 'react-color'; @@ -710,6 +710,13 @@ ScriptingGlobals.add(function toggleNoAutoLinkAnchor(checkResult?: boolean) { } if (editorView) RichTextMenu.Instance?.toggleNoAutoLinkAnchor(); }); 
+ScriptingGlobals.add(function toggleDictation(checkResult?: boolean) { + const textView = RichTextMenu.Instance?.TextView; + if (checkResult) { + return textView?._recording ? Colors.MEDIUM_BLUE : 'transparent'; + } + if (textView) runInAction(() => (textView._recording = !textView._recording)); +}); ScriptingGlobals.add(function toggleBold(checkResult?: boolean) { const editorView = RichTextMenu.Instance?.TextView?.EditorView; diff --git a/src/client/views/nodes/formattedText/FormattedTextBox.tsx b/src/client/views/nodes/formattedText/FormattedTextBox.tsx index 849deb04e..929cca1ea 100644 --- a/src/client/views/nodes/formattedText/FormattedTextBox.tsx +++ b/src/client/views/nodes/formattedText/FormattedTextBox.tsx @@ -771,7 +771,6 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent (FormattedTextBox._canAnnotate = !FormattedTextBox._canAnnotate), icon: 'expand-arrows-alt' }); uicontrols.push({ description: !this.Document._noSidebar ? 'Hide Sidebar Handle' : 'Show Sidebar Handle', event: () => (this.layoutDoc._noSidebar = !this.layoutDoc._noSidebar), icon: 'expand-arrows-alt' }); - uicontrols.push({ description: `${this.layoutDoc._showAudio ? 'Hide' : 'Show'} Dictation Icon`, event: () => (this.layoutDoc._showAudio = !this.layoutDoc._showAudio), icon: 'expand-arrows-alt' }); uicontrols.push({ description: 'Show Highlights...', noexpand: true, subitems: highlighting, icon: 'hand-point-right' }); !Doc.noviceMode && uicontrols.push({ @@ -839,7 +838,6 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent { - console.log('RECORD DICTATIN:'); DictationManager.Controls.listen({ interimHandler: this.setDictationContent, continuous: { indefinite: false }, @@ -852,11 +850,8 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent DictationManager.Controls.stop(!abort); setDictationContent = (value: string) => { - console.log('DICTATION CONETNT: ' + value); if (this._editorView && this._recordingStart) { - console.log('STEP 1'); if (this._break) { - console.log('BREAK'); const textanchorFunc = () => { const tanch = Docs.Create.TextanchorDocument({ title: 'dictation anchor' }); return this.addDocument(tanch) ? tanch : undefined; @@ -880,7 +875,6 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent @@ -1744,7 +1738,7 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent (this._recording = !this._recording)) ) }> - +
                    );
                }

@@ -1898,7 +1892,7 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent
                    {this.noSidebar || this.props.dontSelectOnLoad || !this.SidebarShown || this.sidebarWidthPercent === '0%' ? null : this.sidebarCollection}
                    {this.noSidebar || this.Document._noSidebar || this.props.dontSelectOnLoad || this.Document._singleLine ? null : this.sidebarHandle}
-                   {!this.layoutDoc._showAudio ? null : this.audioHandle}
+                   {this.audioHandle}
                );
--
cgit v1.2.3-70-g09d2
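A condensed sketch of the 5-second annotation flow that the second commit wires into DocumentView.recordAudioAnnotation (illustrative, not code lifted verbatim from the patch): the microphone is captured with MediaRecorder while DictationManager.Controls.listen produces a live transcript, and both are stopped after five seconds. Uploading the recorded blob via Networking.UploadFilesToServer and storing the resulting AudioField on the document are omitted; the function and parameter names here are assumptions.

import { DictationManager } from './DictationManager';

async function recordFiveSecondAnnotation(onTranscript: (text: string) => void, onEnd?: () => void) {
    const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
    const recorder = new MediaRecorder(stream);
    const chunks: Blob[] = [];
    recorder.ondataavailable = e => chunks.push(e.data); // the real code uploads this blob to the server

    // Live transcript alongside the audio; the promise resolves when recognition ends.
    DictationManager.Controls.listen({
        interimHandler: onTranscript,
        continuous: { indefinite: false },
    }).then(results => {
        if (results && [DictationManager.Controls.Infringed].includes(results)) {
            DictationManager.Controls.stop();
        }
        onEnd?.();
    });

    recorder.start();
    setTimeout(() => {
        recorder.stop(); // fires ondataavailable with the recorded audio
        DictationManager.Controls.stop(false); // end the dictation session, as the patch does
        stream.getAudioTracks()[0].stop(); // release the microphone
    }, 5000);
}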