author    | bobzel <zzzman@gmail.com> | 2021-03-24 04:00:15 -0400
committer | bobzel <zzzman@gmail.com> | 2021-03-24 04:00:15 -0400
commit    | 7f5cee959475b4e42af3b4ea39dcf92f450fa564 (patch)
tree      | f71a232c2eca6c9926a1d12eae3421a7a0f4a070 /src
parent    | 8f8a12f6e81482d6cbc4789c3b7f74015f33f423 (diff)
renamed audioState to mediaState and cleaned up screenshotBox a little.
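The rename standardizes one observable field that both the media components and the documents they spawn read. A minimal MobX sketch of that relationship (illustrative only: RecorderModel and DependentDoc are not classes in this repository, and the real code routes the mirroring through Dash's ComputedField.MakeFunction("self.recordingSource.mediaState"), as shown in the diff below):

    import { makeAutoObservable } from "mobx";

    type MediaState = undefined | "recording" | "paused" | "playing";

    // The recorder owns the single observable state field the commit standardizes on.
    class RecorderModel {
        mediaState: MediaState = undefined;
        constructor() { makeAutoObservable(this); }
    }

    // Documents created while recording derive their state (and styling) from the recorder,
    // the way the diff wires mediaState and backgroundColor through ComputedField.
    class DependentDoc {
        constructor(readonly recordingSource: RecorderModel) { makeAutoObservable(this); }
        get mediaState(): MediaState { return this.recordingSource.mediaState; }
        get backgroundColor(): string { return this.mediaState === "playing" ? "green" : "gray"; }
    }

Any view observing backgroundColor re-renders when the recorder flips mediaState, which is why every call site in the diff has to agree on the new field name.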
Diffstat (limited to 'src')
-rw-r--r-- | src/client/documents/Documents.ts                         |   2
-rw-r--r-- | src/client/views/nodes/AudioBox.tsx                       |  28
-rw-r--r-- | src/client/views/nodes/DocumentView.tsx                   |  14
-rw-r--r-- | src/client/views/nodes/ScreenshotBox.tsx                  | 130
-rw-r--r-- | src/client/views/nodes/formattedText/FormattedTextBox.tsx |   6
5 files changed, 51 insertions, 129 deletions
diff --git a/src/client/documents/Documents.ts b/src/client/documents/Documents.ts
index cdbf43cd7..4e05793d4 100644
--- a/src/client/documents/Documents.ts
+++ b/src/client/documents/Documents.ts
@@ -714,7 +714,7 @@ export namespace Docs {
     export function AudioDocument(url: string, options: DocumentOptions = {}) {
         return InstanceFromProto(Prototypes.get(DocumentType.AUDIO), new AudioField(new URL(url)),
-            { ...options, backgroundColor: ComputedField.MakeFunction("this._audioState === 'playing' ? 'green':'gray'") as any });
+            { ...options, backgroundColor: ComputedField.MakeFunction("this._mediaState === 'playing' ? 'green':'gray'") as any });
     }
 
     export function SearchDocument(options: DocumentOptions = {}) {
diff --git a/src/client/views/nodes/AudioBox.tsx b/src/client/views/nodes/AudioBox.tsx
index 393a80648..2f7a6cfd8 100644
--- a/src/client/views/nodes/AudioBox.tsx
+++ b/src/client/views/nodes/AudioBox.tsx
@@ -57,8 +57,8 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps, AudioD
     @observable _position: number = 0;
     @observable _waveHeight: Opt<number> = this.layoutDoc._height;
     @observable _paused: boolean = false;
-    @computed get audioState(): undefined | "recording" | "paused" | "playing" { return this.dataDoc.audioState as (undefined | "recording" | "paused" | "playing"); }
-    set audioState(value) { this.dataDoc.audioState = value; }
+    @computed get mediaState(): undefined | "recording" | "paused" | "playing" { return this.dataDoc.mediaState as (undefined | "recording" | "paused" | "playing"); }
+    set mediaState(value) { this.dataDoc.mediaState = value; }
     public static SetScrubTime = action((timeInMillisFrom1970: number) => { AudioBox._scrubTime = 0; AudioBox._scrubTime = timeInMillisFrom1970; });
     @computed get recordingStart() { return Cast(this.dataDoc[this.props.fieldKey + "-recordingStart"], DateField)?.date.getTime(); }
     @computed get duration() { return NumCast(this.dataDoc[`${this.fieldKey}-duration`]); }
@@ -90,7 +90,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps, AudioD
 
     getAnchor = () => {
         return CollectionStackedTimeline.createAnchor(this.rootDoc, this.dataDoc, this.annotationKey, "_timecodeToShow" /* audioStart */, "_timecodeToHide" /* audioEnd */, this._ele?.currentTime ||
-            Cast(this.props.Document._currentTimecode, "number", null) || (this.audioState === "recording" ? (Date.now() - (this.recordingStart || 0)) / 1000 : undefined))
+            Cast(this.props.Document._currentTimecode, "number", null) || (this.mediaState === "recording" ? (Date.now() - (this.recordingStart || 0)) / 1000 : undefined))
             || this.rootDoc;
     }
@@ -104,7 +104,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps, AudioD
 
     componentDidMount() {
         this.props.setContentView?.(this); // this tells the DocumentView that this AudioBox is the "content" of the document. this allows the DocumentView to indirectly call getAnchor() on the AudioBox when making a link.
-        this.audioState = this.path ? "paused" : undefined;
+        this.mediaState = this.path ? "paused" : undefined;
         this._disposers.triggerAudio = reaction(
             () => !LinkDocPreview.LinkInfo && this.props.renderDepth !== -1 ? NumCast(this.Document._triggerAudio, null) : undefined,
@@ -131,7 +131,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps, AudioD
     // for updating the timecode
     timecodeChanged = () => {
         const htmlEle = this._ele;
-        if (this.audioState !== "recording" && htmlEle) {
+        if (this.mediaState !== "recording" && htmlEle) {
             htmlEle.duration && htmlEle.duration !== Infinity && runInAction(() => this.dataDoc[this.fieldKey + "-duration"] = htmlEle.duration);
             this.links.map(l => this.getLinkData(l)).forEach(({ la1, la2, linkTime }) => {
                 if (linkTime > NumCast(this.layoutDoc._currentTimecode) && linkTime < htmlEle.currentTime) {
@@ -145,7 +145,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps, AudioD
     // pause play back
     Pause = action(() => {
         this._ele!.pause();
-        this.audioState = "paused";
+        this.mediaState = "paused";
     });
 
     // play audio for documents created during recording
@@ -169,7 +169,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps, AudioD
             } else if (seekTimeInSeconds <= this._ele.duration) {
                 this._ele.currentTime = seekTimeInSeconds;
                 this._ele.play();
-                runInAction(() => this.audioState = "playing");
+                runInAction(() => this.mediaState = "playing");
                 if (endTime !== this.duration) {
                     this._play = setTimeout(() => this.Pause(), (endTime - seekTimeInSeconds) * 1000); // use setTimeout to play a specific duration
                 }
@@ -181,7 +181,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps, AudioD
 
     // update the recording time
     updateRecordTime = () => {
-        if (this.audioState === "recording") {
+        if (this.mediaState === "recording") {
             setTimeout(this.updateRecordTime, 30);
             if (this._paused) {
                 this._pausedTime += (new Date().getTime() - this._recordStart) / 1000;
@@ -204,7 +204,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps, AudioD
             }
         };
         this._recordStart = new Date().getTime();
-        runInAction(() => this.audioState = "recording");
+        runInAction(() => this.mediaState = "recording");
         setTimeout(this.updateRecordTime, 0);
         this._recorder.start();
         setTimeout(() => this._recorder && this.stopRecording(), 60 * 60 * 1000); // stop after an hour
@@ -224,7 +224,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps, AudioD
         this._recorder.stop();
         this._recorder = undefined;
         this.dataDoc[this.fieldKey + "-duration"] = (new Date().getTime() - this._recordStart - this.pauseTime) / 1000;
-        this.audioState = "paused";
+        this.mediaState = "paused";
         this._stream?.getAudioTracks()[0].stop();
         const ind = DocUtils.ActiveRecordings.indexOf(this);
         ind !== -1 && (DocUtils.ActiveRecordings.splice(ind, 1));
@@ -250,7 +250,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps, AudioD
             NumCast(this.props.Document._width), 2 * NumCast(this.props.Document._height));
         Doc.GetProto(newDoc).recordingSource = this.dataDoc;
         Doc.GetProto(newDoc).recordingStart = ComputedField.MakeFunction(`self.recordingSource["${this.props.fieldKey}-recordingStart"]`);
-        Doc.GetProto(newDoc).audioState = ComputedField.MakeFunction("self.recordingSource.audioState");
+        Doc.GetProto(newDoc).mediaState = ComputedField.MakeFunction("self.recordingSource.mediaState");
         this.props.addDocument?.(newDoc);
         e.stopPropagation();
     }
@@ -324,7 +324,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps, AudioD
         );
     }
 
-    playing = () => this.audioState === "playing";
+    playing = () => this.mediaState === "playing";
     playLink = (link: Doc) => {
         const stack = this._stackedTimeline.current;
         if (link.annotationOn === this.rootDoc) {
@@ -387,7 +387,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps, AudioD
                 <div className="audiobox-dictation" onClick={this.onFile}>
                     <FontAwesomeIcon style={{ width: "30px", background: !this.layoutDoc.dontAutoPlayFollowedLinks ? "yellow" : "rgba(0,0,0,0)" }} icon="file-alt" size={this.props.PanelHeight() < 36 ? "1x" : "2x"} />
                 </div>
-                {this.audioState === "recording" || this.audioState === "paused" ?
+                {this.mediaState === "recording" || this.mediaState === "paused" ?
                     <div className="recording" onClick={e => e.stopPropagation()}>
                         <div className="buttons" onClick={this.recordClick}>
                             <FontAwesomeIcon icon={"stop"} size={this.props.PanelHeight() < 36 ? "1x" : "2x"} />
@@ -405,7 +405,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps, AudioD
             <div className="audiobox-controls" style={{ pointerEvents: this._isChildActive || this.active() ? "all" : "none" }} >
                 <div className="audiobox-dictation" />
                 <div className="audiobox-player" style={{ height: `${AudioBox.heightPercent}%` }} >
-                    <div className="audiobox-playhead" style={{ width: AudioBox.playheadWidth }} title={this.audioState === "paused" ? "play" : "pause"} onClick={this.Play}> <FontAwesomeIcon style={{ width: "100%", position: "absolute", left: "0px", top: "5px", borderWidth: "thin", borderColor: "white" }} icon={this.audioState === "paused" ? "play" : "pause"} size={"1x"} /></div>
+                    <div className="audiobox-playhead" style={{ width: AudioBox.playheadWidth }} title={this.mediaState === "paused" ? "play" : "pause"} onClick={this.Play}> <FontAwesomeIcon style={{ width: "100%", position: "absolute", left: "0px", top: "5px", borderWidth: "thin", borderColor: "white" }} icon={this.mediaState === "paused" ? "play" : "pause"} size={"1x"} /></div>
                     <div className="audiobox-timeline" style={{ top: 0, height: `100%`, left: AudioBox.playheadWidth, width: `calc(100% - ${AudioBox.playheadWidth}px)`, background: "white" }}>
                         <div className="waveform">
                             {this.waveform}
diff --git a/src/client/views/nodes/DocumentView.tsx b/src/client/views/nodes/DocumentView.tsx
index 3f76db46d..aff0efdc7 100644
--- a/src/client/views/nodes/DocumentView.tsx
+++ b/src/client/views/nodes/DocumentView.tsx
@@ -158,7 +158,7 @@ export interface DocumentViewInternalProps extends DocumentViewProps {
 @observer
 export class DocumentViewInternal extends DocComponent<DocumentViewInternalProps, Document>(Document) {
     @observable _animateScalingTo = 0;
-    @observable _audioState = 0;
+    @observable _mediaState = 0;
     @observable _pendingDoubleClick = false;
     private _downX: number = 0;
     private _downY: number = 0;
@@ -772,7 +772,7 @@ export class DocumentViewInternal extends DocComponent<DocumentViewInternalProps
             style={{ height: 25, position: "absolute", top: 10, left: 10 }} >
             <FontAwesomeIcon className="documentView-audioFont"
-                style={{ color: [DocListCast(this.dataDoc[this.LayoutFieldKey + "-audioAnnotations"]).length ? "blue" : "gray", "green", "red"][this._audioState] }}
+                style={{ color: [DocListCast(this.dataDoc[this.LayoutFieldKey + "-audioAnnotations"]).length ? "blue" : "gray", "green", "red"][this._mediaState] }}
                 icon={!DocListCast(this.dataDoc[this.LayoutFieldKey + "-audioAnnotations"]).length ? "microphone" : "file-audio"} size="sm" />
         </div>;
         return <div className="documentView-contentsView"
@@ -835,7 +835,7 @@ export class DocumentViewInternal extends DocComponent<DocumentViewInternalProps
     onPointerEnter = () => {
         const self = this;
         const audioAnnos = DocListCast(this.dataDoc[this.LayoutFieldKey + "-audioAnnotations"]);
-        if (audioAnnos && audioAnnos.length && this._audioState === 0) {
+        if (audioAnnos && audioAnnos.length && this._mediaState === 0) {
             const anno = audioAnnos[Math.floor(Math.random() * audioAnnos.length)];
             anno.data instanceof AudioField && new Howl({
                 src: [anno.data.url.href],
@@ -844,10 +844,10 @@ export class DocumentViewInternal extends DocComponent<DocumentViewInternalProps
                 loop: false,
                 volume: 0.5,
                 onend: function () {
-                    runInAction(() => self._audioState = 0);
+                    runInAction(() => self._mediaState = 0);
                 }
             });
-            this._audioState = 1;
+            this._mediaState = 1;
         }
     }
     recordAudioAnnotation = () => {
@@ -872,11 +872,11 @@ export class DocumentViewInternal extends DocComponent<DocumentViewInternalProps
                     }
                 }
             };
-            runInAction(() => self._audioState = 2);
+            runInAction(() => self._mediaState = 2);
             recorder.start();
             setTimeout(() => {
                 recorder.stop();
-                runInAction(() => self._audioState = 0);
+                runInAction(() => self._mediaState = 0);
                 gumStream.getAudioTracks()[0].stop();
             }, 5000);
         });
diff --git a/src/client/views/nodes/ScreenshotBox.tsx b/src/client/views/nodes/ScreenshotBox.tsx
index 8e1a43fd1..bed6566a6 100644
--- a/src/client/views/nodes/ScreenshotBox.tsx
+++ b/src/client/views/nodes/ScreenshotBox.tsx
@@ -1,38 +1,36 @@
 import React = require("react");
 import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
-import { action, computed, IReactionDisposer, observable, runInAction } from "mobx";
+import { action, computed, IReactionDisposer, observable } from "mobx";
 import { observer } from "mobx-react";
-import * as rp from 'request-promise';
+import { DateField } from "../../../fields/DateField";
 import { Doc, WidthSym } from "../../../fields/Doc";
 import { documentSchema } from "../../../fields/documentSchemas";
+import { Id } from "../../../fields/FieldSymbols";
 import { InkTool } from "../../../fields/InkField";
-import { listSpec, makeInterface } from "../../../fields/Schema";
+import { makeInterface } from "../../../fields/Schema";
+import { ComputedField } from "../../../fields/ScriptField";
 import { Cast, NumCast } from "../../../fields/Types";
-import { VideoField, AudioField } from "../../../fields/URLField";
-import { emptyFunction, returnFalse, returnOne, returnZero, Utils, OmitKeys } from "../../../Utils";
-import { Docs, DocUtils } from "../../documents/Documents";
+import { AudioField, VideoField } from "../../../fields/URLField";
+import { emptyFunction, OmitKeys, returnFalse, returnOne, Utils } from "../../../Utils";
+import { DocUtils } from "../../documents/Documents";
+import { DocumentType } from "../../documents/DocumentTypes";
+import { Networking } from "../../Network";
+import { CurrentUserUtils } from "../../util/CurrentUserUtils";
+import { DocumentManager } from "../../util/DocumentManager";
 import { CollectionFreeFormView } from "../collections/collectionFreeForm/CollectionFreeFormView";
+import { CollectionStackedTimeline } from "../collections/CollectionStackedTimeline";
 import { ContextMenu } from "../ContextMenu";
 import { ContextMenuProps } from "../ContextMenuItem";
-import { ViewBoxBaseComponent, ViewBoxAnnotatableComponent } from "../DocComponent";
+import { ViewBoxAnnotatableComponent } from "../DocComponent";
+import { DocumentView } from "./DocumentView";
 import { FieldView, FieldViewProps } from './FieldView';
 import "./ScreenshotBox.scss";
-import { CurrentUserUtils } from "../../util/CurrentUserUtils";
-import { Networking } from "../../Network";
-import { DocumentType } from "../../documents/DocumentTypes";
 import { VideoBox } from "./VideoBox";
-import { Id } from "../../../fields/FieldSymbols";
-import { CollectionStackedTimeline } from "../collections/CollectionStackedTimeline";
-import { DateField } from "../../../fields/DateField";
-import { ComputedField } from "../../../fields/ScriptField";
-import { DocumentManager } from "../../util/DocumentManager";
-import { DocumentView } from "./DocumentView";
 const path = require('path');
 
 declare class MediaRecorder {
     constructor(e: any, options?: any);  // whatever MediaRecorder has
 }
-
 type ScreenshotDocument = makeInterface<[typeof documentSchema]>;
 const ScreenshotDocument = makeInterface(documentSchema);
@@ -40,12 +38,15 @@ const ScreenshotDocument = makeInterface(documentSchema);
 export class ScreenshotBox extends ViewBoxAnnotatableComponent<FieldViewProps, ScreenshotDocument>(ScreenshotDocument) {
     private _reactionDisposer?: IReactionDisposer;
     private _videoRef: HTMLVideoElement | null = null;
+    private _vchunks: any;
+    private _achunks: any;
+    private _vrecorder: any;
+    private _arecorder: any;
+    private _dictation: Doc | undefined;
+    private _dictationView: DocumentView | undefined;
     public static LayoutString(fieldKey: string) { return FieldView.LayoutString(ScreenshotBox, fieldKey); }
     @computed get recordingStart() { return Cast(this.dataDoc[this.props.fieldKey + "-recordingStart"], DateField)?.date.getTime(); }
-    public get player(): HTMLVideoElement | null {
-        return this._videoRef;
-    }
 
     getAnchor = () => {
         const startTime = Cast(this.layoutDoc._currentTimecode, "number", null) || (this._vrecorder ? (Date.now() - (this.recordingStart || 0)) / 1000 : undefined);
@@ -55,7 +56,7 @@ export class ScreenshotBox extends ViewBoxAnnotatableComponent<FieldViewProps, S
     }
 
     videoLoad = () => {
-        const aspect = this.player!.videoWidth / this.player!.videoHeight;
+        const aspect = this._videoRef!.videoWidth / this._videoRef!.videoHeight;
         const nativeWidth = Doc.NativeWidth(this.layoutDoc);
         const nativeHeight = Doc.NativeHeight(this.layoutDoc);
         if (!nativeWidth || !nativeHeight) {
@@ -65,49 +66,6 @@ export class ScreenshotBox extends ViewBoxAnnotatableComponent<FieldViewProps, S
         }
     }
 
-    @action public Snapshot() {
-        const width = NumCast(this.layoutDoc._width);
-        const height = NumCast(this.layoutDoc._height);
-        const canvas = document.createElement('canvas');
-        canvas.width = 640;
-        canvas.height = 640 / (Doc.NativeAspect(this.layoutDoc) || 1);
-        const ctx = canvas.getContext('2d');//draw image to canvas. scale to target dimensions
-        if (ctx) {
-            ctx.rect(0, 0, canvas.width, canvas.height);
-            ctx.fillStyle = "blue";
-            ctx.fill();
-            this._videoRef && ctx.drawImage(this._videoRef, 0, 0, canvas.width, canvas.height);
-        }
-
-        if (this._videoRef) {
-            //convert to desired file format
-            const dataUrl = canvas.toDataURL('image/png'); // can also use 'image/png'
-            // if you want to preview the captured image,
-            const filename = path.basename(encodeURIComponent("screenshot" + Utils.GenerateGuid().replace(/\..*$/, "").replace(" ", "_")));
-            ScreenshotBox.convertDataUri(dataUrl, filename).then(returnedFilename => {
-                setTimeout(() => {
-                    if (returnedFilename) {
-                        const imageSummary = Docs.Create.ImageDocument(Utils.prepend(returnedFilename), {
-                            x: NumCast(this.layoutDoc.x) + width, y: NumCast(this.layoutDoc.y),
-                            _width: 150, _height: height / width * 150, title: "--screenshot--"
-                        });
-                        if (!this.props.addDocument || this.props.addDocument === returnFalse) {
-                            const spt = this.props.ScreenToLocalTransform().inverse().transformPoint(0, 0);
-                            imageSummary.x = spt[0];
-                            imageSummary.y = spt[1];
-                            Cast(Cast(Doc.UserDoc().myOverlayDocs, Doc, null)?.data, listSpec(Doc), []).push(imageSummary);
-                        } else {
-                            this.props.addDocument?.(imageSummary);
-                        }
-                    }
-                }, 500);
-            });
-        }
-    }
-
-    componentDidMount() {
-    }
-
     componentWillUnmount() {
         this._reactionDisposer?.();
         const ind = DocUtils.ActiveRecordings.indexOf(this);
@@ -115,33 +73,13 @@ export class ScreenshotBox extends ViewBoxAnnotatableComponent<FieldViewProps, S
     }
 
     @action
-    setVideoRef = (vref: HTMLVideoElement | null) => {
-        this._videoRef = vref;
-    }
-
-    public static async convertDataUri(imageUri: string, returnedFilename: string) {
-        try {
-            const posting = Utils.prepend("/uploadURI");
-            const returnedUri = await rp.post(posting, {
-                body: {
-                    uri: imageUri,
-                    name: returnedFilename
-                },
-                json: true,
-            });
-            return returnedUri;
+    setVideoRef = (vref: HTMLVideoElement | null) => this._videoRef = vref;
 
-        } catch (e) {
-            console.log("ScreenShotBox:" + e);
-        }
-    }
     @observable _screenCapture = false;
     specificContextMenu = (e: React.MouseEvent): void => {
         const field = Cast(this.dataDoc[this.fieldKey], VideoField);
         if (field) {
-            const url = field.url.href;
             const subitems: ContextMenuProps[] = [];
-            subitems.push({ description: "Take Snapshot", event: () => this.Snapshot(), icon: "expand-arrows-alt" });
             subitems.push({ description: "Screen Capture", event: this.toggleRecording, icon: "expand-arrows-alt" });
             ContextMenu.Instance.addItem({ description: "Options...", subitems: subitems, icon: "video" });
         }
@@ -160,13 +98,6 @@ export class ScreenshotBox extends ViewBoxAnnotatableComponent<FieldViewProps, S
         </video>;
     }
 
-    _vchunks: any;
-    _achunks: any;
-    _vrecorder: any;
-    _arecorder: any;
-    _dictation: Doc | undefined;
-    _dictationView: DocumentView | undefined;
-
     toggleRecording = action(async () => {
         this._screenCapture = !this._screenCapture;
         if (this._screenCapture) {
@@ -199,12 +130,12 @@ export class ScreenshotBox extends ViewBoxAnnotatableComponent<FieldViewProps, S
             setTimeout(() => this._dictationView = DocumentManager.Instance.getDocumentView(this._dictation!));
             this._arecorder.start();
             this._vrecorder.start();
-            this.dataDoc.audioState = "recording";
+            this.dataDoc.mediaState = "recording";
             DocUtils.ActiveRecordings.push(this);
         } else {
             this._arecorder.stop();
            this._vrecorder.stop();
-            this.dataDoc.audioState = "paused";
+            this.dataDoc.mediaState = "paused";
             const ind = DocUtils.ActiveRecordings.indexOf(this);
             ind !== -1 && (DocUtils.ActiveRecordings.splice(ind, 1));
         }
@@ -217,7 +148,7 @@ export class ScreenshotBox extends ViewBoxAnnotatableComponent<FieldViewProps, S
         const dictationTextProto = Doc.GetProto(dictationText);
         dictationTextProto.recordingSource = this.dataDoc;
         dictationTextProto.recordingStart = ComputedField.MakeFunction(`self.recordingSource["${this.props.fieldKey}-recordingStart"]`);
-        dictationTextProto.audioState = ComputedField.MakeFunction("self.recordingSource.audioState");
+        dictationTextProto.mediaState = ComputedField.MakeFunction("self.recordingSource.mediaState");
         this.props.addDocument?.(dictationText) || this.props.addDocTab(dictationText, "add:bottom");
         return dictationText;
     }
@@ -227,18 +158,9 @@ export class ScreenshotBox extends ViewBoxAnnotatableComponent<FieldViewProps, S
             <div className="screenshotBox-recorder" key="snap" onPointerDown={this.toggleRecording} >
                 <FontAwesomeIcon icon="file" size="lg" />
             </div>
-            <div className="screenshotBox-snapshot" key="cam" onPointerDown={this.onSnapshot} >
-                <FontAwesomeIcon icon="camera" size="lg" />
-            </div>
         </div>);
     }
 
-    onSnapshot = (e: React.PointerEvent) => {
-        this.Snapshot();
-        e.stopPropagation();
-        e.preventDefault();
-    }
-
     contentFunc = () => [this.content];
     render() {
         return (<div className="videoBox" onContextMenu={this.specificContextMenu}
diff --git a/src/client/views/nodes/formattedText/FormattedTextBox.tsx b/src/client/views/nodes/formattedText/FormattedTextBox.tsx
index 85c2a7cb3..2a1cc854c 100644
--- a/src/client/views/nodes/formattedText/FormattedTextBox.tsx
+++ b/src/client/views/nodes/formattedText/FormattedTextBox.tsx
@@ -124,9 +124,9 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<(FieldViewProp
     @computed get scrollHeight() { return NumCast(this.rootDoc[this.fieldKey + "-scrollHeight"]); }
     @computed get sidebarHeight() { return NumCast(this.rootDoc[this.SidebarKey + "-height"]); }
     @computed get titleHeight() { return this.props.styleProvider?.(this.layoutDoc, this.props, StyleProp.HeaderMargin) || 0; }
-    @computed get _recording() { return this.dataDoc?.audioState === "recording"; }
+    @computed get _recording() { return this.dataDoc?.mediaState === "recording"; }
     set _recording(value) {
-        !this.dataDoc.recordingSource && (this.dataDoc.audioState = value ? "recording" : undefined);
+        !this.dataDoc.recordingSource && (this.dataDoc.mediaState = value ? "recording" : undefined);
     }
     @computed get config() {
         this._keymap = buildKeymap(schema, this.props);
@@ -307,7 +307,7 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<(FieldViewProp
         let link;
         DocListCast(this.dataDoc.links).forEach((l, i) => {
             const anchor = (l.anchor1 as Doc).annotationOn ? l.anchor1 as Doc : (l.anchor2 as Doc).annotationOn ? (l.anchor2 as Doc) : undefined;
-            if (anchor && (anchor.annotationOn as Doc).audioState === "recording") {
+            if (anchor && (anchor.annotationOn as Doc).mediaState === "recording") {
                 linkTime = NumCast(anchor._timecodeToShow /* audioStart */);
                 linkAnchor = anchor;
                 link = l;
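After the cleanup, ScreenshotBox.toggleRecording still drives a screen recorder and a microphone recorder in parallel and flips dataDoc.mediaState between "recording" and "paused". A self-contained sketch of that capture pattern using only standard browser APIs (getDisplayMedia, getUserMedia, MediaRecorder); startCapture and save are illustrative names, not identifiers from this repository:

    // Sketch only: capture screen video and microphone audio with two MediaRecorders,
    // roughly the shape of ScreenshotBox.toggleRecording.
    async function startCapture(save: (kind: "video" | "audio", data: Blob) => void) {
        const vstream = await navigator.mediaDevices.getDisplayMedia({ video: true });
        const astream = await navigator.mediaDevices.getUserMedia({ audio: true });
        const vchunks: Blob[] = [];
        const achunks: Blob[] = [];
        const vrecorder = new MediaRecorder(vstream);
        const arecorder = new MediaRecorder(astream);
        vrecorder.ondataavailable = (e: BlobEvent) => { if (e.data.size) vchunks.push(e.data); };
        arecorder.ondataavailable = (e: BlobEvent) => { if (e.data.size) achunks.push(e.data); };
        // dataavailable fires before stop, so the chunk lists are complete in these handlers.
        vrecorder.onstop = () => save("video", new Blob(vchunks, { type: "video/webm" }));
        arecorder.onstop = () => save("audio", new Blob(achunks, { type: "audio/webm" }));
        vrecorder.start();
        arecorder.start();
        // The returned disposer plays the role of the "paused" branch of the toggle.
        return () => {
            arecorder.stop();
            vrecorder.stop();
            [...vstream.getTracks(), ...astream.getTracks()].forEach(t => t.stop());
        };
    }

The commit's dictation document then only needs to mirror the recorder's mediaState (via the ComputedField wiring above) to stay in sync with this start/stop cycle.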