Diffstat (limited to 'src/client/views')
-rw-r--r--  src/client/views/StyleProvider.tsx                          |  4
-rw-r--r--  src/client/views/collections/CollectionStackedTimeline.tsx  |  2
-rw-r--r--  src/client/views/collections/CollectionTreeView.tsx         |  1
-rw-r--r--  src/client/views/nodes/AudioBox.tsx                         | 15
-rw-r--r--  src/client/views/nodes/DocumentView.tsx                     | 10
-rw-r--r--  src/client/views/nodes/KeyValuePair.tsx                     | 14
-rw-r--r--  src/client/views/nodes/RecordingBox/RecordingBox.tsx        | 63
-rw-r--r--  src/client/views/nodes/RecordingBox/RecordingView.tsx       | 44
-rw-r--r--  src/client/views/nodes/ScreenshotBox.tsx                    | 23
-rw-r--r--  src/client/views/nodes/VideoBox.tsx                         | 10
-rw-r--r--  src/client/views/nodes/formattedText/FormattedTextBox.tsx   |  6
11 files changed, 130 insertions, 62 deletions
diff --git a/src/client/views/StyleProvider.tsx b/src/client/views/StyleProvider.tsx
index a757a0715..c2dcf071d 100644
--- a/src/client/views/StyleProvider.tsx
+++ b/src/client/views/StyleProvider.tsx
@@ -296,11 +296,11 @@ export function DefaultStyleProvider(doc: Opt<Doc>, props: Opt<DocumentViewProps
};
const audio = () => {
const audioAnnoState = (doc: Doc) => StrCast(doc.audioAnnoState, 'stopped');
- const audioAnnosCount = (doc: Doc) => StrListCast(doc[Doc.LayoutFieldKey(doc) + '-audioAnnotations']).length;
+ const audioAnnosCount = (doc: Doc) => StrListCast(doc[Doc.LayoutFieldKey(doc) + '_audioAnnotations']).length;
if (!doc || props?.renderDepth === -1 || (!audioAnnosCount(doc) && audioAnnoState(doc) === 'stopped')) return null;
const audioIconColors: { [key: string]: string } = { recording: 'red', playing: 'green', stopped: 'blue' };
return (
- <Tooltip title={<div>{StrListCast(doc[Doc.LayoutFieldKey(doc) + '-audioAnnotations-text']).lastElement()}</div>}>
+ <Tooltip title={<div>{StrListCast(doc[Doc.LayoutFieldKey(doc) + '_audioAnnotations_text']).lastElement()}</div>}>
<div className="styleProvider-audio" onPointerDown={() => DocumentManager.Instance.getFirstDocumentView(doc)?.docView?.playAnnotation()}>
<FontAwesomeIcon className="documentView-audioFont" style={{ color: audioIconColors[audioAnnoState(doc)] }} icon={'file-audio'} size="sm" />
</div>
diff --git a/src/client/views/collections/CollectionStackedTimeline.tsx b/src/client/views/collections/CollectionStackedTimeline.tsx
index ad84d859d..de58e1fe7 100644
--- a/src/client/views/collections/CollectionStackedTimeline.tsx
+++ b/src/client/views/collections/CollectionStackedTimeline.tsx
@@ -716,7 +716,7 @@ class StackedTimelineAnchor extends React.Component<StackedTimelineAnchorProps>
this._disposer = reaction(
() => this.props.currentTimecode(),
time => {
- const dictationDoc = Cast(this.props.layoutDoc['data-dictation'], Doc, null);
+ const dictationDoc = Cast(this.props.layoutDoc.data_dictation, Doc, null);
const isDictation = dictationDoc && LinkManager.Links(this.props.mark).some(link => Cast(link.link_anchor_1, Doc, null)?.annotationOn === dictationDoc);
if (
!LightboxView.LightboxDoc &&
diff --git a/src/client/views/collections/CollectionTreeView.tsx b/src/client/views/collections/CollectionTreeView.tsx
index b5c7d3f5d..eed04b3ee 100644
--- a/src/client/views/collections/CollectionTreeView.tsx
+++ b/src/client/views/collections/CollectionTreeView.tsx
@@ -426,6 +426,7 @@ export class CollectionTreeView extends CollectionSubView<Partial<collectionTree
minHeight: '100%',
}}
onWheel={e => e.stopPropagation()}
+ onClick={e => (!this.layoutDoc.forceActive ? this.props.select(false) : SelectionManager.DeselectAll())}
onDrop={this.onTreeDrop}>
<ul className={`no-indent${this.outlineMode ? '-outline' : ''}`}>{this.treeViewElements}</ul>
</div>
diff --git a/src/client/views/nodes/AudioBox.tsx b/src/client/views/nodes/AudioBox.tsx
index 6558d215a..7c409c38c 100644
--- a/src/client/views/nodes/AudioBox.tsx
+++ b/src/client/views/nodes/AudioBox.tsx
@@ -73,7 +73,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
@observable _paused: boolean = false; // is recording paused
// @observable rawDuration: number = 0; // computed from the length of the audio element when loaded
@computed get recordingStart() {
- return DateCast(this.dataDoc[this.fieldKey + '-recordingStart'])?.date.getTime();
+ return DateCast(this.dataDoc[this.fieldKey + '_recordingStart'])?.date.getTime();
}
@computed get rawDuration() {
return NumCast(this.dataDoc[`${this.fieldKey}_duration`]);
@@ -230,10 +230,10 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
recordAudioAnnotation = async () => {
this._stream = await navigator.mediaDevices.getUserMedia({ audio: true });
this._recorder = new MediaRecorder(this._stream);
- this.dataDoc[this.fieldKey + '-recordingStart'] = new DateField();
+ this.dataDoc[this.fieldKey + '_recordingStart'] = new DateField();
DocUtils.ActiveRecordings.push(this);
this._recorder.ondataavailable = async (e: any) => {
- const [{ result }] = await Networking.UploadFilesToServer({file: e.data});
+ const [{ result }] = await Networking.UploadFilesToServer({ file: e.data });
if (!(result instanceof Error)) {
this.props.Document[this.fieldKey] = new AudioField(result.accessPaths.agnostic.client);
}
@@ -359,9 +359,10 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
returnFalse,
action(() => {
const newDoc = DocUtils.GetNewTextDoc('', NumCast(this.rootDoc.x), NumCast(this.rootDoc.y) + NumCast(this.layoutDoc._height) + 10, NumCast(this.layoutDoc._width), 2 * NumCast(this.layoutDoc._height));
- Doc.GetProto(newDoc).recordingSource = this.dataDoc;
- Doc.GetProto(newDoc).recordingStart = ComputedField.MakeFunction(`self.recordingSource["${this.fieldKey}-recordingStart"]`);
- Doc.GetProto(newDoc).mediaState = ComputedField.MakeFunction('self.recordingSource.mediaState');
+ const textField = Doc.LayoutFieldKey(newDoc);
+ Doc.GetProto(newDoc)[`${textField}_recordingSource`] = this.dataDoc;
+ Doc.GetProto(newDoc)[`${textField}_recordingStart`] = ComputedField.MakeFunction(`self.${textField}_recordingSource.${this.fieldKey}_recordingStart`);
+ Doc.GetProto(newDoc).mediaState = ComputedField.MakeFunction(`self.${textField}_recordingSource.mediaState`);
if (Doc.IsInMyOverlay(this.rootDoc)) {
newDoc.overlayX = this.rootDoc.x;
newDoc.overlayY = NumCast(this.rootDoc.y) + NumCast(this.rootDoc._height);
@@ -658,7 +659,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
CollectionFreeFormDocumentView={undefined}
dataFieldKey={this.fieldKey}
fieldKey={this.annotationKey}
- dictationKey={this.fieldKey + '-dictation'}
+ dictationKey={this.fieldKey + '_dictation'}
mediaPath={this.path}
renderDepth={this.props.renderDepth + 1}
startTag={'_timecodeToShow' /* audioStart */}
diff --git a/src/client/views/nodes/DocumentView.tsx b/src/client/views/nodes/DocumentView.tsx
index c5dead708..39c8d3348 100644
--- a/src/client/views/nodes/DocumentView.tsx
+++ b/src/client/views/nodes/DocumentView.tsx
@@ -1025,9 +1025,9 @@ export class DocumentViewInternal extends DocComponent<DocumentViewInternalProps
audio: true,
})
.then(function (stream) {
- let audioTextAnnos = Cast(dataDoc[field + '-audioAnnotations-text'], listSpec('string'), null);
+ let audioTextAnnos = Cast(dataDoc[field + '_audioAnnotations_text'], listSpec('string'), null);
if (audioTextAnnos) audioTextAnnos.push('');
- else audioTextAnnos = dataDoc[field + '-audioAnnotations-text'] = new List<string>(['']);
+ else audioTextAnnos = dataDoc[field + '_audioAnnotations_text'] = new List<string>(['']);
DictationManager.Controls.listen({
interimHandler: value => (audioTextAnnos[audioTextAnnos.length - 1] = value),
continuous: { indefinite: false },
@@ -1044,9 +1044,9 @@ export class DocumentViewInternal extends DocComponent<DocumentViewInternalProps
const [{ result }] = await Networking.UploadFilesToServer({ file: e.data });
if (!(result instanceof Error)) {
const audioField = new AudioField(result.accessPaths.agnostic.client);
- const audioAnnos = Cast(dataDoc[field + '-audioAnnotations'], listSpec(AudioField), null);
+ const audioAnnos = Cast(dataDoc[field + '_audioAnnotations'], listSpec(AudioField), null);
if (audioAnnos === undefined) {
- dataDoc[field + '-audioAnnotations'] = new List([audioField]);
+ dataDoc[field + '_audioAnnotations'] = new List([audioField]);
} else {
audioAnnos.push(audioField);
}
@@ -1067,7 +1067,7 @@ export class DocumentViewInternal extends DocComponent<DocumentViewInternalProps
playAnnotation = () => {
const self = this;
const audioAnnoState = this.dataDoc.audioAnnoState ?? 'stopped';
- const audioAnnos = Cast(this.dataDoc[this.LayoutFieldKey + '-audioAnnotations'], listSpec(AudioField), null);
+ const audioAnnos = Cast(this.dataDoc[this.LayoutFieldKey + '_audioAnnotations'], listSpec(AudioField), null);
const anno = audioAnnos?.lastElement();
if (anno instanceof AudioField && audioAnnoState === 'stopped') {
new Howl({
diff --git a/src/client/views/nodes/KeyValuePair.tsx b/src/client/views/nodes/KeyValuePair.tsx
index 01acdccb7..b0d041bdd 100644
--- a/src/client/views/nodes/KeyValuePair.tsx
+++ b/src/client/views/nodes/KeyValuePair.tsx
@@ -14,6 +14,8 @@ import './KeyValueBox.scss';
import './KeyValuePair.scss';
import React = require('react');
import { DocCast } from '../../../fields/Types';
+import { Tooltip } from '@material-ui/core';
+import { DocumentOptions, FInfo } from '../../documents/Documents';
// Represents one row in a key value plane
@@ -109,11 +111,13 @@ export class KeyValuePair extends React.Component<KeyValuePairProps> {
X
</button>
<input className="keyValuePair-td-key-check" type="checkbox" style={hover} onChange={this.handleCheck} ref={this.checkbox} />
- <div className="keyValuePair-keyField" style={{ marginLeft: 20 * (props.fieldKey.match(/_/g)?.length || 0), color: keyStyle }}>
- {'('.repeat(parenCount)}
- {props.fieldKey}
- {')'.repeat(parenCount)}
- </div>
+ <Tooltip title={Object.entries(new DocumentOptions()).find((pair: [string, FInfo]) => pair[0].replace(/^_/, '') === props.fieldKey)?.[1].description}>
+ <div className="keyValuePair-keyField" style={{ marginLeft: 20 * (props.fieldKey.match(/_/g)?.length || 0), color: keyStyle }}>
+ {'('.repeat(parenCount)}
+ {props.fieldKey}
+ {')'.repeat(parenCount)}
+ </div>
+ </Tooltip>
</div>
</td>
<td className="keyValuePair-td-value" style={{ width: `${100 - this.props.keyWidth}%` }} onContextMenu={this.onContextMenu}>
diff --git a/src/client/views/nodes/RecordingBox/RecordingBox.tsx b/src/client/views/nodes/RecordingBox/RecordingBox.tsx
index 04f11a5df..8fa2861b6 100644
--- a/src/client/views/nodes/RecordingBox/RecordingBox.tsx
+++ b/src/client/views/nodes/RecordingBox/RecordingBox.tsx
@@ -4,28 +4,28 @@ import * as React from 'react';
import { VideoField } from '../../../../fields/URLField';
import { Upload } from '../../../../server/SharedMediaTypes';
import { ViewBoxBaseComponent } from '../../DocComponent';
-import { FieldView } from '../FieldView';
+import { FieldView, FieldViewProps } from '../FieldView';
import { VideoBox } from '../VideoBox';
import { RecordingView } from './RecordingView';
import { DocumentType } from '../../../documents/DocumentTypes';
import { Presentation } from '../../../util/TrackMovements';
import { Doc } from '../../../../fields/Doc';
import { Id } from '../../../../fields/FieldSymbols';
-import { DocCast } from '../../../../fields/Types';
+import { BoolCast, DocCast } from '../../../../fields/Types';
+import { ScriptingGlobals } from '../../../util/ScriptingGlobals';
+import { DocumentManager } from '../../../util/DocumentManager';
+import { Docs } from '../../../documents/Documents';
@observer
-export class RecordingBox extends ViewBoxBaseComponent() {
+export class RecordingBox extends ViewBoxBaseComponent<FieldViewProps>() {
public static LayoutString(fieldKey: string) {
return FieldView.LayoutString(RecordingBox, fieldKey);
}
private _ref: React.RefObject<HTMLDivElement> = React.createRef();
- constructor(props: any) {
- super(props);
- }
-
componentDidMount() {
+ this.props.setContentView?.(this);
Doc.SetNativeWidth(this.dataDoc, 1280);
Doc.SetNativeHeight(this.dataDoc, 720);
}
@@ -46,20 +46,63 @@ export class RecordingBox extends ViewBoxBaseComponent() {
this.dataDoc.layout = VideoBox.LayoutString(this.fieldKey);
this.dataDoc[this.props.fieldKey] = new VideoField(this.result.accessPaths.client);
- this.dataDoc[this.fieldKey + '-recorded'] = true;
+ this.dataDoc[this.fieldKey + '_recorded'] = true;
// stringify the presentation and store it
if (presentation?.movements) {
const presCopy = { ...presentation };
presCopy.movements = presentation.movements.map(movement => ({ ...movement, doc: movement.doc[Id] })) as any;
- this.dataDoc[this.fieldKey + '-presentation'] = JSON.stringify(presCopy);
+ this.dataDoc[this.fieldKey + '_presentation'] = JSON.stringify(presCopy);
}
};
+ Record: undefined | (() => void);
+ Pause: undefined | (() => void);
+ Finish: undefined | (() => void);
+ getControls = (record: () => void, pause: () => void, finish: () => void) => {
+ this.Record = record;
+ this.Pause = pause;
+ this.Finish = finish;
+ };
+
render() {
return (
<div className="recordingBox" ref={this._ref}>
- {!this.result && <RecordingView setResult={this.setResult} setDuration={this.setVideoDuration} id={DocCast(this.rootDoc.proto)?.[Id] || ''} />}
+ {!this.result && (
+ <RecordingView
+ forceTrackScreen={BoolCast(this.layoutDoc[this.fieldKey + '_trackScreen'])}
+ getControls={this.getControls}
+ setResult={this.setResult}
+ setDuration={this.setVideoDuration}
+ id={DocCast(this.rootDoc.proto)?.[Id] || ''}
+ />
+ )}
</div>
);
}
+ static screengrabber: RecordingBox | undefined;
}
+ScriptingGlobals.add(function toggleRecording(_readOnly_: boolean) {
+ if (_readOnly_) return RecordingBox.screengrabber ? true : false;
+ if (RecordingBox.screengrabber) {
+ RecordingBox.screengrabber.Pause?.();
+ setTimeout(() => {
+ RecordingBox.screengrabber?.Finish?.();
+ RecordingBox.screengrabber!.rootDoc.overlayX = 100;
+ RecordingBox.screengrabber!.rootDoc.overlayY = 100;
+ RecordingBox.screengrabber = undefined;
+ }, 100);
+ } else {
+ const screengrabber = Docs.Create.WebCamDocument('', {
+ _width: 384,
+ _height: 216,
+ });
+ screengrabber.overlayX = -400;
+ screengrabber.overlayY = 0;
+ screengrabber[Doc.LayoutFieldKey(screengrabber) + '_trackScreen'] = true;
+ Doc.AddToMyOverlay(screengrabber);
+ DocumentManager.Instance.AddViewRenderedCb(screengrabber, docView => {
+ RecordingBox.screengrabber = docView.ComponentView as RecordingBox;
+ RecordingBox.screengrabber.Record?.();
+ });
+ }
+}, 'toggle recording');
diff --git a/src/client/views/nodes/RecordingBox/RecordingView.tsx b/src/client/views/nodes/RecordingBox/RecordingView.tsx
index 51eb774e2..0e386b093 100644
--- a/src/client/views/nodes/RecordingBox/RecordingView.tsx
+++ b/src/client/views/nodes/RecordingBox/RecordingView.tsx
@@ -21,6 +21,8 @@ interface IRecordingViewProps {
setResult: (info: Upload.AccessPathInfo, presentation?: Presentation) => void;
setDuration: (seconds: number) => void;
id: string;
+ getControls: (record: () => void, pause: () => void, finish: () => void) => void;
+ forceTrackScreen: boolean;
}
const MAXTIME = 100000;
@@ -60,14 +62,14 @@ export function RecordingView(props: IRecordingViewProps) {
useEffect(() => {
if (finished) {
// make the total presentation that'll match the concatted video
- let concatPres = trackScreen && TrackMovements.Instance.concatPresentations(videos.map(v => v.presentation as Presentation));
+ let concatPres = (trackScreen || props.forceTrackScreen) && TrackMovements.Instance.concatPresentations(videos.map(v => v.presentation as Presentation));
// this async function uses the server to create the concatted video and then sets the result to it's accessPaths
(async () => {
const videoFiles = videos.map((vid, i) => new File(vid.videoChunks, `segvideo${i}.mkv`, { type: vid.videoChunks[0].type, lastModified: Date.now() }));
// upload the segments to the server and get their server access paths
- const serverPaths: string[] = (await Networking.UploadFilesToServer(videoFiles.map(file => ({file})))).map(res => (res.result instanceof Error ? '' : res.result.accessPaths.agnostic.server));
+ const serverPaths: string[] = (await Networking.UploadFilesToServer(videoFiles.map(file => ({ file })))).map(res => (res.result instanceof Error ? '' : res.result.accessPaths.agnostic.server));
// concat the segments together using post call
const result: Upload.AccessPathInfo | Error = await Networking.PostToServer('/concatVideos', serverPaths);
@@ -132,7 +134,7 @@ export function RecordingView(props: IRecordingViewProps) {
videoRecorder.current.onstart = (event: any) => {
setRecording(true);
// start the recording api when the video recorder starts
- trackScreen && TrackMovements.Instance.start();
+ (trackScreen || props.forceTrackScreen) && TrackMovements.Instance.start();
};
videoRecorder.current.onstop = () => {
@@ -147,7 +149,7 @@ export function RecordingView(props: IRecordingViewProps) {
// depending on if a presenation exists, add it to the video
const presentation = TrackMovements.Instance.yieldPresentation();
- setVideos(videos => [...videos, presentation != null && trackScreen ? { ...nextVideo, presentation } : nextVideo]);
+ setVideos(videos => [...videos, presentation != null && (trackScreen || props.forceTrackScreen) ? { ...nextVideo, presentation } : nextVideo]);
}
// reset the temporary chunks
@@ -159,9 +161,7 @@ export function RecordingView(props: IRecordingViewProps) {
};
// if this is called, then we're done recording all the segments
- const finish = (e: React.PointerEvent) => {
- e.stopPropagation();
-
+ const finish = () => {
// call stop on the video recorder if active
videoRecorder.current?.state !== 'inactive' && videoRecorder.current?.stop();
@@ -176,8 +176,7 @@ export function RecordingView(props: IRecordingViewProps) {
setFinished(true);
};
- const pause = (e: React.PointerEvent) => {
- e.stopPropagation();
+ const pause = () => {
// if recording, then this is just a new segment
videoRecorder.current?.state === 'recording' && videoRecorder.current.stop();
};
@@ -217,6 +216,10 @@ export function RecordingView(props: IRecordingViewProps) {
return toTwoDigit(minutes) + ' : ' + toTwoDigit(seconds);
};
+ useEffect(() => {
+ props.getControls(record, pause, finish);
+ }, []);
+
return (
<div className="recording-container">
<div className="video-wrapper">
@@ -227,7 +230,19 @@ export function RecordingView(props: IRecordingViewProps) {
</div>
<div className="controls">
<div className="controls-inner-container">
- <div className="record-button-wrapper">{recording ? <button className="stop-button" onPointerDown={pause} /> : <button className="record-button" onPointerDown={start} />}</div>
+ <div className="record-button-wrapper">
+ {recording ? (
+ <button
+ className="stop-button"
+ onPointerDown={e => {
+ e.stopPropagation();
+ pause();
+ }}
+ />
+ ) : (
+ <button className="record-button" onPointerDown={start} />
+ )}
+ </div>
{!recording &&
(videos.length > 0 ? (
@@ -236,7 +251,12 @@ export function RecordingView(props: IRecordingViewProps) {
<MdBackspace onPointerDown={undoPrevious} />
</IconContext.Provider>
<IconContext.Provider value={{ color: '#cc1c08', className: 'video-edit-buttons' }}>
- <FaCheckCircle onPointerDown={finish} />
+ <FaCheckCircle
+ onPointerDown={e => {
+ e.stopPropagation();
+ finish();
+ }}
+ />
</IconContext.Provider>
</div>
) : (
@@ -244,7 +264,7 @@ export function RecordingView(props: IRecordingViewProps) {
<label className="track-screen">
<input
type="checkbox"
- checked={trackScreen}
+ checked={trackScreen || props.forceTrackScreen}
onChange={e => {
setTrackScreen(e.target.checked);
}}
diff --git a/src/client/views/nodes/ScreenshotBox.tsx b/src/client/views/nodes/ScreenshotBox.tsx
index 271ff3cf8..83a29f071 100644
--- a/src/client/views/nodes/ScreenshotBox.tsx
+++ b/src/client/views/nodes/ScreenshotBox.tsx
@@ -17,6 +17,7 @@ import { DocUtils } from '../../documents/Documents';
import { DocumentType } from '../../documents/DocumentTypes';
import { Networking } from '../../Network';
import { CaptureManager } from '../../util/CaptureManager';
+import { SettingsManager } from '../../util/SettingsManager';
import { CollectionFreeFormView } from '../collections/collectionFreeForm/CollectionFreeFormView';
import { CollectionStackedTimeline } from '../collections/CollectionStackedTimeline';
import { ContextMenu } from '../ContextMenu';
@@ -25,7 +26,6 @@ import { FieldView, FieldViewProps } from './FieldView';
import { FormattedTextBox } from './formattedText/FormattedTextBox';
import './ScreenshotBox.scss';
import { VideoBox } from './VideoBox';
-import { SettingsManager } from '../../util/SettingsManager';
declare class MediaRecorder {
constructor(e: any, options?: any); // whatever MediaRecorder has
@@ -117,7 +117,7 @@ export class ScreenshotBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatabl
@observable private _videoRef: HTMLVideoElement | null = null;
@observable _screenCapture = false;
@computed get recordingStart() {
- return Cast(this.dataDoc[this.props.fieldKey + '-recordingStart'], DateField)?.date.getTime();
+ return Cast(this.dataDoc[this.props.fieldKey + '_recordingStart'], DateField)?.date.getTime();
}
constructor(props: any) {
@@ -227,13 +227,13 @@ export class ScreenshotBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatabl
this._audioRec.onstop = async (e: any) => {
const [{ result }] = await Networking.UploadFilesToServer(aud_chunks.map((file: any) => ({ file })));
if (!(result instanceof Error)) {
- this.dataDoc[this.props.fieldKey + '-audio'] = new AudioField(result.accessPaths.agnostic.client);
+ this.dataDoc[this.props.fieldKey + '_audio'] = new AudioField(result.accessPaths.agnostic.client);
}
};
this._videoRef!.srcObject = await (navigator.mediaDevices as any).getDisplayMedia({ video: true });
this._videoRec = new MediaRecorder(this._videoRef!.srcObject);
const vid_chunks: any = [];
- this._videoRec.onstart = () => (this.dataDoc[this.props.fieldKey + '-recordingStart'] = new DateField(new Date()));
+ this._videoRec.onstart = () => (this.dataDoc[this.props.fieldKey + '_recordingStart'] = new DateField(new Date()));
this._videoRec.ondataavailable = (e: any) => vid_chunks.push(e.data);
this._videoRec.onstop = async (e: any) => {
const file = new File(vid_chunks, `${this.rootDoc[Id]}.mkv`, { type: vid_chunks[0].type, lastModified: Date.now() });
@@ -270,14 +270,15 @@ export class ScreenshotBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatabl
};
setupDictation = () => {
- if (this.dataDoc[this.fieldKey + '-dictation']) return;
+ if (this.dataDoc[this.fieldKey + '_dictation']) return;
const dictationText = DocUtils.GetNewTextDoc('dictation', NumCast(this.rootDoc.x), NumCast(this.rootDoc.y) + NumCast(this.layoutDoc._height) + 10, NumCast(this.layoutDoc._width), 2 * NumCast(this.layoutDoc._height));
+ const textField = Doc.LayoutFieldKey(dictationText);
dictationText._layout_autoHeight = false;
const dictationTextProto = Doc.GetProto(dictationText);
- dictationTextProto.recordingSource = this.dataDoc;
- dictationTextProto.recordingStart = ComputedField.MakeFunction(`self.recordingSource["${this.props.fieldKey}-recordingStart"]`);
- dictationTextProto.mediaState = ComputedField.MakeFunction('self.recordingSource.mediaState');
- this.dataDoc[this.fieldKey + '-dictation'] = dictationText;
+ dictationTextProto[`${textField}_recordingSource`] = this.dataDoc;
+ dictationTextProto[`${textField}_recordingStart`] = ComputedField.MakeFunction(`self.${textField}_recordingSource.${this.fieldKey}_recordingStart`);
+ dictationTextProto.mediaState = ComputedField.MakeFunction(`self.${textField}_recordingSource.mediaState`);
+ this.dataDoc[this.fieldKey + '_dictation'] = dictationText;
};
videoPanelHeight = () => (NumCast(this.dataDoc[this.fieldKey + '_nativeHeight'], this.layoutDoc[Height]()) / NumCast(this.dataDoc[this.fieldKey + '_nativeWidth'], this.layoutDoc[Width]())) * this.props.PanelWidth();
formattedPanelHeight = () => Math.max(0, this.props.PanelHeight() - this.videoPanelHeight());
@@ -314,10 +315,10 @@ export class ScreenshotBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatabl
</CollectionFreeFormView>
</div>
<div style={{ background: SettingsManager.Instance.userColor, position: 'relative', height: this.formattedPanelHeight() }}>
- {!(this.dataDoc[this.fieldKey + '-dictation'] instanceof Doc) ? null : (
+ {!(this.dataDoc[this.fieldKey + '_dictation'] instanceof Doc) ? null : (
<FormattedTextBox
{...this.props}
- Document={DocCast(this.dataDoc[this.fieldKey + '-dictation'])}
+ Document={DocCast(this.dataDoc[this.fieldKey + '_dictation'])}
fieldKey={'text'}
PanelHeight={this.formattedPanelHeight}
select={emptyFunction}
diff --git a/src/client/views/nodes/VideoBox.tsx b/src/client/views/nodes/VideoBox.tsx
index 1f52c2d92..2177adeff 100644
--- a/src/client/views/nodes/VideoBox.tsx
+++ b/src/client/views/nodes/VideoBox.tsx
@@ -103,14 +103,14 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
// returns the path of the audio file
@computed get audiopath() {
- const field = Cast(this.props.Document[this.props.fieldKey + '-audio'], AudioField, null);
+ const field = Cast(this.props.Document[this.props.fieldKey + '_audio'], AudioField, null);
const vfield = Cast(this.dataDoc[this.fieldKey], VideoField, null);
return field?.url.href ?? vfield?.url.href ?? '';
}
// returns the presentation data if it exists, null otherwise
@computed get presentation() {
- const data = this.dataDoc[this.fieldKey + '-presentation'];
+ const data = this.dataDoc[this.fieldKey + '_presentation'];
return data ? JSON.parse(StrCast(data)) : null;
}
@@ -524,7 +524,7 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
icon: 'expand-arrows-alt',
});
// if the videobox was turned from a recording box
- if (this.dataDoc[this.fieldKey + '-recorded'] === true) {
+ if (this.dataDoc[this.fieldKey + '_recorded'] === true) {
subitems.push({
description: 'Recreate recording',
event: () => {
@@ -533,7 +533,7 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
this.dataDoc[this.props.fieldKey] = '';
this.dataDoc[this.fieldKey + '_duration'] = '';
// delete assoicated presentation data
- this.dataDoc[this.fieldKey + '-presentation'] = '';
+ this.dataDoc[this.fieldKey + '_presentation'] = '';
},
icon: 'expand-arrows-alt',
});
@@ -959,7 +959,7 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
{...this.props}
dataFieldKey={this.fieldKey}
fieldKey={this.annotationKey}
- dictationKey={this.fieldKey + '-dictation'}
+ dictationKey={this.fieldKey + '_dictation'}
mediaPath={this.audiopath}
thumbnails={() => StrListCast(this.dataDoc[this.fieldKey + '_thumbnails'])}
renderDepth={this.props.renderDepth + 1}
diff --git a/src/client/views/nodes/formattedText/FormattedTextBox.tsx b/src/client/views/nodes/formattedText/FormattedTextBox.tsx
index eea4f513e..2afbbb457 100644
--- a/src/client/views/nodes/formattedText/FormattedTextBox.tsx
+++ b/src/client/views/nodes/formattedText/FormattedTextBox.tsx
@@ -156,7 +156,7 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<FieldViewProps
return this.dataDoc?.mediaState === 'recording';
}
set _recording(value) {
- !this.dataDoc.recordingSource && (this.dataDoc.mediaState = value ? 'recording' : undefined);
+ !this.dataDoc[`${this.fieldKey}_recordingSource`] && (this.dataDoc.mediaState = value ? 'recording' : undefined);
}
@computed get config() {
this._keymap = buildKeymap(schema, this.props);
@@ -1262,9 +1262,7 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<FieldViewProps
() => this._recording,
() => {
this.stopDictation(true);
- if (this._recording) {
- this.recordDictation();
- }
+ this._recording && this.recordDictation();
}
);
if (this._recording) setTimeout(this.recordDictation);