| author    | bobzel <zzzman@gmail.com> | 2021-01-29 13:47:31 -0500 |
|-----------|---------------------------|---------------------------|
| committer | bobzel <zzzman@gmail.com> | 2021-01-29 13:47:31 -0500 |
| commit    | 80362228b691fd55b569f0f507c4ee9667644559 (patch) | |
| tree      | 0711711ac2a9e07d5d6a15e375b6ddd56f81e068 /src | |
| parent    | 42d8bd5f673341682452c7c1f59b6b4b3a33d346 (diff) | |
Changed how auto-linking to audio is implemented; added audiotag HTML tags that can be clicked to play the linked audio.
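In short: instead of inserting "[hh:mm:ss]" text plus a link mark while recording, insertTime now inserts a dedicated audiotag ProseMirror block node that stores a timeCode and the id of the linked audio anchor; toDOM emits an <audiotag> element carrying those values as data attributes, and FormattedTextBox's pointer-down handler reads them back to trigger playback on the audio document. Below is a minimal, self-contained sketch of that node spec; the standalone schema, the filler paragraph/text nodes, the locally re-declared time formatter, and the sample id are illustrative only, while the attribute and data-attribute names mirror the diff.

import { Schema, NodeSpec } from "prosemirror-model";

// Equivalent to the formatAudioTime() helper added to nodes_rts.ts: seconds -> "MM:SS" label.
const formatAudioTime = (time: number) => {
    time = Math.round(time);
    const minutes = Math.floor(time / 60) % 60;
    const seconds = time % 60;
    return `${String(minutes).padStart(2, "0")}:${String(seconds).padStart(2, "0")}`;
};

const audiotag: NodeSpec = {
    group: "block",
    attrs: { timeCode: { default: 0 }, audioId: { default: "" } },
    // The rendered element is styled by the new `audiotag` rule in FormattedTextBox.scss;
    // the data attributes are what onPointerDown reads to start playback.
    toDOM: node => ["audiotag",
        { "data-timecode": node.attrs.timeCode, "data-audioid": node.attrs.audioId },
        formatAudioTime(node.attrs.timeCode)],
    parseDOM: [{
        tag: "audiotag",
        getAttrs: (dom: any) => ({
            timeCode: Number(dom.getAttribute("data-timecode")),
            audioId: dom.getAttribute("data-audioid"),
        }),
    }],
};

// Tiny schema just to show the node round-tripping (illustrative, not the app's schema).
const schema = new Schema({
    nodes: {
        doc: { content: "block+" },
        paragraph: { group: "block", content: "inline*", toDOM: () => ["p", 0], parseDOM: [{ tag: "p" }] },
        text: { group: "inline" },
        audiotag,
    },
});

// What insertTime effectively does now: a single node insertion instead of formatted text.
const tag = schema.nodes.audiotag.create({ timeCode: 83, audioId: "someAudioAnchorId" });
// tag.attrs -> { timeCode: 83, audioId: "someAudioAnchorId" }; its label renders as "01:23".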
Diffstat (limited to 'src')
-rw-r--r-- | src/client/views/nodes/AudioBox.tsx | 11
-rw-r--r-- | src/client/views/nodes/formattedText/FormattedTextBox.scss | 11
-rw-r--r-- | src/client/views/nodes/formattedText/FormattedTextBox.tsx | 37
-rw-r--r-- | src/client/views/nodes/formattedText/nodes_rts.ts | 38
4 files changed, 75 insertions, 22 deletions
diff --git a/src/client/views/nodes/AudioBox.tsx b/src/client/views/nodes/AudioBox.tsx
index 6b9d12ac0..57b5f3ec7 100644
--- a/src/client/views/nodes/AudioBox.tsx
+++ b/src/client/views/nodes/AudioBox.tsx
@@ -43,7 +43,6 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps, AudioD
     _disposers: { [name: string]: IReactionDisposer } = {};
     _ele: HTMLAudioElement | null = null;
-    _audioRef = React.createRef<HTMLDivElement>();
     _stackedTimeline = React.createRef<CollectionStackedTimeline>();
     _recorder: any;
     _recordStart = 0;
@@ -105,26 +104,26 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps, AudioD
         this.audioState = this.path ? "paused" : undefined;

-        this._disposers.scrubbing = reaction(() => AudioBox._scrubTime, (time) => this.layoutDoc.playOnSelect && this.playFromTime(AudioBox._scrubTime));
+        //this._disposers.scrubbing = reaction(() => AudioBox._scrubTime, (time) => this.layoutDoc.playOnSelect && this.playFromTime(AudioBox._scrubTime));

         this._disposers.triggerAudio = reaction(
             () => !LinkDocPreview.TargetDoc && !FormattedTextBoxComment.linkDoc && this.props.renderDepth !== -1 ? NumCast(this.Document._triggerAudio, null) : undefined,
             start => start !== undefined && setTimeout(() => {
-                this._audioRef.current && this.playFrom(start);
+                this.playFrom(start);
                 setTimeout(() => {
                     this.Document._currentTimecode = start;
                     this.Document._triggerAudio = undefined;
                 }, 10);
-            }, this._audioRef.current ? 0 : 250), // wait for mainCont and try again to play
+            }), // wait for mainCont and try again to play
             { fireImmediately: true }
         );

         this._disposers.audioStop = reaction(
             () => this.props.renderDepth !== -1 && !LinkDocPreview.TargetDoc && !FormattedTextBoxComment.linkDoc ? Cast(this.Document._audioStop, "number", null) : undefined,
             audioStop => audioStop !== undefined && setTimeout(() => {
-                this._audioRef.current && this.Pause();
+                this.Pause();
                 setTimeout(() => this.Document._audioStop = undefined, 10);
-            }, this._audioRef.current ? 0 : 250), // wait for mainCont and try again to play
+            }), // wait for mainCont and try again to play
             { fireImmediately: true }
         );
     }
diff --git a/src/client/views/nodes/formattedText/FormattedTextBox.scss b/src/client/views/nodes/formattedText/FormattedTextBox.scss
index b04f60500..866f556ff 100644
--- a/src/client/views/nodes/formattedText/FormattedTextBox.scss
+++ b/src/client/views/nodes/formattedText/FormattedTextBox.scss
@@ -10,6 +10,17 @@
     outline: none !important;
 }

+audiotag {
+    left: 0;
+    position: absolute;
+    cursor: pointer;
+    border-radius: 10px;
+    width: 10px;
+    margin-top: -2px;
+    font-size: 4px;
+    background: lightblue;
+}
+
 .formattedTextBox-cont {
     touch-action: none;
     background: inherit;
diff --git a/src/client/views/nodes/formattedText/FormattedTextBox.tsx b/src/client/views/nodes/formattedText/FormattedTextBox.tsx
index 6914c20b4..d73fd9208 100644
--- a/src/client/views/nodes/formattedText/FormattedTextBox.tsx
+++ b/src/client/views/nodes/formattedText/FormattedTextBox.tsx
@@ -343,20 +343,16 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<(FieldViewProp
     pause = () => this._pause = true;

-    formatTime = (time: number) => {
-        const hours = Math.floor(time / 60 / 60);
-        const minutes = Math.floor(time / 60) - (hours * 60);
-        const seconds = time % 60;
-
-        return hours.toString().padStart(2, '0') + ':' + minutes.toString().padStart(2, '0') + ':' + seconds.toString().padStart(2, '0');
-    }
-
     // for inserting timestamps
     insertTime = () => {
         let linkTime;
+        let linkAnchor;
         DocListCast(this.dataDoc.links).forEach((l, i) => {
             const anchor = (l.anchor1 as Doc).annotationOn ? l.anchor1 as Doc : (l.anchor2 as Doc).annotationOn ? (l.anchor2 as Doc) : undefined;
-            if (anchor && (anchor.annotationOn as Doc).audioState === "recording") linkTime = NumCast(anchor.audioStart);
+            if (anchor && (anchor.annotationOn as Doc).audioState === "recording") {
+                linkTime = NumCast(anchor.audioStart);
+                linkAnchor = anchor;
+            }
         });
         if (this._editorView) {
             const state = this._editorView.state;
@@ -374,14 +370,13 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<(FieldViewProp
             }

             const path = (this._editorView.state.selection.$from as any).path;
-            if (linkTime && path[path.length - 3].type !== this._editorView.state.schema.nodes.code_block) {
-                const time = this.formatTime(Math.round(linkTime + Date.now() / 1000 - this._recordingStart / 1000));
+            if (linkAnchor && linkTime && path[path.length - 3].type !== this._editorView.state.schema.nodes.code_block) {
+                const time = linkTime + Date.now() / 1000 - this._recordingStart / 1000;
                 this._break = false;
-                const value = (this.layoutDoc._timeStampOnEnter ? "" : "\n") + "[" + time + "]";
                 const from = state.selection.from;
-                const para = this._editorView.state.schema.nodes.paragraph.create();
-                const replaced = this._editorView.state.tr.insertText(value).addMark(from, from + value.length + 1, mark).insert(from + value.length, para);
-                this._editorView.dispatch(replaced.setSelection(new TextSelection(replaced.doc.resolve(from + value.length + 1))));
+                const value = this._editorView.state.schema.nodes.audiotag.create({ timeCode: time, audioId: linkAnchor[Id] });
+                const replaced = this._editorView.state.tr.insert(from - 1, value);
+                this._editorView.dispatch(replaced.setSelection(new TextSelection(replaced.doc.resolve(from + 1))));
             }
         }
     }
@@ -1299,6 +1294,18 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<(FieldViewProp
     _break = false;
     _collapsed = false;
     onPointerDown = (e: React.PointerEvent): void => {
+        if ((e.target as any).tagName === "AUDIOTAG") {
+            e.preventDefault();
+            e.stopPropagation();
+            const time = (e.target as any)?.dataset?.timecode || 0;
+            const audioid = (e.target as any)?.dataset?.audioid || 0;
+            DocServer.GetRefField(audioid).then(anchor => {
+                if (anchor instanceof Doc) {
+                    const audiodoc = anchor.annotationOn as Doc;
+                    audiodoc._triggerAudio = Number(time);
+                }
+            });
+        }
         if (this._recording && !e.ctrlKey && e.button === 0) {
             this.stopDictation(true);
             this._break = true;
diff --git a/src/client/views/nodes/formattedText/nodes_rts.ts b/src/client/views/nodes/formattedText/nodes_rts.ts
index 64f7d27e5..f5bc05a2d 100644
--- a/src/client/views/nodes/formattedText/nodes_rts.ts
+++ b/src/client/views/nodes/formattedText/nodes_rts.ts
@@ -6,6 +6,14 @@ import { ParagraphNodeSpec, toParagraphDOM, getParagraphNodeAttrs } from "./Para
 const blockquoteDOM: DOMOutputSpecArray = ["blockquote", 0], hrDOM: DOMOutputSpecArray = ["hr"],
     preDOM: DOMOutputSpecArray = ["pre", ["code", 0]], brDOM: DOMOutputSpecArray = ["br"], ulDOM: DOMOutputSpecArray = ["ul", 0];

+function formatAudioTime(time: number) {
+    time = Math.round(time);
+    const hours = Math.floor(time / 60 / 60);
+    const minutes = Math.floor(time / 60) - (hours * 60);
+    const seconds = time % 60;
+
+    return minutes.toString().padStart(2, '0') + ':' + seconds.toString().padStart(2, '0');
+}
 // :: Object
 // [Specs](#model.NodeSpec) for the nodes defined in this schema.
 export const nodes: { [index: string]: NodeSpec } = {
@@ -14,6 +22,34 @@
         content: "block+"
     },

+    audiotag: {
+        group: "block",
+        attrs: {
+            timeCode: { default: 0 },
+            audioId: { default: "" }
+        },
+        toDOM(node) {
+            return ['audiotag',
+                {
+                    // style: see FormattedTextBox.scss
+                    "data-timecode": node.attrs.timeCode,
+                    "data-audioid": node.attrs.audioId,
+                },
+                formatAudioTime(node.attrs.timeCode.toString())
+            ]
+        },
+        parseDOM: [
+            {
+                tag: "audiotag", getAttrs(dom: any) {
+                    return {
+                        timeCode: dom.getAttribute("data-timecode"),
+                        audioId: dom.getAttribute("data-audioid")
+                    };
+                }
+            },
+        ]
+    },
+
     footnote: {
         group: "inline",
         content: "inline*",
@@ -315,7 +351,7 @@
             mapStyle: { default: "decimal" }, // "decimal", "multi", "bullet"
             visibility: { default: true }
         },
-        content: 'paragraph+ | (paragraph ordered_list)',
+        content: '(paragraph|audiotag)+ | ((paragraph|audiotag)+ ordered_list)',
         parseDOM: [{
             tag: "li", getAttrs(dom: any) {
                 return { mapStyle: dom.getAttribute("data-mapStyle"), bulletStyle: dom.getAttribute("data-bulletStyle") };