From a793b7e981e46b7c98bee6cce3faaf3a5b05f6ae Mon Sep 17 00:00:00 2001 From: bobzel Date: Sun, 26 Sep 2021 02:55:23 -0400 Subject: fixed warnings, fixed bug following link w/auto play. plus refactorings. --- src/client/views/AudioWaveform.tsx | 11 +- .../collections/CollectionStackedTimeline.tsx | 78 ++-- src/client/views/nodes/AudioBox.tsx | 291 +++++++-------- src/client/views/nodes/LabelBox.tsx | 2 +- src/client/views/nodes/VideoBox.tsx | 405 ++++++++++----------- 5 files changed, 373 insertions(+), 414 deletions(-) (limited to 'src') diff --git a/src/client/views/AudioWaveform.tsx b/src/client/views/AudioWaveform.tsx index 0e9c00656..270b3869c 100644 --- a/src/client/views/AudioWaveform.tsx +++ b/src/client/views/AudioWaveform.tsx @@ -25,14 +25,13 @@ export interface AudioWaveformProps { export class AudioWaveform extends React.Component { public static NUMBER_OF_BUCKETS = 100; _disposer: IReactionDisposer | undefined; - @computed get _waveHeight() { - return Math.max(50, this.props.PanelHeight()); - } - + @computed get waveHeight() { return Math.max(50, this.props.PanelHeight()); } @computed get clipStart() { return this.props.clipStart; } @computed get clipEnd() { return this.props.clipEnd; } - audioBucketField = (start: number, end: number) => { return "audioBuckets/" + start.toFixed(2).replace(".", "_") + "/" + end.toFixed(2).replace(".", "_"); } @computed get audioBuckets() { return Cast(this.props.layoutDoc[this.audioBucketField(this.clipStart, this.clipEnd)], listSpec("number"), []); } + + audioBucketField = (start: number, end: number) => "audioBuckets/" + start.toFixed(2).replace(".", "_") + "/" + end.toFixed(2).replace(".", "_"); + componentWillUnmount() { this._disposer?.(); } @@ -87,7 +86,7 @@ export class AudioWaveform extends React.Component {
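A minimal sketch (not part of the patch) of the bucketing behind the audioBuckets field read above: raw samples are reduced to NUMBER_OF_BUCKETS peak values stored under a key built by audioBucketField(start, end). audioBucketField mirrors the diff; bucketize and its peak-per-bucket choice are illustrative assumptions, not the Dash API.

const NUMBER_OF_BUCKETS = 100;
const audioBucketField = (start: number, end: number) =>
    "audioBuckets/" + start.toFixed(2).replace(".", "_") + "/" + end.toFixed(2).replace(".", "_");

// Reduce raw PCM samples to one peak amplitude per bucket; each peak drives one waveform bar.
function bucketize(samples: Float32Array, buckets = NUMBER_OF_BUCKETS): number[] {
    const per = Math.max(1, Math.floor(samples.length / buckets));
    const out: number[] = [];
    for (let b = 0; b < buckets; b++) {
        let peak = 0;
        for (let i = b * per; i < Math.min(samples.length, (b + 1) * per); i++) {
            peak = Math.max(peak, Math.abs(samples[i]));
        }
        out.push(peak);
    }
    return out;
}
// e.g. store under the clip-specific key: layoutDoc[audioBucketField(clipStart, clipEnd)] = bucketize(samples)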
- ); - } - constructor(props: any) { super(props); // onClick play scripts @@ -157,8 +129,23 @@ export class CollectionStackedTimeline extends CollectionSubView< } } - anchorStart = (anchor: Doc) => NumCast(anchor._timecodeToShow, NumCast(anchor[this.props.startTag])) - anchorEnd = (anchor: Doc, val: any = null) => NumCast(anchor._timecodeToHide, NumCast(anchor[this.props.endTag], val)); + public get IsTrimming() { return this._trimming; } + + @action + public StartTrimming(scope: TrimScope) { + this._trimStart = this.clipStart; + this._trimEnd = this.clipEnd; + this._trimming = scope; + } + @action + public StopTrimming() { + this.layoutDoc.clipStart = this.trimStart; + this.layoutDoc.clipEnd = this.trimEnd; + this._trimming = TrimScope.None; + } + + anchorStart = (anchor: Doc) => NumCast(anchor._timecodeToShow, NumCast(anchor[this.props.startTag])); + anchorEnd = (anchor: Doc, val: any = null) => NumCast(anchor._timecodeToHide, NumCast(anchor[this.props.endTag], val) ?? null); toTimeline = (screen_delta: number, width: number) => { return Math.max( this.clipStart, @@ -345,13 +332,13 @@ export class CollectionStackedTimeline extends CollectionSubView< const localPt = this.props.ScreenToLocalTransform().transformPoint(de.x, de.y); const x = localPt[0] - docDragData.offset[0]; const timelinePt = this.toTimeline(x, this.props.PanelWidth()); - for (let i = 0; i < docDragData.droppedDocuments.length; i++) { - const d = Doc.GetProto(docDragData.droppedDocuments[i]); + docDragData.droppedDocuments.forEach(drop => { + const d = Doc.GetProto(drop); if (this.anchorEnd(d) !== undefined) { d[d._timecodeToHide === undefined ? this.props.endTag : "_timecodeToHide"] = timelinePt + this.anchorEnd(d) - this.anchorStart(d); } d[d._timecodToShow === undefined ? this.props.startTag : "_timecodToShow"] = timelinePt; - } + }); return true; } @@ -483,9 +470,12 @@ export class CollectionStackedTimeline extends CollectionSubView< } dictationHeightPercent = 50; - dictationHeight = () => (this.props.PanelHeight() * (100 - this.dictationHeightPercent)) / 100 - timelineContentHeight = () => (this.props.PanelHeight() * this.dictationHeightPercent) / 100 - dictationScreenToLocalTransform = () => this.props.ScreenToLocalTransform().translate(0, -this.timelineContentHeight()) + dictationHeight = () => (this.props.PanelHeight() * (100 - this.dictationHeightPercent)) / 100; + timelineContentHeight = () => (this.props.PanelHeight() * this.dictationHeightPercent) / 100; + dictationScreenToLocalTransform = () => this.props.ScreenToLocalTransform().translate(0, -this.timelineContentHeight()); + isContentActive = () => this.props.isSelected() || this.props.isContentActive(); + currentTimecode = () => this.currentTime; + @computed get renderDictation() { const dictation = Cast(this.dataDoc[this.props.dictationKey], Doc, null); return !dictation ? null : ( @@ -537,9 +527,19 @@ export class CollectionStackedTimeline extends CollectionSubView<
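A minimal sketch (not part of the patch) of the trim lifecycle added above: StartTrimming snapshots the committed clip bounds into live trim bounds, StopTrimming commits the dragged bounds back. Field names mirror the diff; the TrimScope members other than None and the standalone-class framing are assumptions.

enum TrimScope { None, Clip, All }   // None appears in the diff; Clip/All are assumed members

class TrimModel {
    clipStart = 0; clipEnd = 30;     // committed bounds (layoutDoc.clipStart / clipEnd)
    trimStart = 0; trimEnd = 30;     // live bounds while the trim handles are dragged
    private trimming = TrimScope.None;
    get IsTrimming() { return this.trimming !== TrimScope.None; }
    StartTrimming(scope: TrimScope) {
        this.trimStart = this.clipStart;   // begin editing from the committed bounds
        this.trimEnd = this.clipEnd;
        this.trimming = scope;
    }
    StopTrimming() {
        this.clipStart = this.trimStart;   // commit the dragged bounds
        this.clipEnd = this.trimEnd;
        this.trimming = TrimScope.None;
    }
}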
     );
   }
+  @computed get selectionContainer() {
+    const markerEnd = CollectionStackedTimeline.SelectingRegion === this ? this.currentTime : this._markerEnd;
+    return markerEnd === undefined ? null : (
+ ); + } - isContentActive = () => this.props.isSelected() || this.props.isContentActive(); - currentTimecode = () => this.currentTime; render() { const timelineContentWidth = this.props.PanelWidth(); const overlaps: { @@ -760,7 +760,7 @@ class StackedTimelineAnchor extends React.Component e, (e) => { if (!undo) undo = UndoManager.StartBatch("drag anchor"); - return changeAnchor(anchor, left, newTime(e)) + return changeAnchor(anchor, left, newTime(e)); }, (e) => { this.props.setTime(newTime(e)); diff --git a/src/client/views/nodes/AudioBox.tsx b/src/client/views/nodes/AudioBox.tsx index 6e6558030..fa78d2301 100644 --- a/src/client/views/nodes/AudioBox.tsx +++ b/src/client/views/nodes/AudioBox.tsx @@ -1,6 +1,6 @@ import React = require("react"); import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; -import { action, computed, IReactionDisposer, observable, reaction, runInAction } from "mobx"; +import { action, computed, IReactionDisposer, observable, runInAction } from "mobx"; import { observer } from "mobx-react"; import { DateField } from "../../../fields/DateField"; import { Doc, DocListCast } from "../../../fields/Doc"; @@ -19,10 +19,8 @@ import { CollectionStackedTimeline, TrimScope } from "../collections/CollectionS import { ContextMenu } from "../ContextMenu"; import { ContextMenuProps } from "../ContextMenuItem"; import { ViewBoxAnnotatableComponent, ViewBoxAnnotatableProps } from "../DocComponent"; -import { Colors } from "../global/globalEnums"; import "./AudioBox.scss"; import { FieldView, FieldViewProps } from "./FieldView"; -import { LinkDocPreview } from "./LinkDocPreview"; declare class MediaRecorder { constructor(e: any); // whatever MediaRecorder has @@ -36,10 +34,14 @@ enum media_state { Recording = "recording", Paused = "paused", Playing = "playing" -}; +} @observer export class AudioBox extends ViewBoxAnnotatableComponent(AudioDocument) { public static LayoutString(fieldKey: string) { return FieldView.LayoutString(AudioBox, fieldKey); } + public static SetScrubTime = action((timeInMillisFrom1970: number) => { + AudioBox._scrubTime = 0; + AudioBox._scrubTime = timeInMillisFrom1970; + }); public static Enabled = false; static playheadWidth = 40; // width of playhead static heightPercent = 75; // height of timeline in percent of height of audioBox. @@ -63,13 +65,30 @@ export class AudioBox extends ViewBoxAnnotatableComponent { - AudioBox._scrubTime = 0; - AudioBox._scrubTime = timeInMillisFrom1970; - }); + get timeline() { return this._stackedTimeline.current; } // can't be computed since it's not observable + + componentWillUnmount() { + this._dropDisposer?.(); + Object.values(this._disposers).forEach((disposer) => disposer?.()); + const ind = DocUtils.ActiveRecordings.indexOf(this); + ind !== -1 && DocUtils.ActiveRecordings.splice(ind, 1); + } + + @action + componentDidMount() { + this.props.setContentView?.(this); // this tells the DocumentView that this AudioBox is the "content" of the document. this allows the DocumentView to indirectly call getAnchor() on the AudioBox when making a link. + + this.mediaState = this.path ? 
media_state.Paused : undefined as any as media_state; + + this.path && this.setAnchorTime(NumCast(this.layoutDoc.clipStart)); + this.path && this.timecodeChanged(); + } getLinkData(l: Doc) { let la1 = l.anchor1 as Doc; @@ -100,34 +119,15 @@ export class AudioBox extends ViewBoxAnnotatableComponent disposer?.()); - const ind = DocUtils.ActiveRecordings.indexOf(this); - ind !== -1 && DocUtils.ActiveRecordings.splice(ind, 1); - } - - @action - componentDidMount() { - this.props.setContentView?.(this); // this tells the DocumentView that this AudioBox is the "content" of the document. this allows the DocumentView to indirectly call getAnchor() on the AudioBox when making a link. - - this.mediaState = this.path ? media_state.Paused : undefined as any as media_state; - - this.path && this.setAnchorTime(NumCast(this.layoutDoc.clipStart)); - this.path && this.timecodeChanged(); - } - // for updating the timecode @action timecodeChanged = () => { if (this.mediaState !== media_state.Recording && this._ele) { this.links - .map((l) => this.getLinkData(l)) + .map(l => this.getLinkData(l)) .forEach(({ la1, la2, linkTime }) => { - if ( - linkTime > NumCast(this.layoutDoc._currentTimecode) && - linkTime < this._ele!.currentTime - ) { + if (linkTime > NumCast(this.layoutDoc._currentTimecode) && + linkTime < this._ele!.currentTime) { Doc.linkFollowHighlight(la1); } }); @@ -135,23 +135,11 @@ export class AudioBox extends ViewBoxAnnotatableComponent { - this._ele!.pause(); - this.mediaState = media_state.Paused; - }); - - // play audio for documents created during recording - playFromTime = (absoluteTime: number) => { - this.recordingStart && - this.playFrom((absoluteTime - this.recordingStart) / 1000); - } - // play back the audio from time @action - playFrom = (seekTimeInSeconds: number, endTime?: number, fullPlay: boolean = false): any => { - clearTimeout(this._play); - if (Number.isNaN(this._ele?.duration)) { + playFrom = (seekTimeInSeconds: number, endTime?: number, fullPlay: boolean = false) => { + clearTimeout(this._play); // abort any previous clip ending + if (Number.isNaN(this._ele?.duration)) { // audio element isn't loaded yet... 
wait 1/2 second and try again setTimeout(() => this.playFrom(seekTimeInSeconds, endTime), 500); } else if (this.timeline && this._ele && AudioBox.Enabled) { @@ -160,18 +148,13 @@ export class AudioBox extends ViewBoxAnnotatableComponent= 0 && this.timeline.trimStart <= end && seekTimeInSeconds <= this.timeline.trimEnd) { this._ele.currentTime = start; this._ele.play(); - runInAction(() => this.mediaState = media_state.Playing); - if (end !== this.timeline.clipDuration) { - return this._play = setTimeout( - () => { - if (fullPlay) this.setAnchorTime(this.timeline!.trimStart); - this.Pause(); - }, - (end - start) * 1000 - ); // use setTimeout to play a specific duration - } - } else if (seekTimeInSeconds < 0 && seekTimeInSeconds > -1) { - setTimeout(() => this.playFrom(0), -seekTimeInSeconds * 1000); + this.mediaState = media_state.Playing; + this._play = setTimeout( + () => { + if (fullPlay) this.setAnchorTime(this.timeline!.trimStart); + this.Pause(); + }, + (end - start) * 1000); } else { this.Pause(); } @@ -185,8 +168,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent this.mediaState = media_state.Recording); - setTimeout(this.updateRecordTime, 0); + setTimeout(this.updateRecordTime); this._recorder.start(); - setTimeout(() => this.stopRecording(), 60 * 60 * 1000); // stop after an hour + setTimeout(this.stopRecording, 60 * 60 * 1000); // stop after an hour + } + + @action + stopRecording = () => { + if (this._recorder) { + this._recorder.stop(); + this._recorder = undefined; + this.dataDoc[this.fieldKey + "-duration"] = (new Date().getTime() - this._recordStart - this.pauseTime) / 1000; + this.mediaState = media_state.Paused; + this._stream?.getAudioTracks()[0].stop(); + const ind = DocUtils.ActiveRecordings.indexOf(this); + ind !== -1 && DocUtils.ActiveRecordings.splice(ind, 1); + } } // context menu @@ -243,22 +238,8 @@ export class AudioBox extends ViewBoxAnnotatableComponent { - if (this._recorder) { - this._recorder.stop(); - this._recorder = undefined; - this.dataDoc[this.fieldKey + "-duration"] = - (new Date().getTime() - this._recordStart - this.pauseTime) / 1000; - this.mediaState = media_state.Paused; - this._stream?.getAudioTracks()[0].stop(); - const ind = DocUtils.ActiveRecordings.indexOf(this); - ind !== -1 && DocUtils.ActiveRecordings.splice(ind, 1); - } - }); - // button for starting and stopping the recording - recordClick = (e: React.MouseEvent) => { + Record = (e: React.MouseEvent) => { if (e.button === 0 && !e.ctrlKey) { this._recorder ? this.stopRecording() : this.recordAudioAnnotation(); e.stopPropagation(); @@ -267,12 +248,19 @@ export class AudioBox extends ViewBoxAnnotatableComponent { - const eleTime = this._ele!.currentTime; - const start = eleTime === this.timeline?.trimDuration ? this.timeline.trimStart : eleTime; + const eleTime = this._ele?.currentTime || 0; + const start = eleTime === this.timeline?.trimEnd ? this.timeline.trimStart : eleTime; this.playFrom(start, undefined, true); e?.stopPropagation?.(); } + // pause play back + @action + Pause = () => { + this._ele?.pause(); + this.mediaState = media_state.Paused; + } + // creates a text document for dictation onFile = (e: any) => { const newDoc = CurrentUserUtils.GetNewTextDoc( @@ -302,27 +290,6 @@ export class AudioBox extends ViewBoxAnnotatableComponent { - const duration = this._ele?.duration; - if (duration && duration !== Infinity) { - this.dataDoc[this.fieldKey + "-duration"] = duration; - } - })} - className={`audiobox-control${this.props.isContentActive() ? 
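A minimal sketch (not part of the patch) of the recording flow in recordAudioAnnotation above, using only standard MediaRecorder/getUserMedia calls; the one-hour cap mirrors the diff, while the helper name and callback shape are assumptions.

function recordAudio(onDone: (chunks: Blob[]) => void, maxMs = 60 * 60 * 1000) {
    navigator.mediaDevices.getUserMedia({ audio: true }).then(stream => {
        const recorder = new MediaRecorder(stream);
        const chunks: Blob[] = [];
        recorder.ondataavailable = (e: BlobEvent) => chunks.push(e.data); // one Blob per chunk
        recorder.onstop = () => {
            stream.getAudioTracks()[0].stop();   // release the microphone, as stopRecording does
            onDone(chunks);                      // caller uploads, e.g. via Networking.UploadFilesToServer
        };
        recorder.start();
        setTimeout(() => recorder.state !== "inactive" && recorder.stop(), maxMs); // hour cap, as above
    });
}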
"-interactive" : ""}`}> - - Not supported. - ; - } - // pause the time during recording phase @action recordPause = (e: React.MouseEvent) => { @@ -341,14 +308,12 @@ export class AudioBox extends ViewBoxAnnotatableComponent this.mediaState === media_state.Playing; playLink = (link: Doc) => { if (link.annotationOn === this.rootDoc) { if (!this.layoutDoc.dontAutoPlayFollowedLinks) { this.playFrom(this.timeline?.anchorStart(link) || 0, this.timeline?.anchorEnd(link)); } else { - this._ele!.currentTime = this.layoutDoc._currentTimecode = - this.timeline?.anchorStart(link) || 0; + this._ele!.currentTime = this.layoutDoc._currentTimecode = this.timeline?.anchorStart(link) || 0; } } else { this.links @@ -368,6 +333,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent this.mediaState === media_state.Playing; isActiveChild = () => this._isAnyChildContentActive; timelineWhenChildContentsActiveChanged = (isActive: boolean) => this.props.whenChildContentsActiveChanged( @@ -380,54 +346,21 @@ export class AudioBox extends ViewBoxAnnotatableComponent { - (this._ele!.currentTime = this.layoutDoc._currentTimecode = time); - } + setAnchorTime = (time: number) => this._ele!.currentTime = this.layoutDoc._currentTimecode = time; + timelineWidth = () => this.props.PanelWidth() - AudioBox.playheadWidth; timelineHeight = () => (((this.props.PanelHeight() * AudioBox.heightPercent) / 100) * AudioBox.heightPercent) / 100 // panelHeight * heightPercent is player height. * heightPercent is timeline height (as per css inline) - timelineWidth = () => this.props.PanelWidth() - AudioBox.playheadWidth; - @computed get renderTimeline() { - return ( - - ); - } - // hides trim controls and displays new clip + + @undoBatch - finishTrim = action(() => { + finishTrim = () => { // hides trim controls and displays new clip this.Pause(); this.setAnchorTime(Math.max(Math.min(this.timeline?.trimEnd || 0, this._ele!.currentTime), this.timeline?.trimStart || 0)); this.timeline?.StopTrimming(); - }); + } + startTrim = (scope: TrimScope) => { this.Pause(); this.timeline?.StartTrimming(scope); @@ -444,6 +377,18 @@ export class AudioBox extends ViewBoxAnnotatableComponent { + if (r && this.timeline) { + this._dropDisposer?.(); + this._dropDisposer = DragManager.MakeDropTarget(r, + (e, de) => { + const [xp, yp] = this.props.ScreenToLocalTransform().transformPoint(de.x, de.y); + de.complete.docDragData && this.timeline!.internalDocDrop(e, de, de.complete.docDragData, xp); + }, + this.layoutDoc, undefined); + } + } + @computed get recordingControls() { return
@@ -453,8 +398,8 @@ export class AudioBox extends ViewBoxAnnotatableComponent
{[media_state.Recording, media_state.Playing].includes(this.mediaState) ? -
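A condensed sketch (not part of the patch) of the playback-window logic in playFrom above: clamp the requested seek into the trimmed clip, start the media element there, and schedule the stop for the end of the window. Names mirror the diff; the free-function framing is an assumption.

function playWindow(ele: HTMLMediaElement, seek: number, trimStart: number, trimEnd: number,
    pause: () => void): number | undefined {
    if (seek < 0 || seek > trimEnd) { pause(); return undefined; }  // outside the clip: don't start
    const start = Math.max(seek, trimStart);   // never start before the trim-in point
    ele.currentTime = start;
    ele.play();
    return window.setTimeout(pause, (trimEnd - start) * 1000);      // stop at the trim-out point
}

The returned timer id plays the role of _play in the diff: a later call can clearTimeout it to abort the previous clip ending.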
e.stopPropagation()}> -
+
e.stopPropagation()}> +
@@ -522,16 +467,52 @@ export class AudioBox extends ViewBoxAnnotatableComponent; } - setupTimelineDrop = (r: HTMLDivElement | null) => { - if (r && this.timeline) { - this._dropDisposer?.(); - this._dropDisposer = DragManager.MakeDropTarget(r, - (e, de) => { - const [xp, yp] = this.props.ScreenToLocalTransform().transformPoint(de.x, de.y); - de.complete.docDragData && this.timeline!.internalDocDrop(e, de, de.complete.docDragData, xp); - }, - this.layoutDoc, undefined); - } + @computed get renderTimeline() { + return ( + + ); + } + // returns the html audio element + @computed get audio() { + return ; } render() { diff --git a/src/client/views/nodes/LabelBox.tsx b/src/client/views/nodes/LabelBox.tsx index 935c878ee..97b1aac86 100644 --- a/src/client/views/nodes/LabelBox.tsx +++ b/src/client/views/nodes/LabelBox.tsx @@ -111,7 +111,7 @@ export class LabelBox extends ViewBoxBaseComponent<(FieldViewProps & LabelBoxPro verticalAlign: "center", textAlign: "center", whiteSpace: "nowrap" - }) + }); } }}>{label.startsWith("#") ? (null) : label}
diff --git a/src/client/views/nodes/VideoBox.tsx b/src/client/views/nodes/VideoBox.tsx index 3435c2a24..2befb4128 100644 --- a/src/client/views/nodes/VideoBox.tsx +++ b/src/client/views/nodes/VideoBox.tsx @@ -1,5 +1,6 @@ import React = require("react"); import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; +import { Tooltip } from "@material-ui/core"; import { action, computed, IReactionDisposer, observable, ObservableMap, reaction, runInAction, untracked } from "mobx"; import { observer } from "mobx-react"; import * as rp from 'request-promise'; @@ -8,13 +9,16 @@ import { documentSchema } from "../../../fields/documentSchemas"; import { InkTool } from "../../../fields/InkField"; import { makeInterface } from "../../../fields/Schema"; import { Cast, NumCast, StrCast } from "../../../fields/Types"; -import { AudioField, nullAudio, VideoField } from "../../../fields/URLField"; -import { emptyFunction, formatTime, OmitKeys, returnOne, setupMoveUpEvents, Utils, returnFalse, returnZero } from "../../../Utils"; +import { AudioField, VideoField } from "../../../fields/URLField"; +import { emptyFunction, formatTime, OmitKeys, returnFalse, returnOne, setupMoveUpEvents, Utils } from "../../../Utils"; import { Docs, DocUtils } from "../../documents/Documents"; +import { DocumentType } from "../../documents/DocumentTypes"; import { Networking } from "../../Network"; import { CurrentUserUtils } from "../../util/CurrentUserUtils"; +import { DocumentManager } from "../../util/DocumentManager"; import { SelectionManager } from "../../util/SelectionManager"; import { SnappingManager } from "../../util/SnappingManager"; +import { undoBatch } from "../../util/UndoManager"; import { CollectionFreeFormView } from "../collections/collectionFreeForm/CollectionFreeFormView"; import { CollectionStackedTimeline, TrimScope } from "../collections/CollectionStackedTimeline"; import { ContextMenu } from "../ContextMenu"; @@ -22,16 +26,10 @@ import { ContextMenuProps } from "../ContextMenuItem"; import { ViewBoxAnnotatableComponent, ViewBoxAnnotatableProps } from "../DocComponent"; import { DocumentDecorations } from "../DocumentDecorations"; import { MarqueeAnnotator } from "../MarqueeAnnotator"; +import { AnchorMenu } from "../pdf/AnchorMenu"; import { StyleProp } from "../StyleProvider"; import { FieldView, FieldViewProps } from './FieldView'; -import { LinkDocPreview } from "./LinkDocPreview"; import "./VideoBox.scss"; -import { DragManager } from "../../util/DragManager"; -import { DocumentManager } from "../../util/DocumentManager"; -import { DocumentType } from "../../documents/DocumentTypes"; -import { Tooltip } from "@material-ui/core"; -import { AnchorMenu } from "../pdf/AnchorMenu"; -import { undoBatch } from "../../util/UndoManager"; const path = require('path'); type VideoDocument = makeInterface<[typeof documentSchema]>; @@ -40,14 +38,30 @@ const VideoDocument = makeInterface(documentSchema); @observer export class VideoBox extends ViewBoxAnnotatableComponent(VideoDocument) { public static LayoutString(fieldKey: string) { return FieldView.LayoutString(VideoBox, fieldKey); } + static async convertDataUri(imageUri: string, returnedFilename: string) { + try { + const posting = Utils.prepend("/uploadURI"); + const returnedUri = await rp.post(posting, { + body: { + uri: imageUri, + name: returnedFilename + }, + json: true, + }); + return returnedUri; + + } catch (e) { + console.log("VideoBox :" + e); + } + } static _youtubeIframeCounter: number = 0; - static Instance: VideoBox; static heightPercent 
= 60; // height of timeline in percent of height of videoBox. private _disposers: { [name: string]: IReactionDisposer } = {}; private _youtubePlayer: YT.Player | undefined = undefined; private _videoRef: HTMLVideoElement | null = null; private _youtubeIframeId: number = -1; private _youtubeContentCreated = false; + private _audioPlayer: HTMLAudioElement | null = null; private _stackedTimeline = React.createRef(); private _mainCont: React.RefObject = React.createRef(); private _annotationLayer: React.RefObject = React.createRef(); @@ -62,47 +76,52 @@ export class VideoBox extends ViewBoxAnnotatableComponent arr[arr.length - 1])(field.url.href.split("/")) : ""; + } + // returns the path of the audio file + @computed get audiopath() { + const field = Cast(this.props.Document[this.props.fieldKey + '-audio'], AudioField, null); + const vfield = Cast(this.dataDoc[this.fieldKey], VideoField, null); + return field?.url.href ?? vfield?.url.href ?? ""; } + private get timeline() { return this._stackedTimeline.current; } private get transition() { return this._clicking ? "left 0.5s, width 0.5s, height 0.5s" : ""; } public get player(): HTMLVideoElement | null { return this._videoRef; } - constructor(props: Readonly) { - super(props); - VideoBox.Instance = this; - } - - getAnchor = () => { - const timecode = Cast(this.layoutDoc._currentTimecode, "number", null); - const marquee = AnchorMenu.Instance.GetAnchor?.(); - return CollectionStackedTimeline.createAnchor(this.rootDoc, this.dataDoc, this.annotationKey, "_timecodeToShow"/* videoStart */, "_timecodeToHide" /* videoEnd */, timecode ? timecode : undefined, undefined, marquee) || this.rootDoc; + componentDidMount() { + this.props.setContentView?.(this); // this tells the DocumentView that this AudioBox is the "content" of the document. this allows the DocumentView to indirectly call getAnchor() on the AudioBox when making a link. + if (this.youtubeVideoId) { + const youtubeaspect = 400 / 315; + const nativeWidth = Doc.NativeWidth(this.layoutDoc); + const nativeHeight = Doc.NativeHeight(this.layoutDoc); + if (!nativeWidth || !nativeHeight) { + if (!nativeWidth) Doc.SetNativeWidth(this.dataDoc, 600); + Doc.SetNativeHeight(this.dataDoc, (nativeWidth || 600) / youtubeaspect); + this.layoutDoc._height = (this.layoutDoc._width || 0) / youtubeaspect; + } + } } - videoLoad = () => { - const aspect = this.player!.videoWidth / this.player!.videoHeight; - Doc.SetNativeWidth(this.dataDoc, this.player!.videoWidth); - Doc.SetNativeHeight(this.dataDoc, this.player!.videoHeight); - this.layoutDoc._height = (this.layoutDoc._width || 0) / aspect; - if (Number.isFinite(this.player!.duration)) { - this.dataDoc[this.fieldKey + "-duration"] = this.player!.duration; - } + componentWillUnmount() { + this.Pause(); + Object.keys(this._disposers).forEach(d => this._disposers[d]?.()); } @action public Play = (update: boolean = true) => { this._playing = true; + const eleTime = this.player?.currentTime || 0; + const start = eleTime >= (this.timeline?.trimEnd || 0) ? 
this.timeline?.trimStart || 0 : eleTime; try { this._audioPlayer && this.player && (this._audioPlayer.currentTime = this.player?.currentTime); - update && this.player && this.playFrom(this.player.currentTime); + update && this.player && this.playFrom(start, undefined, true); update && this._audioPlayer?.play(); update && this._youtubePlayer?.playVideo(); this._youtubePlayer && !this._playTimer && (this._playTimer = setInterval(this.updateTimecode, 5)); @@ -190,7 +209,7 @@ export class VideoBox extends ViewBoxAnnotatableComponent { + createRealSummaryLink = (imagePath: string, downX?: number, downY?: number) => { const url = !imagePath.startsWith("/") ? Utils.CorsProxy(imagePath) : imagePath; const width = this.layoutDoc._width || 1; const height = this.layoutDoc._height || 0; @@ -208,11 +227,25 @@ export class VideoBox extends ViewBoxAnnotatableComponent { + const timecode = Cast(this.layoutDoc._currentTimecode, "number", null); + const marquee = AnchorMenu.Instance.GetAnchor?.(); + return CollectionStackedTimeline.createAnchor(this.rootDoc, this.dataDoc, this.annotationKey, "_timecodeToShow"/* videoStart */, "_timecodeToHide" /* videoEnd */, timecode ? timecode : undefined, undefined, marquee) || this.rootDoc; + } + + videoLoad = () => { + const aspect = this.player!.videoWidth / this.player!.videoHeight; + Doc.SetNativeWidth(this.dataDoc, this.player!.videoWidth); + Doc.SetNativeHeight(this.dataDoc, this.player!.videoHeight); + this.layoutDoc._height = (this.layoutDoc._width || 0) / aspect; + if (Number.isFinite(this.player!.duration)) { + this.dataDoc[this.fieldKey + "-duration"] = this.player!.duration; + } + } + @action updateTimecode = () => { this.player && (this.layoutDoc._currentTimecode = this.player.currentTime); - this.layoutDoc.clipEnd = this.layoutDoc.clipEnd ? Math.min(this.duration, NumCast(this.layoutDoc.clipEnd)) : this.duration; - this._trimEnd = this._trimEnd ? Math.min(this.duration, this._trimEnd) : this.duration; try { this._youtubePlayer && (this.layoutDoc._currentTimecode = this._youtubePlayer.getCurrentTime?.()); } catch (e) { @@ -220,25 +253,6 @@ export class VideoBox extends ViewBoxAnnotatableComponent this._disposers[d]?.()); - } - @action setVideoRef = (vref: HTMLVideoElement | null) => { this._videoRef = vref; @@ -252,23 +266,6 @@ export class VideoBox extends ViewBoxAnnotatableComponent { const field = Cast(this.dataDoc[this.props.fieldKey], VideoField); if (field) { @@ -294,48 +291,8 @@ export class VideoBox extends ViewBoxAnnotatableComponent this._audioPlayer = e; - @computed get content() { - const field = Cast(this.dataDoc[this.fieldKey], VideoField); - const interactive = CurrentUserUtils.SelectedTool !== InkTool.None || !this.props.isSelected() ? "" : "-interactive"; - const style = "videoBox-content" + (this._fullScreen ? "-fullScreen" : "") + interactive; - return !field ?
<div>Loading</div> :
-      <div className="videoBox-contentContainer">
-        <video className={style} ref={this.setVideoRef} onCanPlay={this.videoLoad} src={field.url.href} />
-        {!this.audiopath || this.audiopath === field.url.href ? (null) :
-          <audio ref={this.setAudioRef} src={this.audiopath} />}
-      </div>
; - } - - @computed get youtubeVideoId() { - const field = Cast(this.dataDoc[this.props.fieldKey], VideoField); - return field && field.url.href.indexOf("youtube") !== -1 ? ((arr: string[]) => arr[arr.length - 1])(field.url.href.split("/")) : ""; - } - - @action youtubeIframeLoaded = (e: any) => { + @action + youtubeIframeLoaded = (e: any) => { if (!this._youtubeContentCreated) { this._forceCreateYouTubeIFrame = !this._forceCreateYouTubeIFrame; return; @@ -344,7 +301,7 @@ export class VideoBox extends ViewBoxAnnotatableComponent { + loadYouTube = (iframe: any) => { let started = true; const onYoutubePlayerStateChange = (event: any) => runInAction(() => { if (started && event.data === YT.PlayerState.PLAYING) { @@ -376,39 +333,6 @@ export class VideoBox extends ViewBoxAnnotatableComponent{"playback"}
} key="play" placement="bottom">
-        <div className="videoBox-play" onPointerDown={this.onPlayDown}>
-          <FontAwesomeIcon icon={this._playing ? "pause" : "play"} />
-        </div>
-      </Tooltip>,
-      <Tooltip title={<div className="dash-tooltip">{"timecode"}</div>} key="time" placement="bottom">
-        <div className="videoBox-time" onPointerDown={this.onResetDown}>
-          {formatTime(curTime)}
-          {" " + Math.floor((curTime - Math.trunc(curTime)) * 100).toString().padStart(2, "0")}
-        </div>
-      </Tooltip>,
-      <Tooltip title={<div className="dash-tooltip">{"view full screen"}</div>} key="full" placement="bottom">
-        <div className="videoBox-full" onPointerDown={this.onFullDown}>
-          <FontAwesomeIcon icon="expand" />
-        </div>
-      </Tooltip>];
-    return <div className="videoBox-ui">
-      {[...(VideoBox._nativeControls ? [] : nonNativeControls),
-      <Tooltip title={<div className="dash-tooltip">{"snapshot current frame"}</div>} key="snap" placement="bottom">
-        <div className="videoBox-snapshot" onPointerDown={this.onSnapshotDown}>
-          <FontAwesomeIcon icon="camera" />
-        </div>
-      </Tooltip>,
-      <Tooltip title={<div className="dash-tooltip">{"show annotation timeline"}</div>} key="timeline" placement="bottom">
-        <div className="videoBox-timeline" onPointerDown={this.onTimelineHdlDown}>
-          <FontAwesomeIcon icon="eye" />
-        </div>
-      </Tooltip>,]}
-    </div>
; - } onPlayDown = () => this._playing ? this.Pause() : this.Play(); @@ -425,10 +349,11 @@ export class VideoBox extends ViewBoxAnnotatableComponent this.Snapshot()); } - onTimelineHdlDown = action((e: React.PointerEvent) => { + @action + onTimelineHdlDown = (e: React.PointerEvent) => { this._clicking = true; setupMoveUpEvents(this, e, - action((e: PointerEvent) => { + action(encodeURIComponent => { this._clicking = false; if (this.props.isContentActive()) { const local = this.props.ScreenToLocalTransform().scale(this.props.scaling?.() || 1).transformPoint(e.clientX, e.clientY); @@ -440,28 +365,17 @@ export class VideoBox extends ViewBoxAnnotatableComponent this._clicking = false), 500); }, this.props.isContentActive(), this.props.isContentActive()); - }); + } onResetDown = (e: React.PointerEvent) => { setupMoveUpEvents(this, e, - (e: PointerEvent) => { + e => { this.Seek(Math.max(0, (this.layoutDoc._currentTimecode || 0) + Math.sign(e.movementX) * 0.0333)); e.stopImmediatePropagation(); return false; }, emptyFunction, - (e: PointerEvent) => this.layoutDoc._currentTimecode = 0); - } - - @computed get youtubeContent() { - this._youtubeIframeId = VideoBox._youtubeIframeCounter++; - this._youtubeContentCreated = this._forceCreateYouTubeIFrame ? true : true; - const style = "videoBox-content-YouTube" + (this._fullScreen ? "-fullScreen" : ""); - const start = untracked(() => Math.round((this.layoutDoc._currentTimecode || 0))); - return
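A minimal sketch (not part of the patch) of the wiring loadYouTube performs above, against the YT iframe API the file already uses; the 5 ms poll mirrors the diff's setInterval(this.updateTimecode, 5), while the helper name and callback shape are assumptions.

function wireYouTube(player: YT.Player, setTime: (t: number) => void, onFirstPlay: () => void) {
    let started = true;
    player.addEventListener("onStateChange", (e: any) => {
        if (started && e.data === YT.PlayerState.PLAYING) { started = false; onFirstPlay(); }
    });
    const poll = window.setInterval(() => setTime(player.getCurrentTime?.() ?? 0), 5); // mirror timecode
    return () => window.clearInterval(poll);   // disposer, released in componentWillUnmount
}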