Diffstat (limited to 'src/client/views/nodes/VideoBox.tsx')
-rw-r--r-- | src/client/views/nodes/VideoBox.tsx | 463 |
1 file changed, 234 insertions, 229 deletions
diff --git a/src/client/views/nodes/VideoBox.tsx b/src/client/views/nodes/VideoBox.tsx
index 440ccf638..23c90de8a 100644
--- a/src/client/views/nodes/VideoBox.tsx
+++ b/src/client/views/nodes/VideoBox.tsx
@@ -1,5 +1,6 @@
 import React = require("react");
 import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
+import { Tooltip } from "@material-ui/core";
 import { action, computed, IReactionDisposer, observable, ObservableMap, reaction, runInAction, untracked } from "mobx";
 import { observer } from "mobx-react";
 import * as rp from 'request-promise';
@@ -8,29 +9,27 @@ import { documentSchema } from "../../../fields/documentSchemas";
 import { InkTool } from "../../../fields/InkField";
 import { makeInterface } from "../../../fields/Schema";
 import { Cast, NumCast, StrCast } from "../../../fields/Types";
-import { AudioField, nullAudio, VideoField } from "../../../fields/URLField";
-import { emptyFunction, formatTime, OmitKeys, returnOne, setupMoveUpEvents, Utils, returnFalse } from "../../../Utils";
+import { AudioField, VideoField } from "../../../fields/URLField";
+import { emptyFunction, formatTime, OmitKeys, returnFalse, returnOne, setupMoveUpEvents, Utils } from "../../../Utils";
 import { Docs, DocUtils } from "../../documents/Documents";
+import { DocumentType } from "../../documents/DocumentTypes";
 import { Networking } from "../../Network";
 import { CurrentUserUtils } from "../../util/CurrentUserUtils";
+import { DocumentManager } from "../../util/DocumentManager";
 import { SelectionManager } from "../../util/SelectionManager";
 import { SnappingManager } from "../../util/SnappingManager";
+import { undoBatch } from "../../util/UndoManager";
 import { CollectionFreeFormView } from "../collections/collectionFreeForm/CollectionFreeFormView";
-import { CollectionStackedTimeline } from "../collections/CollectionStackedTimeline";
+import { CollectionStackedTimeline, TrimScope } from "../collections/CollectionStackedTimeline";
 import { ContextMenu } from "../ContextMenu";
 import { ContextMenuProps } from "../ContextMenuItem";
 import { ViewBoxAnnotatableComponent, ViewBoxAnnotatableProps } from "../DocComponent";
 import { DocumentDecorations } from "../DocumentDecorations";
 import { MarqueeAnnotator } from "../MarqueeAnnotator";
+import { AnchorMenu } from "../pdf/AnchorMenu";
 import { StyleProp } from "../StyleProvider";
 import { FieldView, FieldViewProps } from './FieldView';
-import { LinkDocPreview } from "./LinkDocPreview";
 import "./VideoBox.scss";
-import { DragManager } from "../../util/DragManager";
-import { DocumentManager } from "../../util/DocumentManager";
-import { DocumentType } from "../../documents/DocumentTypes";
-import { Tooltip } from "@material-ui/core";
-import { AnchorMenu } from "../pdf/AnchorMenu";
 const path = require('path');
 
 type VideoDocument = makeInterface<[typeof documentSchema]>;
@@ -39,14 +38,30 @@ const VideoDocument = makeInterface(documentSchema);
 @observer
 export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProps & FieldViewProps, VideoDocument>(VideoDocument) {
     public static LayoutString(fieldKey: string) { return FieldView.LayoutString(VideoBox, fieldKey); }
+    static async convertDataUri(imageUri: string, returnedFilename: string) {
+        try {
+            const posting = Utils.prepend("/uploadURI");
+            const returnedUri = await rp.post(posting, {
+                body: {
+                    uri: imageUri,
+                    name: returnedFilename
+                },
+                json: true,
+            });
+            return returnedUri;
+
+        } catch (e) {
+            console.log("VideoBox :" + e);
+        }
+    }
     static _youtubeIframeCounter: number = 0;
-    static Instance: VideoBox;
     static heightPercent = 60; // height of timeline in percent of height of videoBox.
     private _disposers: { [name: string]: IReactionDisposer } = {};
     private _youtubePlayer: YT.Player | undefined = undefined;
     private _videoRef: HTMLVideoElement | null = null;
     private _youtubeIframeId: number = -1;
     private _youtubeContentCreated = false;
+    private _audioPlayer: HTMLAudioElement | null = null;
     private _stackedTimeline = React.createRef<CollectionStackedTimeline>();
     private _mainCont: React.RefObject<HTMLDivElement> = React.createRef();
     private _annotationLayer: React.RefObject<HTMLDivElement> = React.createRef();
@@ -61,39 +76,53 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
     @observable _playTimer?: NodeJS.Timeout = undefined;
     @observable _fullScreen = false;
     @observable _playing = false;
+    @computed get links() { return DocListCast(this.dataDoc.links); }
     @computed get heightPercent() { return NumCast(this.layoutDoc._timelineHeightPercent, 100); }
-    @computed get duration() { return NumCast(this.dataDoc[this.fieldKey + "-duration"]); }
+    @computed get rawDuration() { return NumCast(this.dataDoc[this.fieldKey + "-duration"]); }
+    @computed get youtubeVideoId() {
+        const field = Cast(this.dataDoc[this.props.fieldKey], VideoField);
+        return field && field.url.href.indexOf("youtube") !== -1 ? ((arr: string[]) => arr[arr.length - 1])(field.url.href.split("/")) : "";
+    }
+    // returns the path of the audio file
+    @computed get audiopath() {
+        const field = Cast(this.props.Document[this.props.fieldKey + '-audio'], AudioField, null);
+        const vfield = Cast(this.dataDoc[this.fieldKey], VideoField, null);
+        return field?.url.href ?? vfield?.url.href ?? "";
+    }
+
+    private get timeline() { return this._stackedTimeline.current; }
 
     private get transition() { return this._clicking ? "left 0.5s, width 0.5s, height 0.5s" : ""; }
     public get player(): HTMLVideoElement | null { return this._videoRef; }
 
-    constructor(props: Readonly<ViewBoxAnnotatableProps & FieldViewProps>) {
-        super(props);
-        VideoBox.Instance = this;
-    }
-
-    getAnchor = () => {
-        const timecode = Cast(this.layoutDoc._currentTimecode, "number", null);
-        const marquee = AnchorMenu.Instance.GetAnchor?.();
-        return CollectionStackedTimeline.createAnchor(this.rootDoc, this.dataDoc, this.annotationKey, "_timecodeToShow"/* videoStart */, "_timecodeToHide" /* videoEnd */, timecode ? timecode : undefined, undefined, marquee) || this.rootDoc;
+    componentDidMount() {
+        this.props.setContentView?.(this); // this tells the DocumentView that this AudioBox is the "content" of the document. this allows the DocumentView to indirectly call getAnchor() on the AudioBox when making a link.
+        if (this.youtubeVideoId) {
+            const youtubeaspect = 400 / 315;
+            const nativeWidth = Doc.NativeWidth(this.layoutDoc);
+            const nativeHeight = Doc.NativeHeight(this.layoutDoc);
+            if (!nativeWidth || !nativeHeight) {
+                if (!nativeWidth) Doc.SetNativeWidth(this.dataDoc, 600);
+                Doc.SetNativeHeight(this.dataDoc, (nativeWidth || 600) / youtubeaspect);
+                this.layoutDoc._height = (this.layoutDoc._width || 0) / youtubeaspect;
+            }
+        }
+        this.player && this.setPlayheadTime(0);
     }
 
-    videoLoad = () => {
-        const aspect = this.player!.videoWidth / this.player!.videoHeight;
-        Doc.SetNativeWidth(this.dataDoc, this.player!.videoWidth);
-        Doc.SetNativeHeight(this.dataDoc, this.player!.videoHeight);
-        this.layoutDoc._height = (this.layoutDoc._width || 0) / aspect;
-        if (Number.isFinite(this.player!.duration)) {
-            this.dataDoc[this.fieldKey + "-duration"] = this.player!.duration;
-        }
+    componentWillUnmount() {
+        this.Pause();
+        Object.keys(this._disposers).forEach(d => this._disposers[d]?.());
     }
 
     @action
     public Play = (update: boolean = true) => {
         this._playing = true;
+        const eleTime = this.player?.currentTime || 0;
+        const start = eleTime >= (this.timeline?.trimEnd || 0) ? this.timeline?.trimStart || 0 : eleTime;
         try {
             this._audioPlayer && this.player && (this._audioPlayer.currentTime = this.player?.currentTime);
-            update && this.player?.play();
+            update && this.player && this.playFrom(start, undefined, true);
             update && this._audioPlayer?.play();
             update && this._youtubePlayer?.playVideo();
             this._youtubePlayer && !this._playTimer && (this._playTimer = setInterval(this.updateTimecode, 5));
@@ -181,7 +210,7 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
         }
     }
 
-    private createRealSummaryLink = (imagePath: string, downX?: number, downY?: number) => {
+    createRealSummaryLink = (imagePath: string, downX?: number, downY?: number) => {
        const url = !imagePath.startsWith("/") ? Utils.CorsProxy(imagePath) : imagePath;
        const width = this.layoutDoc._width || 1;
        const height = this.layoutDoc._height || 0;
@@ -199,6 +228,22 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
            (downX !== undefined && downY !== undefined) && DocumentManager.Instance.getFirstDocumentView(imageSummary)?.startDragging(downX, downY, "move", true));
     }
 
+    getAnchor = () => {
+        const timecode = Cast(this.layoutDoc._currentTimecode, "number", null);
+        const marquee = AnchorMenu.Instance.GetAnchor?.();
+        return CollectionStackedTimeline.createAnchor(this.rootDoc, this.dataDoc, this.annotationKey, "_timecodeToShow"/* videoStart */, "_timecodeToHide" /* videoEnd */, timecode ? timecode : undefined, undefined, marquee) || this.rootDoc;
+    }
+
+    videoLoad = () => {
+        const aspect = this.player!.videoWidth / this.player!.videoHeight;
+        Doc.SetNativeWidth(this.dataDoc, this.player!.videoWidth);
+        Doc.SetNativeHeight(this.dataDoc, this.player!.videoHeight);
+        this.layoutDoc._height = (this.layoutDoc._width || 0) / aspect;
+        if (Number.isFinite(this.player!.duration)) {
+            this.dataDoc[this.fieldKey + "-duration"] = this.player!.duration;
+        }
+    }
+
     @action
     updateTimecode = () => {
         this.player && (this.layoutDoc._currentTimecode = this.player.currentTime);
@@ -209,41 +254,6 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
         }
     }
 
-    componentDidMount() {
-        this.props.setContentView?.(this); // this tells the DocumentView that this AudioBox is the "content" of the document. this allows the DocumentView to indirectly call getAnchor() on the AudioBox when making a link.
-        this._disposers.triggerVideo = reaction(
-            () => !LinkDocPreview.LinkInfo && this.props.renderDepth !== -1 ? NumCast(this.Document._triggerVideo, null) : undefined,
-            time => time !== undefined && setTimeout(() => {
-                this.player && this.Play();
-                setTimeout(() => this.Document._triggerVideo = undefined, 10);
-            }, this.player ? 0 : 250), // wait for mainCont and try again to play
-            { fireImmediately: true }
-        );
-        this._disposers.triggerStop = reaction(
-            () => this.props.renderDepth !== -1 && !LinkDocPreview.LinkInfo ? NumCast(this.Document._triggerVideoStop, null) : undefined,
-            stop => stop !== undefined && setTimeout(() => {
-                this.player && this.Pause();
-                setTimeout(() => this.Document._triggerVideoStop = undefined, 10);
-            }, this.player ? 0 : 250), // wait for mainCont and try again to play
-            { fireImmediately: true }
-        );
-        if (this.youtubeVideoId) {
-            const youtubeaspect = 400 / 315;
-            const nativeWidth = Doc.NativeWidth(this.layoutDoc);
-            const nativeHeight = Doc.NativeHeight(this.layoutDoc);
-            if (!nativeWidth || !nativeHeight) {
-                if (!nativeWidth) Doc.SetNativeWidth(this.dataDoc, 600);
-                Doc.SetNativeHeight(this.dataDoc, (nativeWidth || 600) / youtubeaspect);
-                this.layoutDoc._height = (this.layoutDoc._width || 0) / youtubeaspect;
-            }
-        }
-    }
-
-    componentWillUnmount() {
-        this.Pause();
-        Object.keys(this._disposers).forEach(d => this._disposers[d]?.());
-    }
-
     @action
     setVideoRef = (vref: HTMLVideoElement | null) => {
         this._videoRef = vref;
@@ -257,23 +267,6 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
         }
     }
 
-    public static async convertDataUri(imageUri: string, returnedFilename: string) {
-        try {
-            const posting = Utils.prepend("/uploadURI");
-            const returnedUri = await rp.post(posting, {
-                body: {
-                    uri: imageUri,
-                    name: returnedFilename
-                },
-                json: true,
-            });
-            return returnedUri;
-
-        } catch (e) {
-            console.log("VideoBox :" + e);
-        }
-    }
-
     specificContextMenu = (e: React.MouseEvent): void => {
         const field = Cast(this.dataDoc[this.props.fieldKey], VideoField);
         if (field) {
@@ -287,56 +280,20 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
                    this._videoRef!.srcObject = !this._screenCapture ? undefined : await (navigator.mediaDevices as any).getDisplayMedia({ video: true });
                }), icon: "expand-arrows-alt"
            });
+            subitems.push({ description: (this.layoutDoc.dontAutoFollowLinks ? "" : "Don't") + " follow links when encountered", event: () => this.layoutDoc.dontAutoFollowLinks = !this.layoutDoc.dontAutoFollowLinks, icon: "expand-arrows-alt" });
            subitems.push({ description: (this.layoutDoc.dontAutoPlayFollowedLinks ? "" : "Don't") + " play when link is selected", event: () => this.layoutDoc.dontAutoPlayFollowedLinks = !this.layoutDoc.dontAutoPlayFollowedLinks, icon: "expand-arrows-alt" });
            subitems.push({ description: (this.layoutDoc.autoPlayAnchors ? "Don't auto play" : "Auto play") + " anchors onClick", event: () => this.layoutDoc.autoPlayAnchors = !this.layoutDoc.autoPlayAnchors, icon: "expand-arrows-alt" });
            subitems.push({ description: "Toggle Native Controls", event: action(() => VideoBox._nativeControls = !VideoBox._nativeControls), icon: "expand-arrows-alt" });
+            subitems.push({ description: "Start Trim All", event: () => this.startTrim(TrimScope.All), icon: "expand-arrows-alt" });
+            subitems.push({ description: "Start Trim Clip", event: () => this.startTrim(TrimScope.Clip), icon: "expand-arrows-alt" });
+            subitems.push({ description: "Stop Trim", event: () => this.finishTrim(), icon: "expand-arrows-alt" });
            subitems.push({ description: "Copy path", event: () => { Utils.CopyText(url); }, icon: "expand-arrows-alt" });
            ContextMenu.Instance.addItem({ description: "Options...", subitems: subitems, icon: "video" });
        }
     }
 
-    // returns the path of the audio file
-    @computed get audiopath() {
-        const field = Cast(this.props.Document[this.props.fieldKey + '-audio'], AudioField, null);
-        const vfield = Cast(this.dataDoc[this.fieldKey], VideoField, null);
-        return field?.url.href ?? vfield?.url.href ?? "";
-    }
-    // ref for updating time
-    _audioPlayer: HTMLAudioElement | null = null;
-    setAudioRef = (e: HTMLAudioElement | null) => this._audioPlayer = e;
-    @computed get content() {
-        const field = Cast(this.dataDoc[this.fieldKey], VideoField);
-        const interactive = CurrentUserUtils.SelectedTool !== InkTool.None || !this.props.isSelected() ? "" : "-interactive";
-        const style = "videoBox-content" + (this._fullScreen ? "-fullScreen" : "") + interactive;
-        return !field ? <div key="loading">Loading</div> :
-            <div className="container" key="container" style={{ mixBlendMode: "multiply", pointerEvents: this.props.isContentActive() ? "all" : "none" }}>
-                <div className={`${style}`} style={{ width: "100%", height: "100%", left: "0px" }}>
-                    <video key="video" autoPlay={this._screenCapture} ref={this.setVideoRef}
-                        style={{ height: "100%", width: "auto", display: "flex", margin: "auto" }}
-                        onCanPlay={this.videoLoad}
-                        controls={VideoBox._nativeControls}
-                        onPlay={() => this.Play()}
-                        onSeeked={this.updateTimecode}
-                        onPause={() => this.Pause()}
-                        onClick={e => e.preventDefault()}>
-                        <source src={field.url.href} type="video/mp4" />
-                        Not supported.
-                    </video>
-                    {!this.audiopath || this.audiopath === field.url.href ? (null) :
-                        <audio ref={this.setAudioRef} className={`audiobox-control${this.props.isContentActive() ? "-interactive" : ""}`}>
-                            <source src={this.audiopath} type="audio/mpeg" />
-                            Not supported.
-                        </audio>}
-                </div>
-            </div>;
-    }
-
-    @computed get youtubeVideoId() {
-        const field = Cast(this.dataDoc[this.props.fieldKey], VideoField);
-        return field && field.url.href.indexOf("youtube") !== -1 ? ((arr: string[]) => arr[arr.length - 1])(field.url.href.split("/")) : "";
-    }
-
-    @action youtubeIframeLoaded = (e: any) => {
+    @action
+    youtubeIframeLoaded = (e: any) => {
        if (!this._youtubeContentCreated) {
            this._forceCreateYouTubeIFrame = !this._forceCreateYouTubeIFrame;
            return;
@@ -345,7 +302,7 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
        this.loadYouTube(e.target);
     }
 
-    private loadYouTube = (iframe: any) => {
+    loadYouTube = (iframe: any) => {
        let started = true;
        const onYoutubePlayerStateChange = (event: any) => runInAction(() => {
            if (started && event.data === YT.PlayerState.PLAYING) {
@@ -377,39 +334,6 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
            });
        }
     }
-    private get uIButtons() {
-        const curTime = (this.layoutDoc._currentTimecode || 0);
-        const nonNativeControls = [
-            <Tooltip title={<div className="dash-tooltip">{"playback"}</div>} key="play" placement="bottom">
-                <div className="videoBox-play" onPointerDown={this.onPlayDown} >
-                    <FontAwesomeIcon icon={this._playing ? "pause" : "play"} size="lg" />
-                </div>
-            </Tooltip>,
-            <Tooltip title={<div className="dash-tooltip">{"timecode"}</div>} key="time" placement="bottom">
-                <div className="videoBox-time" onPointerDown={this.onResetDown} >
-                    <span>{formatTime(curTime)}</span>
-                    <span style={{ fontSize: 8 }}>{" " + Math.floor((curTime - Math.trunc(curTime)) * 100).toString().padStart(2, "0")}</span>
-                </div>
-            </Tooltip>,
-            <Tooltip title={<div className="dash-tooltip">{"view full screen"}</div>} key="full" placement="bottom">
-                <div className="videoBox-full" onPointerDown={this.FullScreen}>
-                    <FontAwesomeIcon icon="expand" size="lg" />
-                </div>
-            </Tooltip>];
-        return <div className="videoBox-ui">
-            {[...(VideoBox._nativeControls ? [] : nonNativeControls),
-            <Tooltip title={<div className="dash-tooltip">{"snapshot current frame"}</div>} key="snap" placement="bottom">
-                <div className="videoBox-snapshot" onPointerDown={this.onSnapshotDown} >
-                    <FontAwesomeIcon icon="camera" size="lg" />
-                </div>
-            </Tooltip>,
-            <Tooltip title={<div className="dash-tooltip">{"show annotation timeline"}</div>} key="timeline" placement="bottom">
-                <div className="videoBox-timelineButton" onPointerDown={this.onTimelineHdlDown}>
-                    <FontAwesomeIcon icon="eye" size="lg" />
-                </div>
-            </Tooltip>,]}
-        </div>;
-    }
 
     onPlayDown = () => this._playing ? this.Pause() : this.Play();
 
@@ -426,10 +350,11 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
        }, emptyFunction, () => this.Snapshot());
     }
 
-    onTimelineHdlDown = action((e: React.PointerEvent) => {
+    @action
+    onTimelineHdlDown = (e: React.PointerEvent) => {
        this._clicking = true;
        setupMoveUpEvents(this, e,
-            action((e: PointerEvent) => {
+            action(encodeURIComponent => {
                this._clicking = false;
                if (this.props.isContentActive()) {
                    const local = this.props.ScreenToLocalTransform().scale(this.props.scaling?.() || 1).transformPoint(e.clientX, e.clientY);
@@ -441,28 +366,17 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
                this.layoutDoc._timelineHeightPercent = this.heightPercent !== 100 ? 100 : VideoBox.heightPercent;
                setTimeout(action(() => this._clicking = false), 500);
            }, this.props.isContentActive(), this.props.isContentActive());
-    });
+    }
 
     onResetDown = (e: React.PointerEvent) => {
        setupMoveUpEvents(this, e,
-            (e: PointerEvent) => {
+            e => {
                this.Seek(Math.max(0, (this.layoutDoc._currentTimecode || 0) + Math.sign(e.movementX) * 0.0333));
                e.stopImmediatePropagation();
                return false;
            },
            emptyFunction,
-            (e: PointerEvent) => this.layoutDoc._currentTimecode = 0);
-    }
-
-    @computed get youtubeContent() {
-        this._youtubeIframeId = VideoBox._youtubeIframeCounter++;
-        this._youtubeContentCreated = this._forceCreateYouTubeIFrame ? true : true;
-        const style = "videoBox-content-YouTube" + (this._fullScreen ? "-fullScreen" : "");
-        const start = untracked(() => Math.round((this.layoutDoc._currentTimecode || 0)));
-        return <iframe key={this._youtubeIframeId} id={`${this.youtubeVideoId + this._youtubeIframeId}-player`}
-            onPointerLeave={this.updateTimecode}
-            onLoad={this.youtubeIframeLoaded} className={`${style}`} width={Doc.NativeWidth(this.layoutDoc) || 640} height={Doc.NativeHeight(this.layoutDoc) || 390}
-            src={`https://www.youtube.com/embed/${this.youtubeVideoId}?enablejsapi=1&rel=0&showinfo=1&autoplay=0&mute=1&start=${start}&modestbranding=1&controls=${VideoBox._nativeControls ? 1 : 0}`} />;
+            e => this.layoutDoc._currentTimecode = 0);
     }
 
     @action.bound
@@ -475,47 +389,173 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
 
     // play back the video from time
     @action
-    playFrom = (seekTimeInSeconds: number, endTime: number = this.duration) => {
+    playFrom = (seekTimeInSeconds: number, endTime?: number, fullPlay: boolean = false) => {
        clearTimeout(this._playRegionTimer);
-        this._playRegionDuration = endTime - seekTimeInSeconds;
        if (Number.isNaN(this.player?.duration)) {
            setTimeout(() => this.playFrom(seekTimeInSeconds, endTime), 500);
-        } else if (this.player) {
-            if (seekTimeInSeconds < 0) {
-                if (seekTimeInSeconds > -1) {
-                    setTimeout(() => this.playFrom(0), -seekTimeInSeconds * 1000);
-                } else {
-                    this.Pause();
-                }
-            } else if (seekTimeInSeconds <= this.player.duration) {
-                this.player.currentTime = seekTimeInSeconds;
+        }
+        else if (this.player) {
+            const end = Math.min(this.timeline?.trimEnd ?? this.rawDuration, endTime ?? this.timeline?.trimEnd ?? this.rawDuration);
+            const start = Math.max(this.timeline?.trimStart ?? 0, seekTimeInSeconds);
+            this._playRegionDuration = end - start;
+            if (seekTimeInSeconds >= 0 && (this.timeline?.trimStart || 0) <= end && seekTimeInSeconds <= (this.timeline?.trimEnd || this.rawDuration)) {
+                this.player.currentTime = start;
                this._audioPlayer && (this._audioPlayer.currentTime = seekTimeInSeconds);
                this.player.play();
                this._audioPlayer?.play();
-                runInAction(() => this._playing = true);
-                if (endTime !== this.duration) {
-                    this._playRegionTimer = setTimeout(() => this.Pause(), (this._playRegionDuration) * 1000); // use setTimeout to play a specific duration
-                }
+                this._playing = true;
+                this._playRegionTimer = setTimeout(
+                    () => {
+                        if (fullPlay) this.setPlayheadTime(this.timeline?.trimStart || 0);
+                        this.Pause();
+                    }, this._playRegionDuration * 1000);
            } else {
                this.Pause();
            }
        }
     }
 
+    // hides trim controls and displays new clip
+    @undoBatch
+    finishTrim = action(() => {
+        this.Pause();
+        this.setPlayheadTime(Math.max(Math.min(this.timeline?.trimEnd || 0, this.player!.currentTime), this.timeline?.trimStart || 0));
+        this.timeline?.StopTrimming();
+    });
+
+    startTrim = (scope: TrimScope) => {
+        this.Pause();
+        this.timeline?.StartTrimming(scope);
+    }
+
+    onClipPointerDown = (e: React.PointerEvent) => {
+        this.heightPercent >= 100 && this.onTimelineHdlDown(e);
+        this.timeline && setupMoveUpEvents(this, e, returnFalse, returnFalse, action((e: PointerEvent, doubleTap?: boolean) => {
+            if (doubleTap) {
+                this.startTrim(TrimScope.All);
+            } else if (this.timeline) {
+                this.Pause();
+                this.timeline.IsTrimming !== TrimScope.None ? this.finishTrim() : this.startTrim(TrimScope.Clip);
+            }
+        }));
+    }
+
     playLink = (doc: Doc) => {
        const startTime = Math.max(0, (this._stackedTimeline.current?.anchorStart(doc) || 0));
-        const endTime = this._stackedTimeline.current?.anchorEnd(doc);
+        const endTime = this.timeline?.anchorEnd(doc);
        if (startTime !== undefined) {
            if (!this.layoutDoc.dontAutoPlayFollowedLinks) endTime ? this.playFrom(startTime, endTime) : this.playFrom(startTime);
            else this.Seek(startTime);
        }
     }
 
-    playing = () => this._playing;
+    marqueeDown = (e: React.PointerEvent) => {
+        if (!e.altKey && e.button === 0 && this.layoutDoc._viewScale === 1 && this.props.isContentActive(true) && ![InkTool.Highlighter, InkTool.Pen].includes(CurrentUserUtils.SelectedTool)) {
+            setupMoveUpEvents(this, e, action(e => {
+                MarqueeAnnotator.clearAnnotations(this._savedAnnotations);
+                this._marqueeing = [e.clientX, e.clientY];
+                return true;
+            }), returnFalse, () => MarqueeAnnotator.clearAnnotations(this._savedAnnotations), false);
+        }
+    }
+
+    @action
+    finishMarquee = () => {
+        this._marqueeing = undefined;
+        this.props.select(true);
+    }
+
     timelineWhenChildContentsActiveChanged = action((isActive: boolean) => this.props.whenChildContentsActiveChanged(this._isAnyChildContentActive = isActive));
     timelineScreenToLocal = () => this.props.ScreenToLocalTransform().scale(this.scaling()).translate(0, -this.heightPercent / 100 * this.props.PanelHeight());
-    setAnchorTime = (time: number) => this.player!.currentTime = this.layoutDoc._currentTimecode = time;
+    setPlayheadTime = (time: number) => this.player!.currentTime = this.layoutDoc._currentTimecode = time;
     timelineHeight = () => this.props.PanelHeight() * (100 - this.heightPercent) / 100;
+    playing = () => this._playing;
+
+    contentFunc = () => [this.youtubeVideoId ? this.youtubeContent : this.content];
+    scaling = () => this.props.scaling?.() || 1;
+    panelWidth = () => this.props.PanelWidth() * this.heightPercent / 100;
+    panelHeight = () => this.layoutDoc._fitWidth ? this.panelWidth() / (Doc.NativeAspect(this.rootDoc) || 1) : this.props.PanelHeight() * this.heightPercent / 100;
+    screenToLocalTransform = () => {
+        const offset = (this.props.PanelWidth() - this.panelWidth()) / 2 / this.scaling();
+        return this.props.ScreenToLocalTransform().translate(-offset, 0).scale(100 / this.heightPercent);
+    }
+    marqueeFitScaling = () => (this.props.scaling?.() || 1) * this.heightPercent / 100;
+    marqueeOffset = () => [this.panelWidth() / 2 * (1 - this.heightPercent / 100) / (this.heightPercent / 100), 0];
+    timelineDocFilter = () => [`_timelineLabel:true,${Utils.noRecursionHack}:x`];
+
+    @computed get content() {
+        const field = Cast(this.dataDoc[this.fieldKey], VideoField);
+        const interactive = CurrentUserUtils.SelectedTool !== InkTool.None || !this.props.isSelected() ? "" : "-interactive";
+        const style = "videoBox-content" + (this._fullScreen ? "-fullScreen" : "") + interactive;
+        return !field ? <div key="loading">Loading</div> :
+            <div className="container" key="container" style={{ mixBlendMode: "multiply", pointerEvents: this.props.isContentActive() ? "all" : "none" }}>
+                <div className={`${style}`} style={{ width: "100%", height: "100%", left: "0px" }}>
+                    <video key="video" autoPlay={this._screenCapture} ref={this.setVideoRef}
+                        style={{ height: "100%", width: "auto", display: "flex", margin: "auto" }}
+                        onCanPlay={this.videoLoad}
+                        controls={VideoBox._nativeControls}
+                        onPlay={() => this.Play()}
+                        onSeeked={this.updateTimecode}
+                        onPause={() => this.Pause()}
+                        onClick={e => e.preventDefault()}>
+                        <source src={field.url.href} type="video/mp4" />
+                        Not supported.
+                    </video>
+                    {!this.audiopath || this.audiopath === field.url.href ? (null) :
+                        <audio ref={e => this._audioPlayer = e} className={`audiobox-control${this.props.isContentActive() ? "-interactive" : ""}`}>
+                            <source src={this.audiopath} type="audio/mpeg" />
+                            Not supported.
+                        </audio>}
+                </div>
+            </div>;
+    }
+
+    @computed get youtubeContent() {
+        this._youtubeIframeId = VideoBox._youtubeIframeCounter++;
+        this._youtubeContentCreated = this._forceCreateYouTubeIFrame ? true : true;
+        const style = "videoBox-content-YouTube" + (this._fullScreen ? "-fullScreen" : "");
+        const start = untracked(() => Math.round((this.layoutDoc._currentTimecode || 0)));
+        return <iframe key={this._youtubeIframeId} id={`${this.youtubeVideoId + this._youtubeIframeId}-player`}
+            onPointerLeave={this.updateTimecode}
+            onLoad={this.youtubeIframeLoaded} className={`${style}`} width={Doc.NativeWidth(this.layoutDoc) || 640} height={Doc.NativeHeight(this.layoutDoc) || 390}
+            src={`https://www.youtube.com/embed/${this.youtubeVideoId}?enablejsapi=1&rel=0&showinfo=1&autoplay=0&mute=1&start=${start}&modestbranding=1&controls=${VideoBox._nativeControls ? 1 : 0}`} />;
+    }
+
+    @computed get uIButtons() {
+        const curTime = (this.layoutDoc._currentTimecode || 0);
+        const nonNativeControls = [
+            <Tooltip title={<div className="dash-tooltip">{"playback"}</div>} key="play" placement="bottom">
+                <div className="videoBox-play" onPointerDown={this.onPlayDown} >
+                    <FontAwesomeIcon icon={this._playing ? "pause" : "play"} size="lg" />
+                </div>
+            </Tooltip>,
+            <Tooltip title={<div className="dash-tooltip">{"timecode"}</div>} key="time" placement="bottom">
+                <div className="videoBox-time" onPointerDown={this.onResetDown} >
+                    <span>{formatTime(curTime)}</span>
+                    <span style={{ fontSize: 8 }}>{" " + Math.floor((curTime - Math.trunc(curTime)) * 100).toString().padStart(2, "0")}</span>
+                </div>
+            </Tooltip>,
+            <Tooltip title={<div className="dash-tooltip">{"view full screen"}</div>} key="full" placement="bottom">
+                <div className="videoBox-full" onPointerDown={this.FullScreen}>
+                    <FontAwesomeIcon icon="expand" size="lg" />
+                </div>
+            </Tooltip>];
+        return <div className="videoBox-ui">
+            {[...(VideoBox._nativeControls ? [] : nonNativeControls),
+            <Tooltip title={<div className="dash-tooltip">{"snapshot current frame"}</div>} key="snap" placement="bottom">
+                <div className="videoBox-snapshot" onPointerDown={this.onSnapshotDown} >
+                    <FontAwesomeIcon icon="camera" size="lg" />
+                </div>
+            </Tooltip>,
+            <Tooltip title={<div className="dash-tooltip">{"show annotation timeline"}</div>} key="timeline" placement="bottom">
+                <div className="videoBox-timelineButton" onPointerDown={this.onTimelineHdlDown}>
+                    <FontAwesomeIcon icon="eye" size="lg" />
+                </div>
+            </Tooltip>,
+            <Tooltip title={<div className="dash-tooltip">{this.timeline?.IsTrimming !== TrimScope.None ? "finish trimming" : "start trim"}</div>} key="trim" placement="bottom">
+                <div className="videoBox-timelineButton" onPointerDown={this.onClipPointerDown}>
+                    <FontAwesomeIcon icon={this.timeline?.IsTrimming !== TrimScope.None ? "check" : "cut"} size="lg" />
+                </div>
+            </Tooltip>,]}
+        </div>;
+    }
+
     @computed get renderTimeline() {
        return <div className="videoBox-stackPanel" style={{ transition: this.transition, height: `${100 - this.heightPercent}%` }}>
            <CollectionStackedTimeline ref={this._stackedTimeline} {...this.props}
@@ -527,9 +567,8 @@
                endTag={"_timecodeToHide" /* videoEnd */}
                bringToFront={emptyFunction}
                CollectionView={undefined}
-                duration={this.duration}
                playFrom={this.playFrom}
-                setTime={this.setAnchorTime}
+                setTime={this.setPlayheadTime}
                playing={this.playing}
                isAnyChildContentActive={this.isAnyChildContentActive}
                whenChildContentsActiveChanged={this.timelineWhenChildContentsActiveChanged}
@@ -539,47 +578,13 @@
                Pause={this.Pause}
                playLink={this.playLink}
                PanelHeight={this.timelineHeight}
-                trimming={false}
-                trimStart={0}
-                trimEnd={this.duration}
-                trimDuration={this.duration}
-                setStartTrim={() => { }}
-                setEndTrim={() => { }}
+                rawDuration={this.rawDuration}
            />
        </div>;
     }
-
     @computed get annotationLayer() { return <div className="videoBox-annotationLayer" style={{ transition: this.transition, height: `${this.heightPercent}%` }} ref={this._annotationLayer} />; }
-
-    marqueeDown = (e: React.PointerEvent) => {
-        if (!e.altKey && e.button === 0 && this.layoutDoc._viewScale === 1 && this.props.isContentActive(true) && ![InkTool.Highlighter, InkTool.Pen].includes(CurrentUserUtils.SelectedTool)) {
-            setupMoveUpEvents(this, e, action(e => {
-                MarqueeAnnotator.clearAnnotations(this._savedAnnotations);
-                this._marqueeing = [e.clientX, e.clientY];
-                return true;
-            }), returnFalse, () => MarqueeAnnotator.clearAnnotations(this._savedAnnotations), false);
-        }
-    }
-
-    finishMarquee = action(() => {
-        this._marqueeing = undefined;
-        this.props.select(true);
-    });
-
-    @computed get fitWidth() { return this.props.docViewPath?.().lastElement().fitWidth; }
-    contentFunc = () => [this.youtubeVideoId ? this.youtubeContent : this.content];
-    scaling = () => this.props.scaling?.() || 1;
-    panelWidth = (): number => this.fitWidth ? this.props.PanelWidth() : (Doc.NativeAspect(this.rootDoc) || 1) * this.panelHeight();
-    panelHeight = (): number => this.fitWidth ? this.panelWidth() / (Doc.NativeAspect(this.rootDoc) || 1) : this.heightPercent / 100 * this.props.PanelHeight();
-    screenToLocalTransform = () => {
-        const offset = (this.props.PanelWidth() - this.panelWidth()) / 2 / this.scaling();
-        return this.props.ScreenToLocalTransform().translate(-offset, 0).scale(100 / this.heightPercent);
-    }
-    marqueeFitScaling = () => (this.props.scaling?.() || 1) * this.heightPercent / 100;
-    marqueeOffset = () => [this.panelWidth() / 2 * (1 - this.heightPercent / 100) / (this.heightPercent / 100), 0];
-    timelineDocFilter = () => [`_timelineLabel:true,${Utils.noRecursionHack}:x`];
 
     render() {
        const borderRad = this.props.styleProvider?.(this.layoutDoc, this.props, StyleProp.BorderRounding);
        const borderRadius = borderRad?.includes("px") ? `${Number(borderRad.split("px")[0]) / this.scaling()}px` : borderRad;