path: root/src/client/views/nodes/VideoBox.tsx
Diffstat (limited to 'src/client/views/nodes/VideoBox.tsx')
-rw-r--r--  src/client/views/nodes/VideoBox.tsx  662
1 file changed, 443 insertions, 219 deletions
diff --git a/src/client/views/nodes/VideoBox.tsx b/src/client/views/nodes/VideoBox.tsx
index 33fdc4935..e57cb1abe 100644
--- a/src/client/views/nodes/VideoBox.tsx
+++ b/src/client/views/nodes/VideoBox.tsx
@@ -1,14 +1,13 @@
import React = require("react");
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
-import { Tooltip } from "@material-ui/core";
import { action, computed, IReactionDisposer, observable, ObservableMap, reaction, runInAction, untracked } from "mobx";
import { observer } from "mobx-react";
import { basename } from "path";
import * as rp from 'request-promise';
-import { Doc, DocListCast } from "../../../fields/Doc";
+import { Doc, DocListCast, HeightSym, WidthSym } from "../../../fields/Doc";
import { InkTool } from "../../../fields/InkField";
import { Cast, NumCast, StrCast } from "../../../fields/Types";
-import { AudioField, VideoField } from "../../../fields/URLField";
+import { AudioField, ImageField, VideoField } from "../../../fields/URLField";
import { emptyFunction, formatTime, OmitKeys, returnFalse, returnOne, setupMoveUpEvents, Utils } from "../../../Utils";
import { Docs, DocUtils } from "../../documents/Documents";
import { DocumentType } from "../../documents/DocumentTypes";
@@ -17,8 +16,9 @@ import { CurrentUserUtils } from "../../util/CurrentUserUtils";
import { DocumentManager } from "../../util/DocumentManager";
import { SelectionManager } from "../../util/SelectionManager";
import { SnappingManager } from "../../util/SnappingManager";
+import { undoBatch } from "../../util/UndoManager";
import { CollectionFreeFormView } from "../collections/collectionFreeForm/CollectionFreeFormView";
-import { CollectionStackedTimeline } from "../collections/CollectionStackedTimeline";
+import { CollectionStackedTimeline, TrimScope } from "../collections/CollectionStackedTimeline";
import { ContextMenu } from "../ContextMenu";
import { ContextMenuProps } from "../ContextMenuItem";
import { ViewBoxAnnotatableComponent, ViewBoxAnnotatableProps } from "../DocComponent";
@@ -27,76 +27,151 @@ import { MarqueeAnnotator } from "../MarqueeAnnotator";
import { AnchorMenu } from "../pdf/AnchorMenu";
import { StyleProp } from "../StyleProvider";
import { FieldView, FieldViewProps } from './FieldView';
-import { LinkDocPreview } from "./LinkDocPreview";
import "./VideoBox.scss";
+const path = require('path');
+
+
+/**
+ * VideoBox
+ * Main component: VideoBox.tsx
+ * Supporting Components: CollectionStackedTimeline
+ *
+ * VideoBox is a node that supports the playback of video files in Dash.
+ * When a video file or YouTube video is imported into Dash, it is immediately rendered as a VideoBox document.
+ * CollectionStackedTimeline handles behavior shared between AudioBox and VideoBox, but VideoBox handles playing, pausing, etc. because it contains the <video> element.
+ * Users can trim the video: trimming is nondestructive and just sets new bounds for playback and the rendered timeline.
+ * Like images, videos can be zoomed and panned, and an overlay layer allows annotations on top of the video at different times.
+ */
@observer
export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProps & FieldViewProps>() {
public static LayoutString(fieldKey: string) { return FieldView.LayoutString(VideoBox, fieldKey); }
+ /**
+     * Uploads an image buffer to the server and stores it under the specified filename. By default the image
+     * is stored at multiple resolutions, each retrieved by appending _o, _s, _m, or _l to the filename (indicating original, small, medium, or large).
+ * @param imageUri the bytes of the image
+ * @param returnedFilename the base filename to store the image on the server
+ * @param nosuffix optionally suppress creating multiple resolution images
+ */
+ public static async convertDataUri(imageUri: string, returnedFilename: string, nosuffix = false, replaceRootFilename?: string) {
+ try {
+ const posting = Utils.prepend("/uploadURI");
+ const returnedUri = await rp.post(posting, {
+ body: {
+ uri: imageUri,
+ name: returnedFilename,
+ nosuffix,
+ replaceRootFilename
+ },
+ json: true,
+ });
+ return returnedUri;
+
+ } catch (e) {
+ console.log("VideoBox :" + e);
+ }
+ }
+
static _youtubeIframeCounter: number = 0;
- static Instance: VideoBox;
- static heightPercent = 60; // height of timeline in percent of height of videoBox.
+ static heightPercent = 80; // height of video relative to videoBox when timeline is open
private _disposers: { [name: string]: IReactionDisposer } = {};
private _youtubePlayer: YT.Player | undefined = undefined;
- private _videoRef: HTMLVideoElement | null = null;
+ private _videoRef: HTMLVideoElement | null = null; // <video> ref
+ private _contentRef: HTMLDivElement | null = null; // ref to div that wraps video and controls for full screen
private _youtubeIframeId: number = -1;
private _youtubeContentCreated = false;
- private _stackedTimeline = React.createRef<CollectionStackedTimeline>();
- private _mainCont: React.RefObject<HTMLDivElement> = React.createRef();
+ private _audioPlayer: HTMLAudioElement | null = null;
+ private _mainCont: React.RefObject<HTMLDivElement> = React.createRef(); // outermost div
private _annotationLayer: React.RefObject<HTMLDivElement> = React.createRef();
- private _playRegionTimer: any = null;
- private _playRegionDuration = 0;
- @observable static _nativeControls: boolean;
- @observable _marqueeing: number[] | undefined;
+ private _playRegionTimer: any = null; // timeout for playback
+ @observable _stackedTimeline: any; // CollectionStackedTimeline ref
+ @observable static _nativeControls: boolean; // default html controls
+ @observable _marqueeing: number[] | undefined; // coords for marquee selection
@observable _savedAnnotations = new ObservableMap<number, HTMLDivElement[]>();
@observable _screenCapture = false;
- @observable _clicking = false;
+ @observable _clicking = false; // used for transition between showing/hiding timeline
@observable _forceCreateYouTubeIFrame = false;
@observable _playTimer?: NodeJS.Timeout = undefined;
@observable _fullScreen = false;
@observable _playing = false;
+ @observable _finished: boolean = false; // has playback reached end of clip
+ @observable _volume: number = 1;
+ @observable _muted: boolean = false;
+
@computed get links() { return DocListCast(this.dataDoc.links); }
- @computed get heightPercent() { return NumCast(this.layoutDoc._timelineHeightPercent, 100); }
- @computed get duration() { return NumCast(this.dataDoc[this.fieldKey + "-duration"]); }
+ @computed get heightPercent() { return NumCast(this.layoutDoc._timelineHeightPercent, 100); } // current percent of video relative to VideoBox height
+ // @computed get rawDuration() { return NumCast(this.dataDoc[this.fieldKey + "-duration"]); }
+ @observable rawDuration: number = 0;
- private get transition() { return this._clicking ? "left 0.5s, width 0.5s, height 0.5s" : ""; }
- public get player(): HTMLVideoElement | null { return this._videoRef; }
- constructor(props: Readonly<ViewBoxAnnotatableProps & FieldViewProps>) {
- super(props);
- VideoBox.Instance = this;
+ @computed get youtubeVideoId() {
+ const field = Cast(this.dataDoc[this.props.fieldKey], VideoField);
+ return field && field.url.href.indexOf("youtube") !== -1 ? ((arr: string[]) => arr[arr.length - 1])(field.url.href.split("/")) : "";
}
- getAnchor = () => {
- const timecode = Cast(this.layoutDoc._currentTimecode, "number", null);
- const marquee = AnchorMenu.Instance.GetAnchor?.();
- return CollectionStackedTimeline.createAnchor(this.rootDoc, this.dataDoc, this.annotationKey, "_timecodeToShow"/* videoStart */, "_timecodeToHide" /* videoEnd */, timecode ? timecode : undefined, undefined, marquee) || this.rootDoc;
+
+ // returns the path of the audio file
+ @computed get audiopath() {
+ const field = Cast(this.props.Document[this.props.fieldKey + '-audio'], AudioField, null);
+ const vfield = Cast(this.dataDoc[this.fieldKey], VideoField, null);
+ return field?.url.href ?? vfield?.url.href ?? "";
}
- videoLoad = () => {
- const aspect = this.player!.videoWidth / this.player!.videoHeight;
- Doc.SetNativeWidth(this.dataDoc, this.player!.videoWidth);
- Doc.SetNativeHeight(this.dataDoc, this.player!.videoHeight);
- this.layoutDoc._height = NumCast(this.layoutDoc._width) / aspect;
- if (Number.isFinite(this.player!.duration)) {
- this.dataDoc[this.fieldKey + "-duration"] = this.player!.duration;
+
+ @computed private get timeline() { return this._stackedTimeline; }
+ private get transition() { return this._clicking ? "left 0.5s, width 0.5s, height 0.5s" : ""; } // css transition for hiding/showing timeline
+ public get player(): HTMLVideoElement | null { return this._videoRef; }
+
+
+ componentDidMount() {
+ this.props.setContentView?.(this); // this tells the DocumentView that this VideoBox is the "content" of the document. this allows the DocumentView to indirectly call getAnchor() on the VideoBox when making a link.
+ if (this.youtubeVideoId) {
+ const youtubeaspect = 400 / 315;
+ const nativeWidth = Doc.NativeWidth(this.layoutDoc);
+ const nativeHeight = Doc.NativeHeight(this.layoutDoc);
+ if (!nativeWidth || !nativeHeight) {
+ if (!nativeWidth) Doc.SetNativeWidth(this.dataDoc, 600);
+ Doc.SetNativeHeight(this.dataDoc, (nativeWidth || 600) / youtubeaspect);
+ this.layoutDoc._height = NumCast(this.layoutDoc._width) / youtubeaspect;
+ }
}
+ this.player && this.setPlayheadTime(0);
}
+ componentWillUnmount() {
+ this.removeCurrentlyPlaying();
+ this.Pause();
+ Object.keys(this._disposers).forEach(d => this._disposers[d]?.());
+ }
+
+
+ // plays video
@action public Play = (update: boolean = true) => {
this._playing = true;
- try {
- this._audioPlayer && this.player && (this._audioPlayer.currentTime = this.player?.currentTime);
- update && this.player?.play();
- update && this._audioPlayer?.play();
- update && this._youtubePlayer?.playVideo();
- this._youtubePlayer && !this._playTimer && (this._playTimer = setInterval(this.updateTimecode, 5));
- } catch (e) {
- console.log("Video Play Exception:", e);
+ const eleTime = this.player?.currentTime || 0;
+ if (this.timeline) {
+ let start = eleTime >= this.timeline.trimEnd || eleTime <= this.timeline.trimStart ? this.timeline.trimStart : eleTime;
+
+ if (this._finished) {
+ // restarts video if reached end on previous play
+ this._finished = false;
+ start = this.timeline.trimStart;
+ }
+
+ try {
+ this._audioPlayer && this.player && (this._audioPlayer.currentTime = this.player?.currentTime);
+ update && this.player && this.playFrom(start, undefined, true);
+ update && this._audioPlayer?.play();
+ update && this._youtubePlayer?.playVideo();
+ this._youtubePlayer && !this._playTimer && (this._playTimer = setInterval(this.updateTimecode, 5));
+ } catch (e) {
+ console.log("Video Play Exception:", e);
+ }
}
this.updateTimecode();
}
+    // seeks to the given time
@action public Seek(time: number) {
try {
this._youtubePlayer?.seekTo(Math.round(time), true);
@@ -107,8 +182,10 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
this._audioPlayer && (this._audioPlayer.currentTime = time);
}
+ // pauses video
@action public Pause = (update: boolean = true) => {
this._playing = false;
+ this.removeCurrentlyPlaying();
try {
update && this.player?.pause();
update && this._audioPlayer?.pause();
@@ -120,12 +197,21 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
this._youtubePlayer && SelectionManager.DeselectAll(); // if we don't deselect the player, then we get an annoying YouTube spinner I guess telling us we're paused.
this._playTimer = undefined;
- this.props.renderDepth !== -1 && this.updateTimecode();
+ this.updateTimecode();
+ if (!this._finished) clearTimeout(this._playRegionTimer); // if paused in the middle of playback, prevents restart on next play
}
+ // toggles video full screen
@action public FullScreen = () => {
- this._fullScreen = true;
- this.player && this.player.requestFullscreen();
+ if (document.fullscreenElement === this._contentRef) {
+ this._fullScreen = false;
+ this.player && this._contentRef && document.exitFullscreen();
+ }
+ else {
+ this._fullScreen = true;
+ this.player && this._contentRef && this._contentRef.requestFullscreen();
+
+ }
try {
this._youtubePlayer && this.props.addDocTab(this.rootDoc, "add");
} catch (e) {
@@ -133,16 +219,15 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
}
- @action public Snapshot(downX?: number, downY?: number) {
+
+ // creates and links snapshot photo of current video frame
+ @action public Snapshot = (downX?: number, downY?: number, cb?: (filename: string, x: number | undefined, y: number | undefined) => void) => {
const width = NumCast(this.layoutDoc._width);
const canvas = document.createElement('canvas');
canvas.width = 640;
canvas.height = 640 * Doc.NativeHeight(this.layoutDoc) / (Doc.NativeWidth(this.layoutDoc) || 1);
const ctx = canvas.getContext('2d');//draw image to canvas. scale to target dimensions
if (ctx) {
- // ctx.rect(0, 0, canvas.width, canvas.height);
- // ctx.fillStyle = "blue";
- // ctx.fill();
this._videoRef && ctx.drawImage(this._videoRef, 0, 0, canvas.width, canvas.height);
}
@@ -172,11 +257,21 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
const encodedFilename = encodeURIComponent("snapshot" + retitled + "_" + (this.layoutDoc._currentTimecode || 0).toString().replace(/\./, "_"));
const filename = basename(encodedFilename);
VideoBox.convertDataUri(dataUrl, filename).then((returnedFilename: string) =>
- returnedFilename && this.createRealSummaryLink(returnedFilename, downX, downY));
+ returnedFilename && (cb ?? this.createRealSummaryLink)(returnedFilename, downX, downY));
}
}
- private createRealSummaryLink = (imagePath: string, downX?: number, downY?: number) => {
+ updateIcon = () => {
+ const makeIcon = (returnedfilename: string) => {
+ this.dataDoc.icon = new ImageField(returnedfilename);
+ this.dataDoc["icon-nativeWidth"] = this.layoutDoc[WidthSym]();
+ this.dataDoc["icon-nativeHeight"] = this.layoutDoc[HeightSym]();
+ };
+ this.Snapshot(undefined, undefined, makeIcon);
+ }
+
+ // creates link for snapshot
+ createRealSummaryLink = (imagePath: string, downX?: number, downY?: number) => {
const url = !imagePath.startsWith("/") ? Utils.CorsProxy(imagePath) : imagePath;
const width = NumCast(this.layoutDoc._width) || 1;
const height = NumCast(this.layoutDoc._height);
@@ -194,6 +289,27 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
(downX !== undefined && downY !== undefined) && DocumentManager.Instance.getFirstDocumentView(imageSummary)?.startDragging(downX, downY, "move", true));
}
+
+ getAnchor = () => {
+ const timecode = Cast(this.layoutDoc._currentTimecode, "number", null);
+ const marquee = AnchorMenu.Instance.GetAnchor?.();
+ return CollectionStackedTimeline.createAnchor(this.rootDoc, this.dataDoc, this.annotationKey, "_timecodeToShow"/* videoStart */, "_timecodeToHide" /* videoEnd */, timecode ? timecode : undefined, undefined, marquee) || this.rootDoc;
+ }
+
+
+ // sets video info on load
+ videoLoad = action(() => {
+ const aspect = this.player!.videoWidth / this.player!.videoHeight;
+ Doc.SetNativeWidth(this.dataDoc, this.player!.videoWidth);
+ Doc.SetNativeHeight(this.dataDoc, this.player!.videoHeight);
+ this.layoutDoc._height = NumCast(this.layoutDoc._width) / aspect;
+ if (Number.isFinite(this.player!.duration)) {
+ this.rawDuration = this.player!.duration;
+ }
+ });
+
+
+ // updates video time
@action
updateTimecode = () => {
this.player && (this.layoutDoc._currentTimecode = this.player.currentTime);
@@ -204,72 +320,32 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
}
- componentDidMount() {
- this.props.setContentView?.(this); // this tells the DocumentView that this AudioBox is the "content" of the document. this allows the DocumentView to indirectly call getAnchor() on the AudioBox when making a link.
- this._disposers.triggerVideo = reaction(
- () => !LinkDocPreview.LinkInfo && this.props.renderDepth !== -1 ? NumCast(this.Document._triggerVideo, null) : undefined,
- time => time !== undefined && setTimeout(() => {
- this.player && this.Play();
- setTimeout(() => this.Document._triggerVideo = undefined, 10);
- }, this.player ? 0 : 250), // wait for mainCont and try again to play
- { fireImmediately: true }
- );
- this._disposers.triggerStop = reaction(
- () => this.props.renderDepth !== -1 && !LinkDocPreview.LinkInfo ? NumCast(this.Document._triggerVideoStop, null) : undefined,
- stop => stop !== undefined && setTimeout(() => {
- this.player && this.Pause();
- setTimeout(() => this.Document._triggerVideoStop = undefined, 10);
- }, this.player ? 0 : 250), // wait for mainCont and try again to play
- { fireImmediately: true }
- );
- if (this.youtubeVideoId) {
- const youtubeaspect = 400 / 315;
- const nativeWidth = Doc.NativeWidth(this.layoutDoc);
- const nativeHeight = Doc.NativeHeight(this.layoutDoc);
- if (!nativeWidth || !nativeHeight) {
- if (!nativeWidth) Doc.SetNativeWidth(this.dataDoc, 600);
- Doc.SetNativeHeight(this.dataDoc, (nativeWidth || 600) / youtubeaspect);
- this.layoutDoc._height = NumCast(this.layoutDoc._width) / youtubeaspect;
- }
- }
- }
-
- componentWillUnmount() {
- this.Pause();
- Object.keys(this._disposers).forEach(d => this._disposers[d]?.());
- }
+ // sets video element ref
@action
setVideoRef = (vref: HTMLVideoElement | null) => {
this._videoRef = vref;
if (vref) {
this._videoRef!.ontimeupdate = this.updateTimecode;
// @ts-ignore
- vref.onfullscreenchange = action((e) => this._fullScreen = vref.webkitDisplayingFullscreen);
+ // vref.onfullscreenchange = action((e) => this._fullScreen = vref.webkitDisplayingFullscreen);
this._disposers.reactionDisposer?.();
this._disposers.reactionDisposer = reaction(() => NumCast(this.layoutDoc._currentTimecode),
time => !this._playing && (vref.currentTime = time), { fireImmediately: true });
}
}
- public static async convertDataUri(imageUri: string, returnedFilename: string, nosuffix = false) {
- try {
- const posting = Utils.prepend("/uploadURI");
- const returnedUri = await rp.post(posting, {
- body: {
- uri: imageUri,
- name: returnedFilename,
- nosuffix
- },
- json: true,
- });
- return returnedUri;
-
- } catch (e) {
- console.log("VideoBox :" + e);
+    // sets ref for the div that wraps the video and controls for fullscreen
+ @action
+ setContentRef = (cref: HTMLDivElement | null) => {
+ this._contentRef = cref;
+ if (cref) {
+ cref.onfullscreenchange = action((e) => this._fullScreen = (document.fullscreenElement === cref));
}
}
+
+ // context menu
specificContextMenu = (e: React.MouseEvent): void => {
const field = Cast(this.dataDoc[this.props.fieldKey], VideoField);
if (field) {
@@ -283,31 +359,32 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
this._videoRef!.srcObject = !this._screenCapture ? undefined : await (navigator.mediaDevices as any).getDisplayMedia({ video: true });
}), icon: "expand-arrows-alt"
});
+ subitems.push({ description: (this.layoutDoc.dontAutoFollowLinks ? "" : "Don't") + " follow links when encountered", event: () => this.layoutDoc.dontAutoFollowLinks = !this.layoutDoc.dontAutoFollowLinks, icon: "expand-arrows-alt" });
subitems.push({ description: (this.layoutDoc.dontAutoPlayFollowedLinks ? "" : "Don't") + " play when link is selected", event: () => this.layoutDoc.dontAutoPlayFollowedLinks = !this.layoutDoc.dontAutoPlayFollowedLinks, icon: "expand-arrows-alt" });
subitems.push({ description: (this.layoutDoc.autoPlayAnchors ? "Don't auto play" : "Auto play") + " anchors onClick", event: () => this.layoutDoc.autoPlayAnchors = !this.layoutDoc.autoPlayAnchors, icon: "expand-arrows-alt" });
- subitems.push({ description: "Toggle Native Controls", event: action(() => VideoBox._nativeControls = !VideoBox._nativeControls), icon: "expand-arrows-alt" });
+ // subitems.push({ description: "Toggle Native Controls", event: action(() => VideoBox._nativeControls = !VideoBox._nativeControls), icon: "expand-arrows-alt" });
+ // subitems.push({ description: "Start Trim All", event: () => this.startTrim(TrimScope.All), icon: "expand-arrows-alt" });
+ // subitems.push({ description: "Start Trim Clip", event: () => this.startTrim(TrimScope.Clip), icon: "expand-arrows-alt" });
+ // subitems.push({ description: "Stop Trim", event: () => this.finishTrim(), icon: "expand-arrows-alt" });
subitems.push({ description: "Copy path", event: () => { Utils.CopyText(url); }, icon: "expand-arrows-alt" });
ContextMenu.Instance.addItem({ description: "Options...", subitems: subitems, icon: "video" });
}
}
- // returns the path of the audio file
- @computed get audiopath() {
- const field = Cast(this.props.Document[this.props.fieldKey + '-audio'], AudioField, null);
- const vfield = Cast(this.dataDoc[this.fieldKey], VideoField, null);
- return field?.url.href ?? vfield?.url.href ?? "";
- }
+
// ref for updating time
- _audioPlayer: HTMLAudioElement | null = null;
setAudioRef = (e: HTMLAudioElement | null) => this._audioPlayer = e;
+
+ // renders the video and audio
@computed get content() {
const field = Cast(this.dataDoc[this.fieldKey], VideoField);
const interactive = CurrentUserUtils.SelectedTool !== InkTool.None || !this.props.isSelected() ? "" : "-interactive";
const classname = "videoBox-content" + (this._fullScreen ? "-fullScreen" : "") + interactive;
return !field ? <div key="loading">Loading</div> :
<div className="videoBox-contentContainer" key="container" style={{ mixBlendMode: "multiply" }}>
- <div className={classname}>
- <video key="video" autoPlay={this._screenCapture} ref={this.setVideoRef}
+ <div className={classname} ref={this.setContentRef} onPointerDown={(e) => this._fullScreen && e.stopPropagation()}>
+ {this.uIButtons}
+ <video key="video" autoPlay={this._screenCapture} ref={this.setVideoRef} style={this._fullScreen ? this.fullScreenSize() : {}}
onCanPlay={this.videoLoad}
controls={VideoBox._nativeControls}
onPlay={() => this.Play()}
@@ -326,10 +403,6 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
</div>;
}
- @computed get youtubeVideoId() {
- const field = Cast(this.dataDoc[this.props.fieldKey], VideoField);
- return field && field.url.href.indexOf("youtube") !== -1 ? ((arr: string[]) => arr[arr.length - 1])(field.url.href.split("/")) : "";
- }
@action youtubeIframeLoaded = (e: any) => {
if (!this._youtubeContentCreated) {
@@ -340,7 +413,8 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
this.loadYouTube(e.target);
}
- private loadYouTube = (iframe: any) => {
+
+ loadYouTube = (iframe: any) => {
let started = true;
const onYoutubePlayerStateChange = (event: any) => runInAction(() => {
if (started && event.data === YT.PlayerState.PLAYING) {
@@ -372,48 +446,19 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
});
}
}
- private get uIButtons() {
- const curTime = NumCast(this.layoutDoc._currentTimecode);
- const nonNativeControls = [
- <Tooltip title={<div className="dash-tooltip">{"playback"}</div>} key="play" placement="bottom">
- <div className="videoBox-play" onPointerDown={this.onPlayDown} >
- <FontAwesomeIcon icon={this._playing ? "pause" : "play"} size="lg" />
- </div>
- </Tooltip>,
- <Tooltip title={<div className="dash-tooltip">{"timecode"}</div>} key="time" placement="bottom">
- <div className="videoBox-time" onPointerDown={this.onResetDown} >
- <span>{formatTime(curTime)}</span>
- <span style={{ fontSize: 8 }}>{" " + Math.floor((curTime - Math.trunc(curTime)) * 100).toString().padStart(2, "0")}</span>
- </div>
- </Tooltip>,
- <Tooltip title={<div className="dash-tooltip">{"view full screen"}</div>} key="full" placement="bottom">
- <div className="videoBox-full" onPointerDown={this.FullScreen}>
- <FontAwesomeIcon icon="expand" size="lg" />
- </div>
- </Tooltip>];
- return <div className="videoBox-ui">
- {[...(VideoBox._nativeControls ? [] : nonNativeControls),
- <Tooltip title={<div className="dash-tooltip">{"snapshot current frame"}</div>} key="snap" placement="bottom">
- <div className="videoBox-snapshot" onPointerDown={this.onSnapshotDown} >
- <FontAwesomeIcon icon="camera" size="lg" />
- </div>
- </Tooltip>,
- <Tooltip title={<div className="dash-tooltip">{"show annotation timeline"}</div>} key="timeline" placement="bottom">
- <div className="videoBox-timelineButton" onPointerDown={this.onTimelineHdlDown}>
- <FontAwesomeIcon icon="eye" size="lg" />
- </div>
- </Tooltip>,]}
- </div>;
- }
+
+ // for play button
onPlayDown = () => this._playing ? this.Pause() : this.Play();
+ // for fullscreen button
onFullDown = (e: React.PointerEvent) => {
this.FullScreen();
e.stopPropagation();
e.preventDefault();
}
+ // for snapshot button
onSnapshotDown = (e: React.PointerEvent) => {
setupMoveUpEvents(this, e, (e) => {
this.Snapshot(e.clientX, e.clientY);
@@ -421,14 +466,18 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}, emptyFunction, () => this.Snapshot());
}
- onTimelineHdlDown = action((e: React.PointerEvent) => {
+ // for show/hide timeline button, transitions between show/hide
+ @action
+ onTimelineHdlDown = (e: React.PointerEvent) => {
this._clicking = true;
setupMoveUpEvents(this, e,
- action((e: PointerEvent) => {
+            action(() => {
this._clicking = false;
if (this.props.isContentActive()) {
- const local = this.props.ScreenToLocalTransform().scale(this.props.scaling?.() || 1).transformPoint(e.clientX, e.clientY);
- this.layoutDoc._timelineHeightPercent = Math.max(0, Math.min(100, local[1] / this.props.PanelHeight() * 100));
+ // const local = this.props.ScreenToLocalTransform().scale(this.props.scaling?.() || 1).transformPoint(e.clientX, e.clientY);
+ // this.layoutDoc._timelineHeightPercent = Math.max(0, Math.min(100, local[1] / this.props.PanelHeight() * 100));
+
+ this.layoutDoc._timelineHeightPercent = 80;
}
return false;
}), emptyFunction,
@@ -436,19 +485,30 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
this.layoutDoc._timelineHeightPercent = this.heightPercent !== 100 ? 100 : VideoBox.heightPercent;
setTimeout(action(() => this._clicking = false), 500);
}, this.props.isContentActive(), this.props.isContentActive());
- });
+ }
- onResetDown = (e: React.PointerEvent) => {
- setupMoveUpEvents(this, e,
- (e: PointerEvent) => {
- this.Seek(Math.max(0, NumCast(this.layoutDoc._currentTimecode) + Math.sign(e.movementX) * 0.0333));
- e.stopImmediatePropagation();
- return false;
- },
- emptyFunction,
- (e: PointerEvent) => this.layoutDoc._currentTimecode = 0);
+
+ // removes video from currently playing display
+ @action
+ removeCurrentlyPlaying = () => {
+ if (CollectionStackedTimeline.CurrentlyPlaying) {
+ const index = CollectionStackedTimeline.CurrentlyPlaying.indexOf(this.layoutDoc);
+ index !== -1 && CollectionStackedTimeline.CurrentlyPlaying.splice(index, 1);
+ }
}
+ // adds video to currently playing display
+ @action
+ addCurrentlyPlaying = () => {
+ if (!CollectionStackedTimeline.CurrentlyPlaying) {
+ CollectionStackedTimeline.CurrentlyPlaying = [];
+ }
+ if (CollectionStackedTimeline.CurrentlyPlaying.indexOf(this.layoutDoc) === -1) {
+ CollectionStackedTimeline.CurrentlyPlaying.push(this.layoutDoc);
+ }
+ }
+
+
@computed get youtubeContent() {
this._youtubeIframeId = VideoBox._youtubeIframeCounter++;
this._youtubeContentCreated = this._forceCreateYouTubeIFrame ? true : true;
@@ -460,6 +520,8 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
src={`https://www.youtube.com/embed/${this.youtubeVideoId}?enablejsapi=1&rel=0&showinfo=1&autoplay=0&mute=1&start=${start}&modestbranding=1&controls=${VideoBox._nativeControls ? 1 : 0}`} />;
}
+
+ // for annotating, adds doc with time info
@action.bound
addDocWithTimecode(doc: Doc | Doc[]): boolean {
const docs = doc instanceof Doc ? [doc] : doc;
@@ -468,52 +530,245 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
return this.addDocument(doc);
}
- // play back the video from time
+
+    // plays back the video from seekTimeInSeconds; fullPlay tells whether the clip is being played to its end vs. a link's range
@action
- playFrom = (seekTimeInSeconds: number, endTime: number = this.duration) => {
+ playFrom = (seekTimeInSeconds: number, endTime?: number, fullPlay: boolean = false) => {
clearTimeout(this._playRegionTimer);
- this._playRegionDuration = endTime - seekTimeInSeconds;
if (Number.isNaN(this.player?.duration)) {
setTimeout(() => this.playFrom(seekTimeInSeconds, endTime), 500);
- } else if (this.player) {
- if (seekTimeInSeconds < 0) {
- if (seekTimeInSeconds > -1) {
- setTimeout(() => this.playFrom(0), -seekTimeInSeconds * 1000);
- } else {
- this.Pause();
- }
- } else if (seekTimeInSeconds <= this.player.duration) {
- this.player.currentTime = seekTimeInSeconds;
+ }
+ else if (this.player) {
+            // trim bounds override the requested playback bounds
+ const end = Math.min(this.timeline?.trimEnd ?? this.rawDuration, endTime ?? this.timeline?.trimEnd ?? this.rawDuration);
+ const start = Math.max(this.timeline?.trimStart ?? 0, seekTimeInSeconds);
+ const playRegionDuration = end - start;
+ // checks if times are within clip range
+ if (seekTimeInSeconds >= 0 && (this.timeline?.trimStart || 0) <= end && seekTimeInSeconds <= (this.timeline?.trimEnd || this.rawDuration)) {
+ this.player.currentTime = start;
this._audioPlayer && (this._audioPlayer.currentTime = seekTimeInSeconds);
this.player.play();
this._audioPlayer?.play();
- runInAction(() => this._playing = true);
- if (endTime !== this.duration) {
- this._playRegionTimer = setTimeout(() => this.Pause(), (this._playRegionDuration) * 1000); // use setTimeout to play a specific duration
- }
+ this._playing = true;
+ this.addCurrentlyPlaying();
+ this._playRegionTimer = setTimeout(
+ () => {
+                        // track whether the end of the clip was reached so that the clip restarts on the next play
+ if (fullPlay) this._finished = true;
+ // removes from currently playing if playback has reached end of range marker
+ else this.removeCurrentlyPlaying();
+ this.Pause();
+ }, playRegionDuration * 1000);
} else {
this.Pause();
}
}
}
+
+ // ends trim, hides trim controls and displays new clip
+ @undoBatch
+ finishTrim = action(() => {
+ this.Pause();
+ this.setPlayheadTime(Math.max(Math.min(this.timeline?.trimEnd || 0, this.player!.currentTime), this.timeline?.trimStart || 0));
+ this.timeline?.StopTrimming();
+ });
+
+ // displays trim controls to start trimming clip
+ startTrim = (scope: TrimScope) => {
+ this.Pause();
+ this.timeline?.StartTrimming(scope);
+ }
+
+    // for trim button: double click opens trimming over the full clip, single click toggles trimming within the current trim bounds
+ onClipPointerDown = (e: React.PointerEvent) => {
+ // if timeline isn't shown, show first then trim
+ this.heightPercent >= 100 && this.onTimelineHdlDown(e);
+ this.timeline && setupMoveUpEvents(this, e, returnFalse, returnFalse, action((e: PointerEvent, doubleTap?: boolean) => {
+ if (doubleTap) {
+ this.startTrim(TrimScope.All);
+ } else if (this.timeline) {
+ this.Pause();
+ this.timeline.IsTrimming !== TrimScope.None ? this.finishTrim() : this.startTrim(TrimScope.Clip);
+ }
+ }));
+ }
+
+
+    // for volume slider: sets playback volume and unmutes if muted
+ @action
+ setVolume = (volume: number) => {
+ if (this.player) {
+ this._volume = volume;
+ this.player.volume = volume;
+ if (this._muted) {
+ this.toggleMute();
+ }
+ }
+ }
+
+ // toggles video mute
+ @action
+ toggleMute = () => {
+ if (this.player) {
+ this._muted = !this._muted;
+ this.player.muted = this._muted;
+ }
+ }
+
+
+ // stretches vertically or horizontally depending on video orientation so video fits full screen
+ fullScreenSize() {
+ if (this._videoRef && this._videoRef.videoHeight / this._videoRef.videoWidth > 1) {
+ return { height: "100%" };
+ }
+ else {
+ return { width: "100%" };
+ }
+ }
+
+
+ // for zoom slider, sets timeline waveform zoom
+ zoom = (zoom: number) => {
+ this.timeline?.setZoom(zoom);
+ }
+
+
+ // plays link
playLink = (doc: Doc) => {
- const startTime = Math.max(0, (this._stackedTimeline.current?.anchorStart(doc) || 0));
- const endTime = this._stackedTimeline.current?.anchorEnd(doc);
+ const startTime = Math.max(0, (this._stackedTimeline?.anchorStart(doc) || 0));
+ const endTime = this.timeline?.anchorEnd(doc);
if (startTime !== undefined) {
if (!this.layoutDoc.dontAutoPlayFollowedLinks) endTime ? this.playFrom(startTime, endTime) : this.playFrom(startTime);
else this.Seek(startTime);
}
}
- playing = () => this._playing;
+
+ // starts marquee selection
+ marqueeDown = (e: React.PointerEvent) => {
+ if (!e.altKey && e.button === 0 && this.layoutDoc._viewScale === 1 && this.props.isContentActive(true) && ![InkTool.Highlighter, InkTool.Pen].includes(CurrentUserUtils.SelectedTool)) {
+ setupMoveUpEvents(this, e, action(e => {
+ MarqueeAnnotator.clearAnnotations(this._savedAnnotations);
+ this._marqueeing = [e.clientX, e.clientY];
+ return true;
+ }), returnFalse, () => MarqueeAnnotator.clearAnnotations(this._savedAnnotations), false);
+ }
+ }
+
+ // ends marquee selection
+ @action
+ finishMarquee = () => {
+ this._marqueeing = undefined;
+ this.props.select(true);
+ }
+
timelineWhenChildContentsActiveChanged = action((isActive: boolean) => this.props.whenChildContentsActiveChanged(this._isAnyChildContentActive = isActive));
+
timelineScreenToLocal = () => this.props.ScreenToLocalTransform().scale(this.scaling()).translate(0, -this.heightPercent / 100 * this.props.PanelHeight());
- setAnchorTime = (time: number) => this.player!.currentTime = this.layoutDoc._currentTimecode = time;
+
+ setPlayheadTime = (time: number) => this.player!.currentTime = this.layoutDoc._currentTimecode = time;
+
timelineHeight = () => this.props.PanelHeight() * (100 - this.heightPercent) / 100;
+
+ playing = () => this._playing;
+
+ contentFunc = () => [this.youtubeVideoId ? this.youtubeContent : this.content];
+
+ scaling = () => this.props.scaling?.() || 1;
+
+ panelWidth = () => this.props.PanelWidth() * this.heightPercent / 100;
+ panelHeight = () => this.layoutDoc._fitWidth ? this.panelWidth() / (Doc.NativeAspect(this.rootDoc) || 1) : this.props.PanelHeight() * this.heightPercent / 100;
+
+ screenToLocalTransform = () => {
+ const offset = (this.props.PanelWidth() - this.panelWidth()) / 2 / this.scaling();
+ return this.props.ScreenToLocalTransform().translate(-offset, 0).scale(100 / this.heightPercent);
+ }
+
+ marqueeFitScaling = () => (this.props.scaling?.() || 1) * this.heightPercent / 100;
+ marqueeOffset = () => [this.panelWidth() / 2 * (1 - this.heightPercent / 100) / (this.heightPercent / 100), 0];
+
+ timelineDocFilter = () => [`_timelineLabel:true,${Utils.noRecursionHack}:x`];
+
+
+ // renders video controls
+ @computed get uIButtons() {
+ const curTime = NumCast(this.layoutDoc._currentTimecode) - (this.timeline?.clipStart || 0);
+ return <div className="videoBox-ui" style={this._fullScreen || this.heightPercent === 100 ? { fontSize: "40px", minWidth: "80%" } : {}}>
+ <div className="videobox-button"
+                title={this._playing ? "pause" : "play"}
+ onPointerDown={this.onPlayDown}>
+ <FontAwesomeIcon icon={this._playing ? "pause" : "play"} />
+ </div>
+
+ {this.timeline && <div className="timecode-controls">
+ <div className="timecode-current">
+ {formatTime(curTime)}
+ </div>
+
+ {this._fullScreen || this.heightPercent === 100 ?
+ <div className="timeline-slider">
+ <input type="range" step="0.1" min={this.timeline.clipStart} max={this.timeline.clipEnd} value={curTime}
+ className="toolbar-slider time-progress"
+ onPointerDown={(e: React.PointerEvent) => { e.stopPropagation(); }}
+ onChange={(e: React.ChangeEvent<HTMLInputElement>) => this.setPlayheadTime(Number(e.target.value))}
+ />
+ </div>
+ :
+ <div>/</div>}
+
+ <div className="timecode-end">
+ {formatTime(this.timeline.clipDuration)}
+ </div>
+ </div>}
+
+ <div className="videobox-button"
+ title={"full screen"}
+ onPointerDown={this.onFullDown}>
+ <FontAwesomeIcon icon="expand" />
+ </div>
+
+ {!this._fullScreen && <div className="videobox-button"
+ title={"show timeline"}
+ onPointerDown={this.onTimelineHdlDown}>
+ <FontAwesomeIcon icon="eye" />
+ </div>}
+
+ {!this._fullScreen && <div className="videobox-button"
+ title={this.timeline?.IsTrimming !== TrimScope.None ? "finish trimming" : "start trim"}
+ onPointerDown={this.onClipPointerDown}>
+ <FontAwesomeIcon icon={this.timeline?.IsTrimming !== TrimScope.None ? "check" : "cut"} />
+ </div>}
+
+ <div className="videobox-button show-slider"
+ title={this._muted ? "unmute" : "mute"}
+ onPointerDown={(e) => { e.stopPropagation(); this.toggleMute(); }}>
+ <FontAwesomeIcon icon={this._muted ? "volume-mute" : "volume-up"} />
+ </div>
+ <input type="range" step="0.1" min="0" max="1" value={this._muted ? 0 : this._volume}
+ className="toolbar-slider volume"
+ onPointerDown={(e: React.PointerEvent) => e.stopPropagation()}
+ onChange={(e: React.ChangeEvent<HTMLInputElement>) => this.setVolume(Number(e.target.value))}
+ />
+
+ {!this._fullScreen && this.heightPercent !== 100 &&
+ <>
+ <div className="videobox-button" title="zoom">
+ <FontAwesomeIcon icon="search-plus" />
+ </div>
+ <input type="range" step="0.1" min="1" max="5" value={this.timeline?._zoomFactor}
+ className="toolbar-slider zoom"
+ onPointerDown={(e: React.PointerEvent) => { e.stopPropagation(); }}
+ onChange={(e: React.ChangeEvent<HTMLInputElement>) => { this.zoom(Number(e.target.value)); }}
+ />
+ </>}
+ </div>;
+ }
+
+ // renders CollectionStackedTimeline
@computed get renderTimeline() {
return <div className="videoBox-stackPanel" style={{ transition: this.transition, height: `${100 - this.heightPercent}%` }}>
- <CollectionStackedTimeline ref={this._stackedTimeline} {...this.props}
+ <CollectionStackedTimeline ref={action((r: any) => this._stackedTimeline = r)} {...this.props}
fieldKey={this.annotationKey}
dictationKey={this.fieldKey + "-dictation"}
mediaPath={this.audiopath}
@@ -522,59 +777,29 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
endTag={"_timecodeToHide" /* videoEnd */}
bringToFront={emptyFunction}
CollectionView={undefined}
- duration={this.duration}
playFrom={this.playFrom}
- setTime={this.setAnchorTime}
+ setTime={this.setPlayheadTime}
playing={this.playing}
isAnyChildContentActive={this.isAnyChildContentActive}
whenChildContentsActiveChanged={this.timelineWhenChildContentsActiveChanged}
+ moveDocument={this.moveDocument}
+ addDocument={this.addDocument}
removeDocument={this.removeDocument}
ScreenToLocalTransform={this.timelineScreenToLocal}
Play={this.Play}
Pause={this.Pause}
playLink={this.playLink}
PanelHeight={this.timelineHeight}
- trimming={false}
- trimStart={0}
- trimEnd={this.duration}
- trimDuration={this.duration}
- setStartTrim={emptyFunction}
- setEndTrim={emptyFunction}
+ rawDuration={this.rawDuration}
/>
</div>;
}
+ // renders annotation layer
@computed get annotationLayer() {
return <div className="videoBox-annotationLayer" style={{ transition: this.transition, height: `${this.heightPercent}%` }} ref={this._annotationLayer} />;
}
- marqueeDown = (e: React.PointerEvent) => {
- if (!e.altKey && e.button === 0 && this.layoutDoc._viewScale === 1 && this.props.isContentActive(true) && ![InkTool.Highlighter, InkTool.Pen].includes(CurrentUserUtils.SelectedTool)) {
- setupMoveUpEvents(this, e, action(e => {
- MarqueeAnnotator.clearAnnotations(this._savedAnnotations);
- this._marqueeing = [e.clientX, e.clientY];
- return true;
- }), returnFalse, () => MarqueeAnnotator.clearAnnotations(this._savedAnnotations), false);
- }
- }
-
- finishMarquee = action(() => {
- this._marqueeing = undefined;
- this.props.select(true);
- });
-
- @computed get fitWidth() { return this.props.docViewPath?.().slice(-1)[0].fitWidth; }
- contentFunc = () => [this.youtubeVideoId ? this.youtubeContent : this.content];
- scaling = () => this.props.scaling?.() || 1;
- panelWidth = (): number => this.fitWidth ? this.props.PanelWidth() : (Doc.NativeAspect(this.rootDoc) || 1) * this.panelHeight();
- panelHeight = (): number => this.fitWidth ? this.panelWidth() / (Doc.NativeAspect(this.rootDoc) || 1) : this.heightPercent / 100 * this.props.PanelHeight();
- screenToLocalTransform = () => {
- const offset = (this.props.PanelWidth() - this.panelWidth()) / 2 / this.scaling();
- return this.props.ScreenToLocalTransform().translate(-offset, 0).scale(100 / this.heightPercent);
- }
- marqueeFitScaling = () => (this.props.scaling?.() || 1) * this.heightPercent / 100;
- marqueeOffset = () => [this.panelWidth() / 2 * (1 - this.heightPercent / 100) / (this.heightPercent / 100), 0];
- timelineDocFilter = () => [`_timelineLabel:true,${Utils.noRecursionHack}:x`];
render() {
const borderRad = this.props.styleProvider?.(this.layoutDoc, this.props, StyleProp.BorderRounding);
const borderRadius = borderRad?.includes("px") ? `${Number(borderRad.split("px")[0]) / this.scaling()}px` : borderRad;
@@ -627,7 +852,6 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
mainCont={this._mainCont.current}
/>}
{this.renderTimeline}
- {this.uIButtons}
</div>
</div >);
}