author     mehekj <mehek.jethani@gmail.com>  2022-03-20 15:22:50 -0400
committer  mehekj <mehek.jethani@gmail.com>  2022-03-20 15:22:50 -0400
commit     39c85293f6c3d385ea64ba0db8c9736dfaaec993 (patch)
tree       7d10a6a48e93b16cd1c8a4b285ec022f5b515738 /src
parent     d746d32bb2ad4e3e8ea40774448a2d51697475ba (diff)
cleaned up files and added some comments
Diffstat (limited to 'src')
-rw-r--r--  src/client/views/AudioWaveform.tsx                           137
-rw-r--r--  src/client/views/collections/CollectionStackedTimeline.tsx  102
-rw-r--r--  src/client/views/nodes/AudioBox.tsx                         108
-rw-r--r--  src/client/views/nodes/VideoBox.tsx                         146
4 files changed, 295 insertions, 198 deletions
diff --git a/src/client/views/AudioWaveform.tsx b/src/client/views/AudioWaveform.tsx
index 8a3c3c319..525c0ce5a 100644
--- a/src/client/views/AudioWaveform.tsx
+++ b/src/client/views/AudioWaveform.tsx
@@ -1,16 +1,26 @@
import React = require("react");
import axios from "axios";
-import { action, computed, reaction, IReactionDisposer } from "mobx";
+import { action, computed, IReactionDisposer, reaction } from "mobx";
import { observer } from "mobx-react";
import Waveform from "react-audio-waveform";
import { Doc } from "../../fields/Doc";
import { List } from "../../fields/List";
import { listSpec } from "../../fields/Schema";
-import { Cast, NumCast } from "../../fields/Types";
+import { Cast } from "../../fields/Types";
import { numberRange } from "../../Utils";
import "./AudioWaveform.scss";
import { Colors } from "./global/globalEnums";
-import Color = require("color");
+
+
+/**
+ * AudioWaveform
+ *
+ * Used in CollectionStackedTimeline to render a canvas with a visual of an audio waveform for AudioBox and VideoBox documents.
+ * Uses react-audio-waveform package.
+ * Bins the audio data into audioBuckets, which are passed to the package to render the lines.
+ * Calculates new buckets whenever a new zoom factor or set of trim bounds is applied, and stores them in a field on the layout doc whose key encodes the bounds and zoom for that list (see audioBucketField)
+ */
+
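To make the bucketing concrete, here is a minimal sketch of reducing decoded samples to NUMBER_OF_BUCKETS per-bucket amplitudes (createWaveformBuckets is elided in this hunk, so the decoding step and the name `samples` are assumptions):

function bucketPeaks(samples: Float32Array, numBuckets: number): number[] {
    const bucketSize = Math.max(1, Math.floor(samples.length / numBuckets));
    return Array.from({ length: numBuckets }, (_, i) => {
        let sum = 0, count = 0;
        for (let j = i * bucketSize; j < Math.min((i + 1) * bucketSize, samples.length); j++, count++) {
            sum += Math.abs(samples[j]); // accumulate absolute amplitude
        }
        return count ? sum / count : 0; // mean amplitude of this bucket
    });
}

The resulting list is what gets cached on the layout doc under a key like "audioBuckets//0_00/30_00/10" (i.e. audioBucketField(0, 30, 1)).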
export interface AudioWaveformProps {
duration: number; // length of media clip
@@ -26,19 +36,24 @@ export interface AudioWaveformProps {
@observer
export class AudioWaveform extends React.Component<AudioWaveformProps> {
- public static NUMBER_OF_BUCKETS = 100;
+ public static NUMBER_OF_BUCKETS = 100; // number of buckets data is divided into to draw waveform lines
+
_disposer: IReactionDisposer | undefined;
+
@computed get waveHeight() { return Math.max(50, this.props.PanelHeight); }
+
@computed get clipStart() { return this.props.clipStart; }
@computed get clipEnd() { return this.props.clipEnd; }
@computed get zoomFactor() { return this.props.zoomFactor; }
- @computed get audioBuckets() { return Cast(this.props.layoutDoc[this.audioBucketField(this.clipStart, this.clipEnd, this.zoomFactor)], listSpec("number"), []); }
+ @computed get audioBuckets() { return Cast(this.props.layoutDoc[this.audioBucketField(this.clipStart, this.clipEnd, this.zoomFactor)], listSpec("number"), []); }
audioBucketField = (start: number, end: number, zoomFactor: number) => "audioBuckets/" + "/" + start.toFixed(2).replace(".", "_") + "/" + end.toFixed(2).replace(".", "_") + "/" + (zoomFactor * 10);
+
componentWillUnmount() {
this._disposer?.();
}
+
componentDidMount() {
this._disposer = reaction(() => ({ clipStart: this.clipStart, clipEnd: this.clipEnd, fieldKey: this.audioBucketField(this.clipStart, this.clipEnd, this.zoomFactor), zoomFactor: this.props.zoomFactor }),
({ clipStart, clipEnd, fieldKey, zoomFactor }) => {
@@ -49,7 +64,6 @@ export class AudioWaveform extends React.Component<AudioWaveformProps> {
setTimeout(() => this.createWaveformBuckets(fieldKey, clipStart, clipEnd, zoomFactor));
}
}, { fireImmediately: true });
-
}
// decodes the audio file into peaks for generating the waveform
@@ -86,16 +100,10 @@ export class AudioWaveform extends React.Component<AudioWaveformProps> {
}
);
}
+
render() {
return (
<div className="audioWaveform">
- {/* <Waveform
- barWidth={2}
- width={() => this.props.PanelWidth}
- height={this.props.PanelHeight}
- peaks={this.audioBuckets}
- color={Colors.MEDIUM_BLUE}
- /> */}
<Waveform
color={Colors.MEDIUM_BLUE_ALT}
height={this.waveHeight}
@@ -108,105 +116,4 @@ export class AudioWaveform extends React.Component<AudioWaveformProps> {
</div>
);
}
-}
-
-
-export interface WaveformProps {
- barWidth: number;
- width: () => number;
- height: () => number;
- peaks: number[];
- color: string;
-}
-
-// @observer
-// export class Waveform extends React.Component<WaveformProps> {
-// private _canvas: HTMLCanvasElement | null = null;
-
-// get width() { return this.props.width(); }
-// get height() { return this.props.height(); }
-// get peaks() { return this.props.peaks; }
-
-// componentDidMount() {
-// this.drawBars();
-// }
-
-// drawBars() {
-// const waveCanvasCtx = this._canvas?.getContext("2d");
-
-// if (waveCanvasCtx) {
-// const pixelRatio = window.devicePixelRatio;
-// console.log(pixelRatio);
-
-// const displayWidth = Math.round(this.width);
-// const displayHeight = Math.round(this.height);
-// waveCanvasCtx.canvas.width = this.width;
-// waveCanvasCtx.canvas.height = this.height;
-// waveCanvasCtx.canvas.style.width = `${displayWidth}px`;
-// waveCanvasCtx.canvas.style.height = `${displayHeight}px`;
-
-// waveCanvasCtx.clearRect(0, 0, this.width, this.height);
-
-// const hasMinVals = [].some.call(this.peaks, (val) => val < 0);
-// let filteredPeaks = this.peaks;
-// if (hasMinVals) {
-// // If the first value is negative, add 1 to the filtered indices
-// let indexOffset = 0;
-// if (this.peaks[0] < 0) {
-// indexOffset = 1;
-// }
-// filteredPeaks = [].filter.call(
-// this.peaks,
-// (_, index) => (index + indexOffset) % 2 == 0
-// );
-// }
-
-// const $ = 0.5;
-// const height = this.height;
-// const offsetY = 0;
-// const halfH = this.height / 2;
-// const length = filteredPeaks.length;
-// const bar = this.props.barWidth;
-// const gap = 2;
-// const step = bar + gap;
-
-// let absmax = 1;
-// absmax = this.absMax(filteredPeaks);
-
-// const scale = length / this.width;
-
-// waveCanvasCtx.fillStyle = this.props.color;
-
-// for (let i = 0; i < this.width; i += step) {
-// let h = Math.round(filteredPeaks[Math.floor(i * scale)] / absmax * halfH)
-// if (h === 0) {
-// h = 1
-// }
-// waveCanvasCtx.fillRect(i + $, halfH - h + offsetY, bar + $, h * 2)
-// }
-// }
-// }
-
-// absMax = (values: number[]) => {
-// let max = -Infinity;
-// for (const i in values) {
-// const num = Math.abs(values[i]);
-// if (num > max) {
-// max = num;
-// }
-// }
-
-// return max;
-// }
-
-
-// render() {
-// return this.props.peaks ? (
-// <canvas
-// ref={(instance) => {
-// this._canvas = instance;
-// }}
-// />
-// ) : null
-// }
-// } \ No newline at end of file
+} \ No newline at end of file
diff --git a/src/client/views/collections/CollectionStackedTimeline.tsx b/src/client/views/collections/CollectionStackedTimeline.tsx
index 7d9dc39ae..bd7d0083b 100644
--- a/src/client/views/collections/CollectionStackedTimeline.tsx
+++ b/src/client/views/collections/CollectionStackedTimeline.tsx
@@ -43,6 +43,19 @@ import {
import { LabelBox } from "../nodes/LabelBox";
import "./CollectionStackedTimeline.scss";
+
+
+/**
+ * CollectionStackedTimeline
+ * Main component: CollectionStackedTimeline.tsx
+ * Supporting Components: AudioWaveform
+ *
+ * CollectionStackedTimeline is a collection view used for audio and video nodes to display a timeline of the temporal media documents with an audio waveform and markers for links and annotations
+ * The actual media is handled in the containing classes (AudioBox, VideoBox) but the timeline deals with rendering and updating timecodes, links, and trimming.
+ * When trimming, two pairs of times are tracked: trimStart and trimEnd are the bounds of the trim controls, while clipStart and clipEnd are the actual trimmed playback bounds of the clip
+ */
+
+
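A minimal sketch of how those two pairs of bounds interact, mirroring the trimStart/trimEnd getters below (the field names are the component's own; the wrapper function is illustrative only):

function effectiveBounds(trimming: boolean, trimStart: number, trimEnd: number, clipStart: number, clipEnd: number) {
    // while trim controls are shown their positions apply; otherwise the saved clip bounds do
    return trimming ? { start: trimStart, end: trimEnd } : { start: clipStart, end: clipEnd };
}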
type PanZoomDocument = makeInterface<[]>;
const PanZoomDocument = makeInterface();
export type CollectionStackedTimelineProps = {
@@ -60,38 +73,42 @@ export type CollectionStackedTimelineProps = {
fieldKey: string;
};
+// trimming state: shows full clip, current trim bounds, or not trimming
export enum TrimScope {
All = 2,
Clip = 1,
None = 0,
}
+
@observer
export class CollectionStackedTimeline extends CollectionSubView<
PanZoomDocument,
CollectionStackedTimelineProps
>(PanZoomDocument) {
- @observable static SelectingRegion: CollectionStackedTimeline | undefined;
- @observable public static CurrentlyPlaying: Doc[];
+ @observable static SelectingRegion: CollectionStackedTimeline | undefined; // timeline selection region
+ @observable public static CurrentlyPlaying: Doc[]; // tracks all currently playing audio and video docs
static RangeScript: ScriptField;
static LabelScript: ScriptField;
static RangePlayScript: ScriptField;
static LabelPlayScript: ScriptField;
- private _timeline: HTMLDivElement | null = null;
- private _timelineWrapper: HTMLDivElement | null = null;
+ private _timeline: HTMLDivElement | null = null; // ref to actual timeline div
+ private _timelineWrapper: HTMLDivElement | null = null; // ref to timeline wrapper div for zooming and scrolling
private _markerStart: number = 0;
@observable _markerEnd: number | undefined;
@observable _trimming: number = TrimScope.None;
- @observable _trimStart: number = 0;
- @observable _trimEnd: number = 0;
+ @observable _trimStart: number = 0; // trim controls start pos
+ @observable _trimEnd: number = 0; // trim controls end pos
@observable _zoomFactor: number = 1;
@observable _scroll: number = 0;
+ // ensures that clip doesn't get trimmed so small that controls cannot be adjusted anymore
get minTrimLength() { return Math.max(this._timeline?.getBoundingClientRect() ? 0.05 * this.clipDuration : 0, 0.5) }
+
@computed get trimStart() { return this.IsTrimming !== TrimScope.None ? this._trimStart : this.clipStart; }
@computed get trimDuration() { return this.trimEnd - this.trimStart; }
@computed get trimEnd() { return this.IsTrimming !== TrimScope.None ? this._trimEnd : this.clipEnd; }
@@ -104,6 +121,7 @@ export class CollectionStackedTimeline extends CollectionSubView<
@computed get zoomFactor() { return this._zoomFactor }
+
constructor(props: any) {
super(props);
// onClick play scripts
@@ -135,6 +153,7 @@ export class CollectionStackedTimeline extends CollectionSubView<
}
}
+
public get IsTrimming() { return this._trimming; }
@action
@@ -155,24 +174,31 @@ export class CollectionStackedTimeline extends CollectionSubView<
this._zoomFactor = zoom;
}
+
anchorStart = (anchor: Doc) => NumCast(anchor._timecodeToShow, NumCast(anchor[this.props.startTag]));
anchorEnd = (anchor: Doc, val: any = null) => NumCast(anchor._timecodeToHide, NumCast(anchor[this.props.endTag], val) ?? null);
+
+
+ // converts screen pixel offset to time
toTimeline = (screen_delta: number, width: number) => {
return Math.max(
this.clipStart,
Math.min(this.clipEnd, (screen_delta / width) * this.clipDuration + this.clipStart));
}
+
rangeClickScript = () => CollectionStackedTimeline.RangeScript;
rangePlayScript = () => CollectionStackedTimeline.RangePlayScript;
- // for creating key anchors with key events
+
+ // handles key events for creating anchors, scrubbing, and exiting trim
@action
keyEvents = (e: KeyboardEvent) => {
if (
!(e.target instanceof HTMLInputElement) &&
this.props.isSelected(true)
) {
+ // if shift is pressed, scrub by 1 second; otherwise by 1/10th of a second
const jump = e.shiftKey ? 1 : 0.1;
e.stopPropagation();
switch (e.key) {
@@ -196,6 +222,7 @@ export class CollectionStackedTimeline extends CollectionSubView<
}
break;
case "Escape":
+ // abandons current trim
this._trimStart = this.clipStart;
this._trimEnd = this.clipEnd;
this._trimming = TrimScope.None;
@@ -210,6 +237,7 @@ export class CollectionStackedTimeline extends CollectionSubView<
}
}
+
getLinkData(l: Doc) {
let la1 = l.anchor1 as Doc;
let la2 = l.anchor2 as Doc;
@@ -224,7 +252,8 @@ export class CollectionStackedTimeline extends CollectionSubView<
return { la1, la2, linkTime };
}
- // starting the drag event for anchor resizing
+
+ // handles dragging selection to create markers
@action
onPointerDownTimeline = (e: React.PointerEvent): void => {
const rect = this._timeline?.getBoundingClientRect();
@@ -299,6 +328,8 @@ export class CollectionStackedTimeline extends CollectionSubView<
}
+
+ // for dragging trim start handle
@action
trimLeft = (e: React.PointerEvent): void => {
const rect = this._timeline?.getBoundingClientRect();
@@ -325,6 +356,7 @@ export class CollectionStackedTimeline extends CollectionSubView<
);
}
+ // for dragging trim end handle
@action
trimRight = (e: React.PointerEvent): void => {
const rect = this._timeline?.getBoundingClientRect();
@@ -351,12 +383,15 @@ export class CollectionStackedTimeline extends CollectionSubView<
);
}
+
+ // updates the stored scroll position when the zoomed timeline is scrolled
@action
setScroll = (e: React.UIEvent) => {
e.stopPropagation();
this._scroll = this._timelineWrapper!.scrollLeft;
}
+ // smooth-scrolls to a time, e.g. when following a link that is scrolled out of view due to zoom
@action
scrollToTime = (time: number) => {
if (this._timelineWrapper) {
@@ -371,6 +406,8 @@ export class CollectionStackedTimeline extends CollectionSubView<
}
}
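Since the hunk elides scrollToTime's body, here is a hedged sketch of the time-to-pixel mapping such smooth scrolling needs (parameter names are assumptions based on the fields above):

function scrollToTimeSketch(wrapper: HTMLDivElement, time: number, clipStart: number, clipDuration: number, contentWidth: number) {
    const x = ((time - clipStart) / clipDuration) * contentWidth;                // time → pixel offset in the zoomed timeline
    wrapper.scrollTo({ left: x - wrapper.clientWidth / 2, behavior: "smooth" }); // center the target time in view
}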
+
+ // handles dragging and dropping markers in timeline
@action
internalDocDrop(e: Event, de: DragManager.DropEvent, docDragData: DragManager.DocumentDragData, xp: number) {
if (!de.embedKey && this.props.layerProvider?.(this.props.Document) !== false && this.props.Document._isGroup) return false;
@@ -396,6 +433,8 @@ export class CollectionStackedTimeline extends CollectionSubView<
return false;
}
+
+ // creates marker on timeline
@undoBatch
@action
static createAnchor(
@@ -430,6 +469,7 @@ export class CollectionStackedTimeline extends CollectionSubView<
return anchor;
}
+
@action
playOnClick = (anchorDoc: Doc, clientX: number) => {
const seekTimeInSeconds = this.anchorStart(anchorDoc) - 0.25;
@@ -521,14 +561,20 @@ export class CollectionStackedTimeline extends CollectionSubView<
return level;
}
+
dictationHeightPercent = 50;
dictationHeight = () => (this.props.PanelHeight() * (100 - this.dictationHeightPercent)) / 100;
+
@computed get timelineContentHeight() { return this.props.PanelHeight() * this.dictationHeightPercent / 100; }
@computed get timelineContentWidth() { return this.props.PanelWidth() * this.zoomFactor - 4 }; // subtract size of container border
+
dictationScreenToLocalTransform = () => this.props.ScreenToLocalTransform().translate(0, -this.timelineContentHeight);
+
isContentActive = () => this.props.isSelected() || this.props.isContentActive();
+
currentTimecode = () => this.currentTime;
+
@computed get renderDictation() {
const dictation = Cast(this.dataDoc[this.props.dictationKey], Doc, null);
return !dictation ? null : (
@@ -565,23 +611,8 @@ export class CollectionStackedTimeline extends CollectionSubView<
</div>
);
}
- @computed get renderAudioWaveform() {
- return !this.props.mediaPath ? null : (
- <div className="collectionStackedTimeline-waveform">
- <AudioWaveform
- rawDuration={this.props.rawDuration}
- duration={this.clipDuration}
- mediaPath={this.props.mediaPath}
- layoutDoc={this.layoutDoc}
- clipStart={this.clipStart}
- clipEnd={this.clipEnd}
- zoomFactor={this.zoomFactor}
- PanelHeight={this.timelineContentHeight}
- PanelWidth={this.timelineContentWidth}
- />
- </div>
- );
- }
+
+ // renders selection region on timeline
@computed get selectionContainer() {
const markerEnd = CollectionStackedTimeline.SelectingRegion === this ? this.currentTime : this._markerEnd;
return markerEnd === undefined ? null : (
@@ -668,7 +699,6 @@ export class CollectionStackedTimeline extends CollectionSubView<
);
})}
{!this.IsTrimming && this.selectionContainer}
- {/* {this.renderAudioWaveform} */}
<AudioWaveform
rawDuration={this.props.rawDuration}
duration={this.clipDuration}
@@ -728,6 +758,12 @@ export class CollectionStackedTimeline extends CollectionSubView<
}
}
+
+/**
+ * StackedTimelineAnchor
+ * creates the anchors to display markers, links, and embedded documents on timeline
+ */
+
interface StackedTimelineAnchorProps {
mark: Doc;
rangeClickScript: () => ScriptField;
@@ -753,20 +789,26 @@ interface StackedTimelineAnchorProps {
trimStart: number;
trimEnd: number;
}
+
+
@observer
class StackedTimelineAnchor extends React.Component<StackedTimelineAnchorProps> {
_lastTimecode: number;
_disposer: IReactionDisposer | undefined;
+
constructor(props: any) {
super(props);
this._lastTimecode = this.props.currentTimecode();
}
+ // updates marker document title to reflect correct timecodes
computeTitle = () => {
const start = Math.max(NumCast(this.props.mark[this.props.startTag]), this.props.trimStart) - this.props.trimStart;
const end = Math.min(NumCast(this.props.mark[this.props.endTag]), this.props.trimEnd) - this.props.trimStart;
return `#${formatTime(start)}-${formatTime(end)}`;
}
+
+
componentDidMount() {
this._disposer = reaction(
() => this.props.currentTimecode(),
@@ -805,9 +847,12 @@ class StackedTimelineAnchor extends React.Component<StackedTimelineAnchorProps>
}
);
}
+
componentWillUnmount() {
this._disposer?.();
}
+
+
// starting the drag event for anchor resizing
onAnchorDown = (e: React.PointerEvent, anchor: Doc, left: boolean): void => {
this.props._timeline?.setPointerCapture(e.pointerId);
@@ -851,11 +896,15 @@ class StackedTimelineAnchor extends React.Component<StackedTimelineAnchorProps>
);
}
+
+ // context menu
contextMenuItems = () => {
const resetTitle = { script: ScriptField.MakeFunction(`self.title = "#" + formatToTime(self["${this.props.startTag}"]) + "-" + formatToTime(self["${this.props.endTag}"])`)!, icon: "folder-plus", label: "Reset Title" };
return [resetTitle];
}
+
+ // renders anchor LabelBox
renderInner = computedFn(function (
this: StackedTimelineAnchor,
mark: Doc,
@@ -910,6 +959,7 @@ class StackedTimelineAnchor extends React.Component<StackedTimelineAnchorProps>
anchorScreenToLocalXf = () => this.props.ScreenToLocalTransform().translate(-this.props.left, -this.props.top);
width = () => this.props.width;
height = () => this.props.height;
+
render() {
const inner = this.renderInner(
this.props.mark,
diff --git a/src/client/views/nodes/AudioBox.tsx b/src/client/views/nodes/AudioBox.tsx
index 9351bc3be..f5de31fcb 100644
--- a/src/client/views/nodes/AudioBox.tsx
+++ b/src/client/views/nodes/AudioBox.tsx
@@ -22,6 +22,22 @@ import { ViewBoxAnnotatableComponent, ViewBoxAnnotatableProps } from "../DocComp
import "./AudioBox.scss";
import { FieldView, FieldViewProps } from "./FieldView";
+
+/**
+ * AudioBox
+ * Main component: AudioBox.tsx
+ * Supporting Components: CollectionStackedTimeline, AudioWaveform
+ *
+ * AudioBox is a node that supports the recording and playback of audio files in Dash.
+ * When an audio file is imported into Dash, it is immediately rendered as an AudioBox document.
+ * When a blank AudioBox node is created in Dash, audio recording controls are displayed; the user can start a recording, which can be paused or stopped, and can use dictation to create a text transcript.
+ * Recording is done using the MediaDevices API to access the user's device microphone (see recordAudioAnnotation below)
+ * CollectionStackedTimeline handles AudioBox and VideoBox shared behavior, but AudioBox handles playing, pausing, etc. because it contains the <audio> element
+ * User can trim audio: nondestructive, it just sets new bounds for playback and rendering the timeline
+ */
+
+
+// minimal type declaration for the browser's MediaRecorder API, which records a MediaStream from the MediaDevices API
declare class MediaRecorder {
constructor(e: any); // whatever MediaRecorder has
}
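A hedged sketch of the recording flow described above, using the standard MediaDevices/MediaRecorder browser APIs (recordAudioAnnotation itself is outside this hunk, so this is illustrative rather than the actual implementation):

async function recordSketch(onStop: (clip: Blob) => void) {
    const stream = await navigator.mediaDevices.getUserMedia({ audio: true }); // request microphone access
    const recorder: any = new MediaRecorder(stream);
    const chunks: Blob[] = [];
    recorder.ondataavailable = (e: any) => chunks.push(e.data); // collect encoded audio chunks
    recorder.onstop = () => onStop(new Blob(chunks));           // hand back the finished clip
    recorder.start();
    return recorder; // caller later invokes stop() (or pause()/resume())
}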
@@ -35,44 +51,42 @@ enum media_state {
Paused = "paused",
Playing = "playing"
}
+
+
@observer
export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProps & FieldViewProps, AudioDocument>(AudioDocument) {
public static LayoutString(fieldKey: string) { return FieldView.LayoutString(AudioBox, fieldKey); }
- public static SetScrubTime = action((timeInMillisFrom1970: number) => {
- AudioBox._scrubTime = 0;
- AudioBox._scrubTime = timeInMillisFrom1970;
- });
public static Enabled = false;
- static topControlsHeight = 30; // width of playhead
- static bottomControlsHeight = 20; // height of timeline in percent of height of audioBox.
- @observable static _scrubTime = 0;
+
+ static topControlsHeight = 30; // height of upper controls above timeline
+ static bottomControlsHeight = 20; // height of lower controls below timeline
_dropDisposer?: DragManager.DragDropDisposer;
_disposers: { [name: string]: IReactionDisposer } = {};
- _ele: HTMLAudioElement | null = null;
- _recorder: any;
+ _ele: HTMLAudioElement | null = null; // <audio> ref
+ _recorder: any; // MediaRecorder
_recordStart = 0;
- _pauseStart = 0;
+ _pauseStart = 0; // time when recording is paused (used to keep track of recording timecodes)
_pauseEnd = 0;
_pausedTime = 0;
- _stream: MediaStream | undefined;
- _play: any = null;
+ _stream: MediaStream | undefined; // passed to MediaRecorder, records device input audio
+ _play: any = null; // timeout for playback
- @observable _stackedTimeline: any;
- @observable _finished: boolean = false;
+ @observable _stackedTimeline: any; // CollectionStackedTimeline ref
+ @observable _finished: boolean = false; // has playback reached end of clip
@observable _volume: number = 1;
@observable _muted: boolean = false;
- @observable _paused: boolean = false;
+ @observable _paused: boolean = false; // is recording paused
// @observable rawDuration: number = 0; // computed from the length of the audio element when loaded
@computed get recordingStart() { return DateCast(this.dataDoc[this.fieldKey + "-recordingStart"])?.date.getTime(); }
@computed get rawDuration() { return NumCast(this.dataDoc[`${this.fieldKey}-duration`]); } // bcz: shouldn't be needed since it's computed from audio element
// mehek: not 100% sure but i think due to the order in which things are loaded this is necessary ^^
// if you get rid of it and set the value to 0 the timeline and waveform will set their bounds incorrectly
- @computed get miniPlayer() { return this.props.PanelHeight() < 50 }
+ @computed get miniPlayer() { return this.props.PanelHeight() < 50 } // used to collapse timeline when node is shrunk
@computed get links() { return DocListCast(this.dataDoc.links); }
- @computed get pauseTime() { return this._pauseEnd - this._pauseStart; } // total time paused to update the correct time
+ @computed get pauseTime() { return this._pauseEnd - this._pauseStart; } // total time paused to update the correct recording time
@computed get mediaState() { return this.layoutDoc.mediaState as media_state; }
@computed get path() { // returns the path of the audio file
const path = Cast(this.props.Document[this.fieldKey], AudioField, null)?.url.href || "";
@@ -80,12 +94,15 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
set mediaState(value) { this.layoutDoc.mediaState = value; }
- get timeline() { return this._stackedTimeline; } // can't be computed since it's not observable
+ @computed get timeline() { return this._stackedTimeline; } // returns CollectionStackedTimeline ref
+
componentWillUnmount() {
this.removeCurrentlyPlaying();
this._dropDisposer?.();
Object.values(this._disposers).forEach((disposer) => disposer?.());
+
+ // removes doc from active recordings if recording when closed
const ind = DocUtils.ActiveRecordings.indexOf(this);
ind !== -1 && DocUtils.ActiveRecordings.splice(ind, 1);
}
@@ -102,6 +119,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
}
+
getLinkData(l: Doc) {
let la1 = l.anchor1 as Doc;
let la2 = l.anchor2 as Doc;
@@ -131,7 +149,8 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
) || this.rootDoc;
}
- // for updating the timecode
+
+ // updates timecode and shows it in timeline, follows links at time
@action
timecodeChanged = () => {
if (this.mediaState !== media_state.Recording && this._ele) {
@@ -148,7 +167,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
}
- // play back the audio from time
+ // play back the audio from seekTimeInSeconds; fullPlay indicates whether the whole clip is being played to the end vs. just a link's range
@action
playFrom = (seekTimeInSeconds: number, endTime?: number, fullPlay: boolean = false) => {
clearTimeout(this._play); // abort any previous clip ending
@@ -156,8 +175,10 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
setTimeout(() => this.playFrom(seekTimeInSeconds, endTime), 500);
}
else if (this.timeline && this._ele && AudioBox.Enabled) {
+ // trim bounds override the requested playback bounds
const end = Math.min(this.timeline.trimEnd, endTime ?? this.timeline.trimEnd);
const start = Math.max(this.timeline.trimStart, seekTimeInSeconds);
+ // checks if times are within clip range
if (seekTimeInSeconds >= 0 && this.timeline.trimStart <= end && seekTimeInSeconds <= this.timeline.trimEnd) {
this._ele.currentTime = start;
this._ele.play();
@@ -165,6 +186,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
this.addCurrentlyPlaying();
this._play = setTimeout(
() => {
+ // track whether the end of the clip was reached so that the clip restarts on the next play
if (fullPlay) this._finished = true;
// removes from currently playing if playback has reached end of range marker
else this.removeCurrentlyPlaying();
@@ -177,6 +199,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
}
+
// removes from currently playing display
@action
removeCurrentlyPlaying = () => {
@@ -186,6 +209,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
}
+ // adds doc to currently playing display
@action
addCurrentlyPlaying = () => {
if (!CollectionStackedTimeline.CurrentlyPlaying) {
@@ -196,6 +220,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
}
+
// update the recording time
updateRecordTime = () => {
if (this.mediaState === media_state.Recording) {
@@ -227,6 +252,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
setTimeout(this.stopRecording, 60 * 60 * 1000); // stop after an hour
}
+ // stops recording
@action
stopRecording = () => {
if (this._recorder) {
@@ -240,6 +266,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
}
+
// context menu
specificContextMenu = (e: React.MouseEvent): void => {
const funcs: ContextMenuProps[] = [];
@@ -270,6 +297,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
});
}
+
// button for starting and stopping the recording
Record = (e: React.MouseEvent) => {
if (e.button === 0 && !e.ctrlKey) {
@@ -284,11 +312,16 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
if (this.timeline && this._ele) {
const eleTime = this._ele.currentTime;
+
+ // if the current timecode is outside the trim bounds, reset it to the trim start
let start = eleTime >= this.timeline.trimEnd || eleTime <= this.timeline.trimStart ? this.timeline.trimStart : eleTime;
+
+ // restarts clip if reached end on last play
if (this._finished) {
this._finished = false;
start = this.timeline.trimStart;
}
+
this.playFrom(start, this.timeline.trimEnd, true);
}
}
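The comments above reduce to a small decision about where playback should start; a sketch with assumed names:

function resolvePlayStart(current: number, finished: boolean, trimStart: number, trimEnd: number): number {
    if (finished) return trimStart;                    // previous play reached the end: restart the clip
    return current >= trimEnd || current <= trimStart  // current timecode outside the trim bounds?
        ? trimStart                                    // snap back to the clip start
        : current;                                     // otherwise resume in place
}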
@@ -299,12 +332,14 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
if (this._ele) {
this._ele.pause();
this.mediaState = media_state.Paused;
+
+ // if paused in the middle of playback, prevents restart on next play
if (!this._finished) clearTimeout(this._play);
this.removeCurrentlyPlaying();
}
}
- // creates a text document for dictation
+ // for dictation button, creates a text document for dictation
onFile = (e: any) => {
const newDoc = CurrentUserUtils.GetNewTextDoc(
"",
@@ -326,13 +361,15 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
e.stopPropagation();
}
- // ref for updating time
+
+ // sets <audio> ref for updating time
setRef = (e: HTMLAudioElement | null) => {
e?.addEventListener("timeupdate", this.timecodeChanged);
e?.addEventListener("ended", () => { this._finished = true; this.Pause() });
this._ele = e;
}
+
// pause the time during recording phase
@action
recordPause = (e: React.MouseEvent) => {
@@ -351,6 +388,8 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
e.stopPropagation();
}
+
+ // plays link
playLink = (link: Doc) => {
if (link.annotationOn === this.rootDoc) {
if (!this.layoutDoc.dontAutoPlayFollowedLinks) {
@@ -376,30 +415,39 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
}
+
@action
timelineWhenChildContentsActiveChanged = (isActive: boolean) =>
- this.props.whenChildContentsActiveChanged(this._isAnyChildContentActive = isActive)
+ this.props.whenChildContentsActiveChanged(this._isAnyChildContentActive = isActive);
+
timelineScreenToLocal = () =>
- this.props.ScreenToLocalTransform().translate(0, -AudioBox.bottomControlsHeight)
+ this.props.ScreenToLocalTransform().translate(0, -AudioBox.bottomControlsHeight);
+
setPlayheadTime = (time: number) => this._ele!.currentTime = this.layoutDoc._currentTimecode = time;
+
playing = () => this.mediaState === media_state.Playing;
+
isActiveChild = () => this._isAnyChildContentActive;
+ // timeline dimensions
timelineWidth = () => this.props.PanelWidth();
timelineHeight = () => (this.props.PanelHeight() - (AudioBox.topControlsHeight + AudioBox.bottomControlsHeight))
+ // ends trim, hides trim controls and displays new clip
@undoBatch
- finishTrim = () => { // hides trim controls and displays new clip
+ finishTrim = () => {
this.Pause();
this.setPlayheadTime(Math.max(Math.min(this.timeline?.trimEnd || 0, this._ele!.currentTime), this.timeline?.trimStart || 0));
this.timeline?.StopTrimming();
}
+ // displays trim controls to start trimming clip
startTrim = (scope: TrimScope) => {
this.Pause();
this.timeline?.StartTrimming(scope);
}
+ // for trim button: double click displays the full clip, single click displays the current trim bounds
onClipPointerDown = (e: React.PointerEvent) => {
e.stopPropagation();
this.timeline && setupMoveUpEvents(this, e, returnFalse, returnFalse, action((e: PointerEvent, doubleTap?: boolean) => {
@@ -412,10 +460,13 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}));
}
+
+ // for zoom slider, sets timeline waveform zoom
zoom = (zoom: number) => {
this.timeline?.setZoom(zoom);
}
+ // for volume slider, sets volume
@action
setVolume = (volume: number) => {
if (this._ele) {
@@ -427,6 +478,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
}
+ // toggles audio muted
@action
toggleMute = () => {
if (this._ele) {
@@ -435,6 +487,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
}
+
setupTimelineDrop = (r: HTMLDivElement | null) => {
if (r && this.timeline) {
this._dropDisposer?.();
@@ -447,6 +500,8 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
}
+
+ // UI for recording, initially displayed when a new audio node is created in Dash
@computed get recordingControls() {
return <div className="audiobox-recorder">
<div className="audiobox-dictation" onClick={this.onFile}>
@@ -478,6 +533,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
</div>
}
+ // UI for playback, displayed for imported or recorded clips; hides timeline and collapses controls when the node is shrunk vertically
@computed get playbackControls() {
return <div className="audiobox-file" style={{
pointerEvents: this._isAnyChildContentActive || this.props.isContentActive() ? "all" : "none",
@@ -544,6 +600,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
</div>
}
+ // renders the CollectionStackedTimeline
@computed get renderTimeline() {
return (
<CollectionStackedTimeline
@@ -577,6 +634,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
/>
);
}
+
// returns the html audio element
@computed get audio() {
return <audio ref={this.setRef}
diff --git a/src/client/views/nodes/VideoBox.tsx b/src/client/views/nodes/VideoBox.tsx
index e47b41539..9797178b2 100644
--- a/src/client/views/nodes/VideoBox.tsx
+++ b/src/client/views/nodes/VideoBox.tsx
@@ -1,6 +1,5 @@
import React = require("react");
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
-import { Tooltip } from "@material-ui/core";
import { action, computed, IReactionDisposer, observable, ObservableMap, reaction, runInAction, untracked } from "mobx";
import { observer } from "mobx-react";
import * as rp from 'request-promise';
@@ -32,9 +31,24 @@ import { FieldView, FieldViewProps } from './FieldView';
import "./VideoBox.scss";
const path = require('path');
+
+/**
+ * VideoBox
+ * Main component: VideoBox.tsx
+ * Supporting Components: CollectionStackedTimeline
+ *
+ * VideoBox is a node that supports the playback of video files in Dash.
+ * When a video file or YouTube video is imported into Dash, it is immediately rendered as a VideoBox document.
+ * CollectionStackedTimeline handles AudioBox and VideoBox shared behavior, but VideoBox handles playing, pausing, etc. because it contains the <video> element
+ * User can trim video: nondestructive, it just sets new bounds for playback and rendering the timeline
+ * Like images, users can zoom and pan, and an overlay layer allows annotations on top of the video at different times
+ */
+
+
type VideoDocument = makeInterface<[typeof documentSchema]>;
const VideoDocument = makeInterface(documentSchema);
+
@observer
export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProps & FieldViewProps, VideoDocument>(VideoDocument) {
public static LayoutString(fieldKey: string) { return FieldView.LayoutString(VideoBox, fieldKey); }
@@ -54,42 +68,45 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
console.log("VideoBox :" + e);
}
}
+
static _youtubeIframeCounter: number = 0;
- static heightPercent = 80; // height of timeline in percent of height of videoBox.
+ static heightPercent = 80; // height of video relative to videoBox when timeline is open
private _disposers: { [name: string]: IReactionDisposer } = {};
private _youtubePlayer: YT.Player | undefined = undefined;
- private _videoRef: HTMLVideoElement | null = null;
- private _contentRef: HTMLDivElement | null = null;
+ private _videoRef: HTMLVideoElement | null = null; // <video> ref
+ private _contentRef: HTMLDivElement | null = null; // ref to div that wraps video and controls for full screen
private _youtubeIframeId: number = -1;
private _youtubeContentCreated = false;
private _audioPlayer: HTMLAudioElement | null = null;
- private _mainCont: React.RefObject<HTMLDivElement> = React.createRef();
+ private _mainCont: React.RefObject<HTMLDivElement> = React.createRef(); // outermost div
private _annotationLayer: React.RefObject<HTMLDivElement> = React.createRef();
- private _playRegionTimer: any = null;
- private _playRegionDuration = 0;
- @observable _stackedTimeline: any;
- @observable static _nativeControls: boolean;
- @observable _marqueeing: number[] | undefined;
+ private _playRegionTimer: any = null; // timeout for playback
+ @observable _stackedTimeline: any; // CollectionStackedTimeline ref
+ @observable static _nativeControls: boolean; // default html controls
+ @observable _marqueeing: number[] | undefined; // coords for marquee selection
@observable _savedAnnotations = new ObservableMap<number, HTMLDivElement[]>();
@observable _screenCapture = false;
- @observable _clicking = false;
+ @observable _clicking = false; // used for transition between showing/hiding timeline
@observable _forceCreateYouTubeIFrame = false;
@observable _playTimer?: NodeJS.Timeout = undefined;
@observable _fullScreen = false;
@observable _playing = false;
- @observable _finished: boolean = false;
+ @observable _finished: boolean = false; // has playback reached end of clip
@observable _volume: number = 1;
@observable _muted: boolean = false;
@computed get links() { return DocListCast(this.dataDoc.links); }
- @computed get heightPercent() { return NumCast(this.layoutDoc._timelineHeightPercent, 100); }
+ @computed get heightPercent() { return NumCast(this.layoutDoc._timelineHeightPercent, 100); } // current percent of video relative to VideoBox height
// @computed get rawDuration() { return NumCast(this.dataDoc[this.fieldKey + "-duration"]); }
@observable rawDuration: number = 0;
+
@computed get youtubeVideoId() {
const field = Cast(this.dataDoc[this.props.fieldKey], VideoField);
return field && field.url.href.indexOf("youtube") !== -1 ? ((arr: string[]) => arr[arr.length - 1])(field.url.href.split("/")) : "";
}
+
+
// returns the path of the audio file
@computed get audiopath() {
const field = Cast(this.props.Document[this.props.fieldKey + '-audio'], AudioField, null);
@@ -97,12 +114,14 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
return field?.url.href ?? vfield?.url.href ?? "";
}
- private get timeline() { return this._stackedTimeline; }
- private get transition() { return this._clicking ? "left 0.5s, width 0.5s, height 0.5s" : ""; }
+
+ @computed private get timeline() { return this._stackedTimeline; }
+ private get transition() { return this._clicking ? "left 0.5s, width 0.5s, height 0.5s" : ""; } // css transition for hiding/showing timeline
public get player(): HTMLVideoElement | null { return this._videoRef; }
+
componentDidMount() {
- this.props.setContentView?.(this); // this tells the DocumentView that this AudioBox is the "content" of the document. this allows the DocumentView to indirectly call getAnchor() on the AudioBox when making a link.
+ this.props.setContentView?.(this); // this tells the DocumentView that this VideoBox is the "content" of the document. this allows the DocumentView to indirectly call getAnchor() on the VideoBox when making a link.
if (this.youtubeVideoId) {
const youtubeaspect = 400 / 315;
const nativeWidth = Doc.NativeWidth(this.layoutDoc);
@@ -122,15 +141,20 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
Object.keys(this._disposers).forEach(d => this._disposers[d]?.());
}
+
+ // plays video
@action public Play = (update: boolean = true) => {
this._playing = true;
const eleTime = this.player?.currentTime || 0;
if (this.timeline) {
let start = eleTime >= this.timeline.trimEnd || eleTime <= this.timeline.trimStart ? this.timeline.trimStart : eleTime;
+
if (this._finished) {
+ // restarts video if reached end on previous play
this._finished = false;
start = this.timeline.trimStart;
}
+
try {
this._audioPlayer && this.player && (this._audioPlayer.currentTime = this.player?.currentTime);
update && this.player && this.playFrom(start, undefined, true);
@@ -144,6 +168,7 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
this.updateTimecode();
}
+ // seeks to the given time
@action public Seek(time: number) {
try {
this._youtubePlayer?.seekTo(Math.round(time), true);
@@ -154,6 +179,7 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
this._audioPlayer && (this._audioPlayer.currentTime = time);
}
+ // pauses video
@action public Pause = (update: boolean = true) => {
this._playing = false;
this.removeCurrentlyPlaying();
@@ -169,9 +195,10 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
this._youtubePlayer && SelectionManager.DeselectAll(); // if we don't deselect the player, then we get an annoying YouTube spinner I guess telling us we're paused.
this._playTimer = undefined;
this.updateTimecode();
- if (!this._finished) clearTimeout(this._playRegionTimer);;
+ if (!this._finished) clearTimeout(this._playRegionTimer); // if paused in the middle of playback, prevents restart on next play
}
+ // toggles video full screen
@action public FullScreen = () => {
if (document.fullscreenElement == this._contentRef) {
this._fullScreen = false;
@@ -189,6 +216,8 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
}
+
+ // creates and links snapshot photo of current video frame
@action public Snapshot(downX?: number, downY?: number) {
const width = (this.layoutDoc._width || 0);
const canvas = document.createElement('canvas');
@@ -231,6 +260,7 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
}
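The Snapshot body is mostly elided here; the frame grab itself is standard canvas usage, sketched below (the Dash-specific document creation and linking are omitted):

function captureFrame(video: HTMLVideoElement): string {
    const canvas = document.createElement("canvas");
    canvas.width = video.videoWidth;   // match the video's native resolution
    canvas.height = video.videoHeight;
    canvas.getContext("2d")?.drawImage(video, 0, 0, canvas.width, canvas.height); // paint the current frame
    return canvas.toDataURL("image/png"); // export as a data URL for upload/linking
}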
+ // creates link for snapshot
createRealSummaryLink = (imagePath: string, downX?: number, downY?: number) => {
const url = !imagePath.startsWith("/") ? Utils.CorsProxy(imagePath) : imagePath;
const width = this.layoutDoc._width || 1;
@@ -249,12 +279,15 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
(downX !== undefined && downY !== undefined) && DocumentManager.Instance.getFirstDocumentView(imageSummary)?.startDragging(downX, downY, "move", true));
}
+
getAnchor = () => {
const timecode = Cast(this.layoutDoc._currentTimecode, "number", null);
const marquee = AnchorMenu.Instance.GetAnchor?.();
return CollectionStackedTimeline.createAnchor(this.rootDoc, this.dataDoc, this.annotationKey, "_timecodeToShow"/* videoStart */, "_timecodeToHide" /* videoEnd */, timecode ? timecode : undefined, undefined, marquee) || this.rootDoc;
}
+
+ // sets video info on load
videoLoad = action(() => {
const aspect = this.player!.videoWidth / this.player!.videoHeight;
Doc.SetNativeWidth(this.dataDoc, this.player!.videoWidth);
@@ -265,6 +298,8 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
})
+
+ // updates video time
@action
updateTimecode = () => {
this.player && (this.layoutDoc._currentTimecode = this.player.currentTime);
@@ -275,6 +310,8 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
}
+
+ // sets video element ref
@action
setVideoRef = (vref: HTMLVideoElement | null) => {
this._videoRef = vref;
@@ -288,6 +325,7 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
}
+ // set ref for div that wraps video and controls for fullscreen
@action
setContentRef = (cref: HTMLDivElement | null) => {
this._contentRef = cref;
@@ -296,6 +334,8 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
}
+
+ // context menu
specificContextMenu = (e: React.MouseEvent): void => {
const field = Cast(this.dataDoc[this.props.fieldKey], VideoField);
if (field) {
@@ -321,8 +361,11 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
}
+
// ref for updating time
setAudioRef = (e: HTMLAudioElement | null) => this._audioPlayer = e;
+
+ // renders the video and audio
@computed get content() {
const field = Cast(this.dataDoc[this.fieldKey], VideoField);
const interactive = CurrentUserUtils.SelectedTool !== InkTool.None || !this.props.isSelected() ? "" : "-interactive";
@@ -350,6 +393,7 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
</div>;
}
+
@action youtubeIframeLoaded = (e: any) => {
if (!this._youtubeContentCreated) {
this._forceCreateYouTubeIFrame = !this._forceCreateYouTubeIFrame;
@@ -359,6 +403,7 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
this.loadYouTube(e.target);
}
+
loadYouTube = (iframe: any) => {
let started = true;
const onYoutubePlayerStateChange = (event: any) => runInAction(() => {
@@ -392,14 +437,18 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
}
+
+ // for play button
onPlayDown = () => this._playing ? this.Pause() : this.Play();
+ // for fullscreen button
onFullDown = (e: React.PointerEvent) => {
this.FullScreen();
e.stopPropagation();
e.preventDefault();
}
+ // for snapshot button
onSnapshotDown = (e: React.PointerEvent) => {
setupMoveUpEvents(this, e, (e) => {
this.Snapshot(e.clientX, e.clientY);
@@ -407,6 +456,7 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}, emptyFunction, () => this.Snapshot());
}
+ // for the show/hide timeline button; animates the transition
@action
onTimelineHdlDown = (e: React.PointerEvent) => {
this._clicking = true;
@@ -427,18 +477,8 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}, this.props.isContentActive(), this.props.isContentActive());
}
- onResetDown = (e: React.PointerEvent) => {
- const start = this.timeline?.clipStart || 0;
- setupMoveUpEvents(this, e,
- e => {
- this.Seek(Math.max(start, (this.layoutDoc._currentTimecode || 0) + Math.sign(e.movementX) * 0.0333));
- e.stopImmediatePropagation();
- return false;
- },
- emptyFunction,
- (e: PointerEvent) => this.layoutDoc._currentTimecode = 0);
- }
+ // removes video from currently playing display
@action
removeCurrentlyPlaying = () => {
if (CollectionStackedTimeline.CurrentlyPlaying) {
@@ -447,6 +487,7 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
}
+ // adds video to currently playing display
@action
addCurrentlyPlaying = () => {
if (!CollectionStackedTimeline.CurrentlyPlaying) {
@@ -457,6 +498,7 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
}
+
@computed get youtubeContent() {
this._youtubeIframeId = VideoBox._youtubeIframeCounter++;
this._youtubeContentCreated = this._forceCreateYouTubeIFrame ? true : true;
@@ -468,6 +510,8 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
src={`https://www.youtube.com/embed/${this.youtubeVideoId}?enablejsapi=1&rel=0&showinfo=1&autoplay=0&mute=1&start=${start}&modestbranding=1&controls=${VideoBox._nativeControls ? 1 : 0}`} />;
}
+
+ // for annotating, adds doc with time info
@action.bound
addDocWithTimecode(doc: Doc | Doc[]): boolean {
const docs = doc instanceof Doc ? [doc] : doc;
@@ -476,7 +520,8 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
return this.addDocument(doc);
}
- // play back the video from time
+
+ // play back the video from seekTimeInSeconds; fullPlay indicates whether the whole clip is being played to the end vs. just a link's range
@action
playFrom = (seekTimeInSeconds: number, endTime?: number, fullPlay: boolean = false) => {
clearTimeout(this._playRegionTimer);
@@ -484,9 +529,11 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
setTimeout(() => this.playFrom(seekTimeInSeconds, endTime), 500);
}
else if (this.player) {
+ // trim bounds override the requested playback bounds
const end = Math.min(this.timeline?.trimEnd ?? this.rawDuration, endTime ?? this.timeline?.trimEnd ?? this.rawDuration);
const start = Math.max(this.timeline?.trimStart ?? 0, seekTimeInSeconds);
- this._playRegionDuration = end - start;
+ const playRegionDuration = end - start;
+ // checks if times are within clip range
if (seekTimeInSeconds >= 0 && (this.timeline?.trimStart || 0) <= end && seekTimeInSeconds <= (this.timeline?.trimEnd || this.rawDuration)) {
this.player.currentTime = start;
this._audioPlayer && (this._audioPlayer.currentTime = seekTimeInSeconds);
@@ -496,16 +543,20 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
this.addCurrentlyPlaying();
this._playRegionTimer = setTimeout(
() => {
+ // track whether the end of the clip was reached so that the clip restarts on the next play
if (fullPlay) this._finished = true;
+ // removes from currently playing if playback has reached end of range marker
else this.removeCurrentlyPlaying();
this.Pause();
- }, this._playRegionDuration * 1000);
+ }, playRegionDuration * 1000);
} else {
this.Pause();
}
}
}
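The clamping the two comments above describe boils down to the following, with assumed names:

function clampToTrim(seek: number, requestedEnd: number | undefined, trimStart: number, trimEnd: number) {
    const end = Math.min(trimEnd, requestedEnd ?? trimEnd); // the trim end wins over the requested end
    const start = Math.max(trimStart, seek);                // never start before the trim start
    return { start, end, timeoutMs: (end - start) * 1000 }; // the timeout that fires Pause() at the end
}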
- // hides trim controls and displays new clip
+
+
+ // ends trim, hides trim controls and displays new clip
@undoBatch
finishTrim = action(() => {
this.Pause();
@@ -513,12 +564,15 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
this.timeline?.StopTrimming();
});
+ // displays trim controls to start trimming clip
startTrim = (scope: TrimScope) => {
this.Pause();
this.timeline?.StartTrimming(scope);
}
+ // for trim button: double click displays the full clip, single click displays the current trim bounds
onClipPointerDown = (e: React.PointerEvent) => {
+ // if the timeline isn't shown, show it first, then trim
this.heightPercent >= 100 && this.onTimelineHdlDown(e);
this.timeline && setupMoveUpEvents(this, e, returnFalse, returnFalse, action((e: PointerEvent, doubleTap?: boolean) => {
if (doubleTap) {
@@ -530,6 +584,8 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}));
}
+
+ // for volume slider, sets volume
@action
setVolume = (volume: number) => {
if (this.player) {
@@ -541,6 +597,7 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
}
+ // toggles video mute
@action
toggleMute = () => {
if (this.player) {
@@ -549,6 +606,8 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
}
+
+ // stretches vertically or horizontally depending on video orientation so video fits full screen
fullScreenSize() {
if (this._videoRef && this._videoRef.videoHeight / this._videoRef.videoWidth > 1) {
return { height: "100%" }
@@ -558,10 +617,14 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
}
+
+ // for zoom slider, sets timeline waveform zoom
zoom = (zoom: number) => {
this.timeline?.setZoom(zoom);
}
+
+ // plays link
playLink = (doc: Doc) => {
const startTime = Math.max(0, (this._stackedTimeline?.anchorStart(doc) || 0));
const endTime = this.timeline?.anchorEnd(doc);
@@ -571,6 +634,8 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
}
+
+ // starts marquee selection
marqueeDown = (e: React.PointerEvent) => {
if (!e.altKey && e.button === 0 && this.layoutDoc._viewScale === 1 && this.props.isContentActive(true) && ![InkTool.Highlighter, InkTool.Pen].includes(CurrentUserUtils.SelectedTool)) {
setupMoveUpEvents(this, e, action(e => {
@@ -581,6 +646,7 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
}
+ // ends marquee selection
@action
finishMarquee = () => {
this._marqueeing = undefined;
@@ -588,23 +654,34 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
timelineWhenChildContentsActiveChanged = action((isActive: boolean) => this.props.whenChildContentsActiveChanged(this._isAnyChildContentActive = isActive));
+
timelineScreenToLocal = () => this.props.ScreenToLocalTransform().scale(this.scaling()).translate(0, -this.heightPercent / 100 * this.props.PanelHeight());
+
setPlayheadTime = (time: number) => this.player!.currentTime = this.layoutDoc._currentTimecode = time;
+
timelineHeight = () => this.props.PanelHeight() * (100 - this.heightPercent) / 100;
+
playing = () => this._playing;
contentFunc = () => [this.youtubeVideoId ? this.youtubeContent : this.content];
+
scaling = () => this.props.scaling?.() || 1;
+
panelWidth = () => this.props.PanelWidth() * this.heightPercent / 100;
panelHeight = () => this.layoutDoc._fitWidth ? this.panelWidth() / (Doc.NativeAspect(this.rootDoc) || 1) : this.props.PanelHeight() * this.heightPercent / 100;
+
screenToLocalTransform = () => {
const offset = (this.props.PanelWidth() - this.panelWidth()) / 2 / this.scaling();
return this.props.ScreenToLocalTransform().translate(-offset, 0).scale(100 / this.heightPercent);
}
+
marqueeFitScaling = () => (this.props.scaling?.() || 1) * this.heightPercent / 100;
marqueeOffset = () => [this.panelWidth() / 2 * (1 - this.heightPercent / 100) / (this.heightPercent / 100), 0];
+
timelineDocFilter = () => [`_timelineLabel:true,${Utils.noRecursionHack}:x`];
+
+ // renders video controls
@computed get uIButtons() {
const curTime = (this.layoutDoc._currentTimecode || 0) - (this.timeline?.clipStart || 0);
return <div className="videoBox-ui" style={this._fullScreen || this.heightPercent == 100 ? { fontSize: "40px", minWidth: "80%" } : {}}>
@@ -677,6 +754,8 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
</>}
</div>
}
+
+ // renders CollectionStackedTimeline
@computed get renderTimeline() {
return <div className="videoBox-stackPanel" style={{ transition: this.transition, height: `${100 - this.heightPercent}%` }}>
<CollectionStackedTimeline ref={action((r: any) => this._stackedTimeline = r)} {...this.props}
@@ -705,9 +784,12 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
/>
</div>;
}
+
+ // renders annotation layer
@computed get annotationLayer() {
return <div className="videoBox-annotationLayer" style={{ transition: this.transition, height: `${this.heightPercent}%` }} ref={this._annotationLayer} />;
}
+
render() {
const borderRad = this.props.styleProvider?.(this.layoutDoc, this.props, StyleProp.BorderRounding);
const borderRadius = borderRad?.includes("px") ? `${Number(borderRad.split("px")[0]) / this.scaling()}px` : borderRad;