aboutsummaryrefslogtreecommitdiff
path: root/src/client/views/nodes
diff options
context:
space:
mode:
Diffstat (limited to 'src/client/views/nodes')
-rw-r--r--src/client/views/nodes/AudioBox.scss338
-rw-r--r--src/client/views/nodes/AudioBox.tsx645
-rw-r--r--src/client/views/nodes/DocumentView.tsx1
-rw-r--r--src/client/views/nodes/LabelBox.tsx21
4 files changed, 681 insertions, 324 deletions
diff --git a/src/client/views/nodes/AudioBox.scss b/src/client/views/nodes/AudioBox.scss
index 3fcb024df..ac2b19fd6 100644
--- a/src/client/views/nodes/AudioBox.scss
+++ b/src/client/views/nodes/AudioBox.scss
@@ -1,188 +1,204 @@
+@import "../global/globalCssVariables.scss";
+
+
.audiobox-container,
.audiobox-container-interactive {
+ width: 100%;
+ height: 100%;
+ position: inherit;
+ display: flex;
+ position: relative;
+ cursor: default;
+
+ .audiobox-buttons {
+ display: flex;
width: 100%;
+ align-items: center;
height: 100%;
- position: inherit;
- display: flex;
- position: relative;
- cursor: default;
- .audiobox-buttons {
- display: flex;
- width: 100%;
- align-items: center;
- height: 100%;
-
- .audiobox-dictation {
- position: relative;
- width: 30px;
- height: 100%;
- align-items: center;
- display: inherit;
- background: dimgray;
- left: 0px;
- &:hover {
- color: white;
- cursor: pointer;
- }
- }
+ .audiobox-dictation {
+ position: relative;
+ width: 30px;
+ height: 100%;
+ align-items: center;
+ display: inherit;
+ background: $medium-gray;
+ left: 0px;
+ color: $dark-gray;
+ &:hover {
+ color: $black;
+ cursor: pointer;
+ }
}
+ }
- .audiobox-control,
- .audiobox-control-interactive {
- top: 0;
- max-height: 32px;
- width: 100%;
- display: inline-block;
- pointer-events: none;
- }
+ .audiobox-control,
+ .audiobox-control-interactive {
+ top: 0;
+ max-height: 32px;
+ width: 100%;
+ display: inline-block;
+ pointer-events: none;
+ }
+
+ .audiobox-control-interactive {
+ pointer-events: all;
+ }
- .audiobox-control-interactive {
- pointer-events: all;
+ .audiobox-record-interactive,
+ .audiobox-record {
+ pointer-events: all;
+ width: 100%;
+ height: 100%;
+ position: relative;
+ }
+
+ .audiobox-record {
+ pointer-events: none;
+ }
+
+ .recording {
+ margin-top: auto;
+ margin-bottom: auto;
+ width: 100%;
+ height: 100%;
+ position: relative;
+ padding-right: 5px;
+ display: flex;
+ background-color: $medium-blue;
+
+ .time {
+ position: relative;
+ height: 100%;
+ width: 100%;
+ font-size: $large-header;
+ text-align: center;
+ top: 5;
}
- .audiobox-record-interactive,
- .audiobox-record {
- pointer-events: all;
- width: 100%;
- height: 100%;
- position: relative;
+ .buttons {
+ position: relative;
+ margin-top: auto;
+ margin-bottom: auto;
+ width: 25px;
+ width: 25px;
+ padding: 5px;
+ color: $dark-gray;
+ &:hover {
+ color: $black;
+ }
}
+ }
- .audiobox-record {
- pointer-events: none;
+ .audiobox-controls {
+ width: 100%;
+ height: 100%;
+ position: relative;
+ display: flex;
+ background: $dark-gray;
+
+ .audiobox-dictation {
+ position: absolute;
+ width: 40px;
+ height: 100%;
+ align-items: center;
+ display: inherit;
+ background: $medium-gray;
+ left: 0px;
}
- .recording {
+ .audiobox-player {
+ margin-top: auto;
+ margin-bottom: auto;
+ width: 100%;
+ position: relative;
+ padding-right: 5px;
+ display: flex;
+ flex-direction: column;
+ justify-content: center;
+
+ .audiobox-buttons {
+ position: relative;
margin-top: auto;
margin-bottom: auto;
- width: 100%;
- height: 100%;
- position: relative;
- padding-right: 5px;
+ width: 30px;
+ height: 30px;
+ border-radius: 50%;
+ background-color: $dark-gray;
+ color: $white;
display: flex;
- background-color: red;
-
- .time {
- position: relative;
- height: 100%;
- width: 100%;
- font-size: 20;
- text-align: center;
- top: 5;
+ align-items: center;
+ justify-content: center;
+ left: 5px;
+ &:hover {
+ background-color: $black;
}
- .buttons {
- position: relative;
- margin-top: auto;
- margin-bottom: auto;
- width: 25px;
- padding: 5px;
- &:hover{
- background-color: crimson;
- }
+ svg {
+ width: 100%;
+ position: absolute;
+ border-width: "thin";
+ border-color: "white";
}
- }
+ }
- .audiobox-controls {
- width: 100%;
- height: 100%;
+ .audiobox-dictation {
position: relative;
- display: flex;
- padding-left: 2px;
- background: black;
-
- .audiobox-dictation {
- position: absolute;
- width: 30px;
- height: 100%;
- align-items: center;
- display: inherit;
- background: dimgray;
- left: 0px;
- }
+ margin-top: auto;
+ margin-bottom: auto;
+ width: 25px;
+ align-items: center;
+ display: inherit;
+ background: $medium-gray;
+ }
- .audiobox-player {
- margin-top: auto;
- margin-bottom: auto;
- width: 100%;
- position: relative;
- padding-right: 5px;
- display: flex;
-
- .audiobox-playhead {
- position: relative;
- margin-top: auto;
- margin-bottom: auto;
- margin-right: 2px;
- height: 25px;
- padding: 2px;
- border-radius: 50%;
- background-color: black;
- color: white;
- &:hover {
- background-color: grey;
- color: lightgrey;
- }
- }
-
- .audiobox-dictation {
- position: relative;
- margin-top: auto;
- margin-bottom: auto;
- width: 25px;
- padding: 2px;
- align-items: center;
- display: inherit;
- background: dimgray;
- }
-
- .audiobox-timeline {
- position: absolute;
- width: 100%;
- border: gray solid 1px;
- border-radius: 3px;
- z-index: 1000;
- overflow: hidden;
- }
-
- .audioBox-total-time,
- .audioBox-current-time {
- position: absolute;
- font-size: 8;
- top: 100%;
- color: white;
- }
- .audioBox-current-time {
- left: 30px;
- }
-
- .audioBox-total-time {
- right: 2px;
- }
- }
+ .audiobox-timeline {
+ position: absolute;
+ width: 100%;
+ z-index: 1000;
+ overflow: hidden;
+ border-right: 5px solid black;
+ }
+
+ .audioBox-total-time,
+ .audioBox-current-time {
+ position: absolute;
+ font-size: $small-text;
+ top: 100%;
+ color: $white;
+ }
+ .audioBox-current-time {
+ left: 42px;
+ }
+
+ .audioBox-total-time {
+ right: 2px;
+ }
}
+ }
}
-
@media only screen and (max-device-width: 480px) {
- .audiobox-dictation {
- font-size: 5em;
- display: flex;
- width: 100;
- justify-content: center;
- flex-direction: column;
- align-items: center;
- }
-
- .audiobox-container .audiobox-record,
- .audiobox-container-interactive .audiobox-record {
- font-size: 3em;
- }
-
- .audiobox-container .audiobox-controls .audiobox-player .audiobox-playhead,
- .audiobox-container .audiobox-controls .audiobox-player .audiobox-dictation,
- .audiobox-container-interactive .audiobox-controls .audiobox-player .audiobox-playhead {
- width: 70px;
- }
-} \ No newline at end of file
+ .audiobox-dictation {
+ font-size: 5em;
+ display: flex;
+ width: 100;
+ justify-content: center;
+ flex-direction: column;
+ align-items: center;
+ }
+
+ .audiobox-container .audiobox-record,
+ .audiobox-container-interactive .audiobox-record {
+ font-size: 3em;
+ }
+
+ .audiobox-container .audiobox-controls .audiobox-player .audiobox-buttons,
+ .audiobox-container .audiobox-controls .audiobox-player .audiobox-dictation,
+ .audiobox-container-interactive
+ .audiobox-controls
+ .audiobox-player
+ .audiobox-buttons {
+ width: 70px;
+ }
+}
diff --git a/src/client/views/nodes/AudioBox.tsx b/src/client/views/nodes/AudioBox.tsx
index 60c655958..a3f03fc4b 100644
--- a/src/client/views/nodes/AudioBox.tsx
+++ b/src/client/views/nodes/AudioBox.tsx
@@ -1,6 +1,13 @@
import React = require("react");
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
-import { action, computed, IReactionDisposer, observable, reaction, runInAction } from "mobx";
+import {
+ action,
+ computed,
+ IReactionDisposer,
+ observable,
+ reaction,
+ runInAction,
+} from "mobx";
import { observer } from "mobx-react";
import { DateField } from "../../../fields/DateField";
import { Doc, DocListCast, Opt } from "../../../fields/Doc";
@@ -17,23 +24,34 @@ import { SnappingManager } from "../../util/SnappingManager";
import { CollectionStackedTimeline } from "../collections/CollectionStackedTimeline";
import { ContextMenu } from "../ContextMenu";
import { ContextMenuProps } from "../ContextMenuItem";
-import { ViewBoxAnnotatableComponent, ViewBoxAnnotatableProps } from "../DocComponent";
+import {
+ ViewBoxAnnotatableComponent,
+ ViewBoxAnnotatableProps,
+} from "../DocComponent";
import "./AudioBox.scss";
-import { FieldView, FieldViewProps } from './FieldView';
+import { FieldView, FieldViewProps } from "./FieldView";
import { LinkDocPreview } from "./LinkDocPreview";
+import { faLessThan } from "@fortawesome/free-solid-svg-icons";
+import { Colors } from "../global/globalEnums";
+
declare class MediaRecorder {
- constructor(e: any); // whatever MediaRecorder has
+ constructor(e: any); // whatever MediaRecorder has
}
type AudioDocument = makeInterface<[typeof documentSchema]>;
const AudioDocument = makeInterface(documentSchema);
@observer
-export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProps & FieldViewProps, AudioDocument>(AudioDocument) {
- public static LayoutString(fieldKey: string) { return FieldView.LayoutString(AudioBox, fieldKey); }
+export class AudioBox extends ViewBoxAnnotatableComponent<
+ ViewBoxAnnotatableProps & FieldViewProps,
+ AudioDocument
+>(AudioDocument) {
+ public static LayoutString(fieldKey: string) {
+ return FieldView.LayoutString(AudioBox, fieldKey);
+ }
public static Enabled = false;
- static playheadWidth = 30; // width of playhead
- static heightPercent = 80; // height of timeline in percent of height of audioBox.
+ static playheadWidth = 40; // width of playhead
+ static heightPercent = 75; // height of timeline in percent of height of audioBox.
static Instance: AudioBox;
_disposers: { [name: string]: IReactionDisposer } = {};
@@ -47,35 +65,82 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
_stream: MediaStream | undefined;
_start: number = 0;
_play: any = null;
+ _ended: boolean = false;
@observable static _scrubTime = 0;
@observable _markerEnd: number = 0;
@observable _position: number = 0;
@observable _waveHeight: Opt<number> = this.layoutDoc._height;
@observable _paused: boolean = false;
- @computed get mediaState(): undefined | "pendingRecording" | "recording" | "paused" | "playing" { return this.dataDoc.mediaState as (undefined | "pendingRecording" | "recording" | "paused" | "playing"); }
- set mediaState(value) { this.dataDoc.mediaState = value; }
- public static SetScrubTime = action((timeInMillisFrom1970: number) => { AudioBox._scrubTime = 0; AudioBox._scrubTime = timeInMillisFrom1970; });
- @computed get recordingStart() { return Cast(this.dataDoc[this.props.fieldKey + "-recordingStart"], DateField)?.date.getTime(); }
- @computed get duration() { return NumCast(this.dataDoc[`${this.fieldKey}-duration`]); }
- @computed get anchorDocs() { return DocListCast(this.dataDoc[this.annotationKey]); }
- @computed get links() { return DocListCast(this.dataDoc.links); }
- @computed get pauseTime() { return this._pauseEnd - this._pauseStart; } // total time paused to update the correct time
- @computed get heightPercent() { return AudioBox.heightPercent; }
+ @observable _trimming: boolean = false;
+ @observable _trimStart: number = NumCast(this.layoutDoc.clipStart) ? NumCast(this.layoutDoc.clipStart) : 0;
+ @observable _trimEnd: number = NumCast(this.layoutDoc.clipEnd) ? NumCast(this.layoutDoc.clipEnd)
+ : this.duration;
+
+ @computed get mediaState():
+ | undefined
+ | "pendingRecording"
+ | "recording"
+ | "paused"
+ | "playing" {
+ return this.dataDoc.mediaState as
+ | undefined
+ | "pendingRecording"
+ | "recording"
+ | "paused"
+ | "playing";
+ }
+ set mediaState(value) {
+ this.dataDoc.mediaState = value;
+ }
+ public static SetScrubTime = action((timeInMillisFrom1970: number) => {
+ AudioBox._scrubTime = 0;
+ AudioBox._scrubTime = timeInMillisFrom1970;
+ });
+ @computed get recordingStart() {
+ return Cast(
+ this.dataDoc[this.props.fieldKey + "-recordingStart"],
+ DateField
+ )?.date.getTime();
+ }
+ @computed get duration() {
+ return NumCast(this.dataDoc[`${this.fieldKey}-duration`]);
+ }
+ @computed get trimDuration() {
+ return this._trimming && this._trimEnd ? this.duration : this._trimEnd - this._trimStart;
+ }
+ @computed get anchorDocs() {
+ return DocListCast(this.dataDoc[this.annotationKey]);
+ }
+ @computed get links() {
+ return DocListCast(this.dataDoc.links);
+ }
+ @computed get pauseTime() {
+ return this._pauseEnd - this._pauseStart;
+ } // total time paused to update the correct time
+ @computed get heightPercent() {
+ return AudioBox.heightPercent;
+ }
constructor(props: Readonly<ViewBoxAnnotatableProps & FieldViewProps>) {
super(props);
AudioBox.Instance = this;
if (this.duration === undefined) {
- runInAction(() => this.Document[this.fieldKey + "-duration"] = this.Document.duration);
+ runInAction(
+ () =>
+ (this.Document[this.fieldKey + "-duration"] = this.Document.duration)
+ );
}
}
getLinkData(l: Doc) {
let la1 = l.anchor1 as Doc;
let la2 = l.anchor2 as Doc;
- const linkTime = this._stackedTimeline.current?.anchorStart(la2) || this._stackedTimeline.current?.anchorStart(la1) || 0;
+ const linkTime =
+ this._stackedTimeline.current?.anchorStart(la2) ||
+ this._stackedTimeline.current?.anchorStart(la1) ||
+ 0;
if (Doc.AreProtosEqual(la1, this.dataDoc)) {
la1 = l.anchor2 as Doc;
la2 = l.anchor1 as Doc;
@@ -84,16 +149,26 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
getAnchor = () => {
- return CollectionStackedTimeline.createAnchor(this.rootDoc, this.dataDoc, this.annotationKey,
- "_timecodeToShow" /* audioStart */, "_timecodeToHide" /* audioEnd */, this._ele?.currentTime ||
- Cast(this.props.Document._currentTimecode, "number", null) || (this.mediaState === "recording" ? (Date.now() - (this.recordingStart || 0)) / 1000 : undefined))
- || this.rootDoc;
- }
+ return (
+ CollectionStackedTimeline.createAnchor(
+ this.rootDoc,
+ this.dataDoc,
+ this.annotationKey,
+ "_timecodeToShow" /* audioStart */,
+ "_timecodeToHide" /* audioEnd */,
+ this._ele?.currentTime ||
+ Cast(this.props.Document._currentTimecode, "number", null) ||
+ (this.mediaState === "recording"
+ ? (Date.now() - (this.recordingStart || 0)) / 1000
+ : undefined)
+ ) || this.rootDoc
+ );
+ };
componentWillUnmount() {
- Object.values(this._disposers).forEach(disposer => disposer?.());
+ Object.values(this._disposers).forEach((disposer) => disposer?.());
const ind = DocUtils.ActiveRecordings.indexOf(this);
- ind !== -1 && (DocUtils.ActiveRecordings.splice(ind, 1));
+ ind !== -1 && DocUtils.ActiveRecordings.splice(ind, 1);
}
@action
@@ -102,41 +177,70 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
this.mediaState = this.path ? "paused" : undefined;
+ this.layoutDoc.clipStart = this.layoutDoc.clipStart ? this.layoutDoc.clipStart : 0;
+ this.layoutDoc.clipEnd = this.layoutDoc.clipEnd ? this.layoutDoc.clipEnd : this.duration ? this.duration : undefined;
+
+ this.path && this.setAnchorTime(NumCast(this.layoutDoc.clipStart));
+ this.path && this.timecodeChanged();
+
this._disposers.triggerAudio = reaction(
- () => !LinkDocPreview.LinkInfo && this.props.renderDepth !== -1 ? NumCast(this.Document._triggerAudio, null) : undefined,
- start => start !== undefined && setTimeout(() => {
- this.playFrom(start);
+ () =>
+ !LinkDocPreview.LinkInfo && this.props.renderDepth !== -1
+ ? NumCast(this.Document._triggerAudio, null)
+ : undefined,
+ (start) =>
+ start !== undefined &&
setTimeout(() => {
- this.Document._currentTimecode = start;
- this.Document._triggerAudio = undefined;
- }, 10);
- }), // wait for mainCont and try again to play
+ this.playFrom(start);
+ setTimeout(() => {
+ this.Document._currentTimecode = start;
+ this.Document._triggerAudio = undefined;
+ }, 10);
+ }), // wait for mainCont and try again to play
{ fireImmediately: true }
);
this._disposers.audioStop = reaction(
- () => this.props.renderDepth !== -1 && !LinkDocPreview.LinkInfo ? Cast(this.Document._audioStop, "number", null) : undefined,
- audioStop => audioStop !== undefined && setTimeout(() => {
- this.Pause();
- setTimeout(() => this.Document._audioStop = undefined, 10);
- }), // wait for mainCont and try again to play
+ () =>
+ this.props.renderDepth !== -1 && !LinkDocPreview.LinkInfo
+ ? Cast(this.Document._audioStop, "number", null)
+ : undefined,
+ (audioStop) =>
+ audioStop !== undefined &&
+ setTimeout(() => {
+ this.Pause();
+ setTimeout(() => (this.Document._audioStop = undefined), 10);
+ }), // wait for mainCont and try again to play
{ fireImmediately: true }
);
}
// for updating the timecode
+ @action
timecodeChanged = () => {
const htmlEle = this._ele;
if (this.mediaState !== "recording" && htmlEle) {
- htmlEle.duration && htmlEle.duration !== Infinity && runInAction(() => this.dataDoc[this.fieldKey + "-duration"] = htmlEle.duration);
- this.links.map(l => this.getLinkData(l)).forEach(({ la1, la2, linkTime }) => {
- if (linkTime > NumCast(this.layoutDoc._currentTimecode) && linkTime < htmlEle.currentTime) {
- Doc.linkFollowHighlight(la1);
- }
- });
+ htmlEle.duration &&
+ htmlEle.duration !== Infinity &&
+ runInAction(
+ () => (this.dataDoc[this.fieldKey + "-duration"] = htmlEle.duration)
+ );
+ this.layoutDoc.clipEnd = this.layoutDoc.clipEnd ? Math.min(this.duration, NumCast(this.layoutDoc.clipEnd)) : this.duration;
+ this._trimEnd = this._trimEnd ? Math.min(this.duration, this._trimEnd) : this.duration;
+ this.links
+ .map((l) => this.getLinkData(l))
+ .forEach(({ la1, la2, linkTime }) => {
+ if (
+ linkTime > NumCast(this.layoutDoc._currentTimecode) &&
+ linkTime < htmlEle.currentTime
+ ) {
+ Doc.linkFollowHighlight(la1);
+ }
+ });
this.layoutDoc._currentTimecode = htmlEle.currentTime;
+
}
- }
+ };
// pause play back
Pause = action(() => {
@@ -146,12 +250,13 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
// play audio for documents created during recording
playFromTime = (absoluteTime: number) => {
- this.recordingStart && this.playFrom((absoluteTime - this.recordingStart) / 1000);
- }
+ this.recordingStart &&
+ this.playFrom((absoluteTime - this.recordingStart) / 1000);
+ };
// play back the audio from time
@action
- playFrom = (seekTimeInSeconds: number, endTime: number = this.duration) => {
+ playFrom = (seekTimeInSeconds: number, endTime: number = this._trimEnd, fullPlay: boolean = false) => {
clearTimeout(this._play);
if (Number.isNaN(this._ele?.duration)) {
setTimeout(() => this.playFrom(seekTimeInSeconds, endTime), 500);
@@ -162,18 +267,26 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
} else {
this.Pause();
}
- } else if (seekTimeInSeconds <= this._ele.duration) {
- this._ele.currentTime = seekTimeInSeconds;
+ } else if (this._trimStart <= endTime && seekTimeInSeconds <= this._trimEnd) {
+ const start = Math.max(this._trimStart, seekTimeInSeconds);
+ const end = Math.min(this._trimEnd, endTime);
+ this._ele.currentTime = start;
this._ele.play();
- runInAction(() => this.mediaState = "playing");
+ runInAction(() => (this.mediaState = "playing"));
if (endTime !== this.duration) {
- this._play = setTimeout(() => this.Pause(), (endTime - seekTimeInSeconds) * 1000); // use setTimeout to play a specific duration
+ this._play = setTimeout(
+ () => {
+ this._ended = fullPlay ? true : this._ended;
+ this.Pause()
+ },
+ (end - start) * 1000
+ ); // use setTimeout to play a specific duration
}
} else {
this.Pause();
}
}
- }
+ };
// update the recording time
updateRecordTime = () => {
@@ -182,16 +295,19 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
if (this._paused) {
this._pausedTime += (new Date().getTime() - this._recordStart) / 1000;
} else {
- this.layoutDoc._currentTimecode = (new Date().getTime() - this._recordStart - this.pauseTime) / 1000;
+ this.layoutDoc._currentTimecode =
+ (new Date().getTime() - this._recordStart - this.pauseTime) / 1000;
}
}
- }
+ };
// starts recording
recordAudioAnnotation = async () => {
this._stream = await navigator.mediaDevices.getUserMedia({ audio: true });
this._recorder = new MediaRecorder(this._stream);
- this.dataDoc[this.props.fieldKey + "-recordingStart"] = new DateField(new Date());
+ this.dataDoc[this.props.fieldKey + "-recordingStart"] = new DateField(
+ new Date()
+ );
DocUtils.ActiveRecordings.push(this);
this._recorder.ondataavailable = async (e: any) => {
const [{ result }] = await Networking.UploadFilesToServer(e.data);
@@ -200,30 +316,58 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
}
};
this._recordStart = new Date().getTime();
- runInAction(() => this.mediaState = "recording");
+ runInAction(() => (this.mediaState = "recording"));
setTimeout(this.updateRecordTime, 0);
this._recorder.start();
setTimeout(() => this._recorder && this.stopRecording(), 60 * 60 * 1000); // stop after an hour
- }
+ };
// context menu
specificContextMenu = (e: React.MouseEvent): void => {
const funcs: ContextMenuProps[] = [];
- funcs.push({ description: (this.layoutDoc.hideAnchors ? "Don't hide" : "Hide") + " anchors", event: () => this.layoutDoc.hideAnchors = !this.layoutDoc.hideAnchors, icon: "expand-arrows-alt" });
- funcs.push({ description: (this.layoutDoc.dontAutoPlayFollowedLinks ? "" : "Don't") + " play when link is selected", event: () => this.layoutDoc.dontAutoPlayFollowedLinks = !this.layoutDoc.dontAutoPlayFollowedLinks, icon: "expand-arrows-alt" });
- funcs.push({ description: (this.layoutDoc.autoPlayAnchors ? "Don't auto play" : "Auto play") + " anchors onClick", event: () => this.layoutDoc.autoPlayAnchors = !this.layoutDoc.autoPlayAnchors, icon: "expand-arrows-alt" });
- ContextMenu.Instance?.addItem({ description: "Options...", subitems: funcs, icon: "asterisk" });
- }
+ funcs.push({
+ description:
+ (this.layoutDoc.hideAnchors ? "Don't hide" : "Hide") + " anchors",
+ event: () => (this.layoutDoc.hideAnchors = !this.layoutDoc.hideAnchors),
+ icon: "expand-arrows-alt",
+ });
+ funcs.push({
+ description:
+ (this.layoutDoc.dontAutoPlayFollowedLinks ? "" : "Don't") +
+ " play when link is selected",
+ event: () =>
+ (this.layoutDoc.dontAutoPlayFollowedLinks =
+ !this.layoutDoc.dontAutoPlayFollowedLinks),
+ icon: "expand-arrows-alt",
+ });
+ funcs.push({
+ description:
+ (this.layoutDoc.autoPlayAnchors ? "Don't auto play" : "Auto play") +
+ " anchors onClick",
+ event: () =>
+ (this.layoutDoc.autoPlayAnchors = !this.layoutDoc.autoPlayAnchors),
+ icon: "expand-arrows-alt",
+ });
+ ContextMenu.Instance?.addItem({
+ description: "Options...",
+ subitems: funcs,
+ icon: "asterisk",
+ });
+ };
// stops the recording
stopRecording = action(() => {
this._recorder.stop();
this._recorder = undefined;
- this.dataDoc[this.fieldKey + "-duration"] = (new Date().getTime() - this._recordStart - this.pauseTime) / 1000;
+ this.dataDoc[this.fieldKey + "-duration"] =
+ (new Date().getTime() - this._recordStart - this.pauseTime) / 1000;
this.mediaState = "paused";
+ this._trimEnd = this.duration;
+ this.layoutDoc.clipStart = 0;
+ this.layoutDoc.clipEnd = this.duration;
this._stream?.getAudioTracks()[0].stop();
const ind = DocUtils.ActiveRecordings.indexOf(this);
- ind !== -1 && (DocUtils.ActiveRecordings.splice(ind, 1));
+ ind !== -1 && DocUtils.ActiveRecordings.splice(ind, 1);
});
// button for starting and stopping the recording
@@ -232,45 +376,71 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
this._recorder ? this.stopRecording() : this.recordAudioAnnotation();
e.stopPropagation();
}
- }
+ };
// for play button
Play = (e?: any) => {
- this.playFrom(this._ele!.paused ? this._ele!.currentTime : -1);
+ let start;
+ if (this._ended || this._ele!.currentTime == this.duration) {
+ start = this._trimStart;
+ this._ended = false;
+ }
+ else {
+ start = this._ele!.currentTime;
+ }
+
+ this.playFrom(start, this._trimEnd, true);
e?.stopPropagation?.();
- }
+ };
// creates a text document for dictation
onFile = (e: any) => {
- const newDoc = CurrentUserUtils.GetNewTextDoc("", NumCast(this.props.Document.x), NumCast(this.props.Document.y) + NumCast(this.props.Document._height) + 10,
- NumCast(this.props.Document._width), 2 * NumCast(this.props.Document._height));
+ const newDoc = CurrentUserUtils.GetNewTextDoc(
+ "",
+ NumCast(this.props.Document.x),
+ NumCast(this.props.Document.y) +
+ NumCast(this.props.Document._height) +
+ 10,
+ NumCast(this.props.Document._width),
+ 2 * NumCast(this.props.Document._height)
+ );
Doc.GetProto(newDoc).recordingSource = this.dataDoc;
- Doc.GetProto(newDoc).recordingStart = ComputedField.MakeFunction(`self.recordingSource["${this.props.fieldKey}-recordingStart"]`);
- Doc.GetProto(newDoc).mediaState = ComputedField.MakeFunction("self.recordingSource.mediaState");
+ Doc.GetProto(newDoc).recordingStart = ComputedField.MakeFunction(
+ `self.recordingSource["${this.props.fieldKey}-recordingStart"]`
+ );
+ Doc.GetProto(newDoc).mediaState = ComputedField.MakeFunction(
+ "self.recordingSource.mediaState"
+ );
this.props.addDocument?.(newDoc);
e.stopPropagation();
- }
+ };
// ref for updating time
setRef = (e: HTMLAudioElement | null) => {
e?.addEventListener("timeupdate", this.timecodeChanged);
e?.addEventListener("ended", this.Pause);
this._ele = e;
- }
+ };
// returns the path of the audio file
@computed get path() {
const field = Cast(this.props.Document[this.props.fieldKey], AudioField);
- const path = (field instanceof AudioField) ? field.url.href : "";
+ const path = field instanceof AudioField ? field.url.href : "";
return path === nullAudio ? "" : path;
}
// returns the html audio element
@computed get audio() {
- return <audio ref={this.setRef} className={`audiobox-control${this.isContentActive() ? "-interactive" : ""}`}>
- <source src={this.path} type="audio/mpeg" />
- Not supported.
- </audio>;
+ return (
+ <audio
+ ref={this.setRef}
+ className={`audiobox-control${this.isContentActive() ? "-interactive" : ""
+ }`}
+ >
+ <source src={this.path} type="audio/mpeg" />
+ Not supported.
+ </audio>
+ );
}
// pause the time during recording phase
@@ -280,7 +450,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
this._paused = true;
this._recorder.pause();
e.stopPropagation();
- }
+ };
// continue the recording
@action
@@ -289,104 +459,259 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
this._paused = false;
this._recorder.resume();
e.stopPropagation();
- }
+ };
playing = () => this.mediaState === "playing";
playLink = (link: Doc) => {
const stack = this._stackedTimeline.current;
if (link.annotationOn === this.rootDoc) {
- if (!this.layoutDoc.dontAutoPlayFollowedLinks) this.playFrom(stack?.anchorStart(link) || 0, stack?.anchorEnd(link));
- else this._ele!.currentTime = this.layoutDoc._currentTimecode = (stack?.anchorStart(link) || 0);
+ if (!this.layoutDoc.dontAutoPlayFollowedLinks)
+ this.playFrom(stack?.anchorStart(link) || 0, stack?.anchorEnd(link));
+ else
+ this._ele!.currentTime = this.layoutDoc._currentTimecode =
+ stack?.anchorStart(link) || 0;
+ } else {
+ this.links
+ .filter((l) => l.anchor1 === link || l.anchor2 === link)
+ .forEach((l) => {
+ const { la1, la2 } = this.getLinkData(l);
+ const startTime = stack?.anchorStart(la1) || stack?.anchorStart(la2);
+ const endTime = stack?.anchorEnd(la1) || stack?.anchorEnd(la2);
+ if (startTime !== undefined) {
+ if (!this.layoutDoc.dontAutoPlayFollowedLinks)
+ endTime
+ ? this.playFrom(startTime, endTime)
+ : this.playFrom(startTime);
+ else
+ this._ele!.currentTime = this.layoutDoc._currentTimecode =
+ startTime;
+ }
+ });
}
- else {
- this.links.filter(l => l.anchor1 === link || l.anchor2 === link).forEach(l => {
- const { la1, la2 } = this.getLinkData(l);
- const startTime = stack?.anchorStart(la1) || stack?.anchorStart(la2);
- const endTime = stack?.anchorEnd(la1) || stack?.anchorEnd(la2);
- if (startTime !== undefined) {
- if (!this.layoutDoc.dontAutoPlayFollowedLinks) endTime ? this.playFrom(startTime, endTime) : this.playFrom(startTime);
- else this._ele!.currentTime = this.layoutDoc._currentTimecode = startTime;
- }
- });
+ };
+
+ // shows trim controls
+ @action
+ startTrim = () => {
+ if (!this.duration) {
+ this.timecodeChanged();
+ }
+ if (this.mediaState === "playing") {
+ this.Pause();
+ }
+ this._trimming = true;
+ };
+
+ // hides trim controls and displays new clip
+ @action
+ finishTrim = () => {
+ if (this.mediaState === "playing") {
+ this.Pause();
}
+ this.layoutDoc.clipStart = this._trimStart;
+ this.layoutDoc.clipEnd = this._trimEnd;
+ this._trimming = false;
+ this.setAnchorTime(Math.max(Math.min(this._trimEnd, this._ele!.currentTime), this._trimStart));
+ };
+
+ @action
+ setStartTrim = (newStart: number) => {
+ this._trimStart = newStart;
+ }
+
+ @action
+ setEndTrim = (newEnd: number) => {
+ this._trimEnd = newEnd;
}
isActiveChild = () => this._isAnyChildContentActive;
- timelineWhenChildContentsActiveChanged = (isActive: boolean) => this.props.whenChildContentsActiveChanged(runInAction(() => this._isAnyChildContentActive = isActive));
- timelineScreenToLocal = () => this.props.ScreenToLocalTransform().translate(-AudioBox.playheadWidth, -(100 - this.heightPercent) / 200 * this.props.PanelHeight());
- setAnchorTime = (time: number) => this._ele!.currentTime = this.layoutDoc._currentTimecode = time;
- timelineHeight = () => this.props.PanelHeight() * this.heightPercent / 100 * this.heightPercent / 100; // panelHeight * heightPercent is player height. * heightPercent is timeline height (as per css inline)
+ timelineWhenChildContentsActiveChanged = (isActive: boolean) =>
+ this.props.whenChildContentsActiveChanged(
+ runInAction(() => (this._isAnyChildContentActive = isActive))
+ );
+ timelineScreenToLocal = () =>
+ this.props
+ .ScreenToLocalTransform()
+ .translate(
+ -AudioBox.playheadWidth,
+ (-(100 - this.heightPercent) / 200) * this.props.PanelHeight()
+ );
+ setAnchorTime = (time: number) => {
+ (this._ele!.currentTime = this.layoutDoc._currentTimecode = time);
+ }
+
+ timelineHeight = () =>
+ (((this.props.PanelHeight() * this.heightPercent) / 100) *
+ this.heightPercent) /
+ 100; // panelHeight * heightPercent is player height. * heightPercent is timeline height (as per css inline)
timelineWidth = () => this.props.PanelWidth() - AudioBox.playheadWidth;
@computed get renderTimeline() {
- return <CollectionStackedTimeline ref={this._stackedTimeline} {...this.props}
- fieldKey={this.annotationKey}
- dictationKey={this.fieldKey + "-dictation"}
- mediaPath={this.path}
- renderDepth={this.props.renderDepth + 1}
- startTag={"_timecodeToShow" /* audioStart */}
- endTag={"_timecodeToHide" /* audioEnd */}
- focus={DocUtils.DefaultFocus}
- bringToFront={emptyFunction}
- CollectionView={undefined}
- duration={this.duration}
- playFrom={this.playFrom}
- setTime={this.setAnchorTime}
- playing={this.playing}
- whenChildContentsActiveChanged={this.timelineWhenChildContentsActiveChanged}
- removeDocument={this.removeDocument}
- ScreenToLocalTransform={this.timelineScreenToLocal}
- Play={this.Play}
- Pause={this.Pause}
- isContentActive={this.isContentActive}
- playLink={this.playLink}
- PanelWidth={this.timelineWidth}
- PanelHeight={this.timelineHeight}
- />;
+ return (
+ <CollectionStackedTimeline
+ ref={this._stackedTimeline}
+ {...this.props}
+ fieldKey={this.annotationKey}
+ dictationKey={this.fieldKey + "-dictation"}
+ mediaPath={this.path}
+ renderDepth={this.props.renderDepth + 1}
+ startTag={"_timecodeToShow" /* audioStart */}
+ endTag={"_timecodeToHide" /* audioEnd */}
+ focus={DocUtils.DefaultFocus}
+ bringToFront={emptyFunction}
+ CollectionView={undefined}
+ duration={this.duration}
+ playFrom={this.playFrom}
+ setTime={this.setAnchorTime}
+ playing={this.playing}
+ whenChildContentsActiveChanged={
+ this.timelineWhenChildContentsActiveChanged
+ }
+ removeDocument={this.removeDocument}
+ ScreenToLocalTransform={this.timelineScreenToLocal}
+ Play={this.Play}
+ Pause={this.Pause}
+ isContentActive={this.isContentActive}
+ playLink={this.playLink}
+ PanelWidth={this.timelineWidth}
+ PanelHeight={this.timelineHeight}
+ trimming={this._trimming}
+ trimStart={this._trimStart}
+ trimEnd={this._trimEnd}
+ trimDuration={this.trimDuration}
+ setStartTrim={this.setStartTrim}
+ setEndTrim={this.setEndTrim}
+ />
+ );
}
render() {
- const interactive = SnappingManager.GetIsDragging() || this.isContentActive() ? "-interactive" : "";
- return <div className="audiobox-container"
- onContextMenu={this.specificContextMenu}
- onClick={!this.path && !this._recorder ? this.recordAudioAnnotation : undefined}
- style={{ pointerEvents: this.props.layerProvider?.(this.layoutDoc) === false ? "none" : undefined }}>
- {!this.path ?
- <div className="audiobox-buttons">
- <div className="audiobox-dictation" onClick={this.onFile}>
- <FontAwesomeIcon style={{ width: "30px", background: !this.layoutDoc.dontAutoPlayFollowedLinks ? "yellow" : "rgba(0,0,0,0)" }} icon="file-alt" size={this.props.PanelHeight() < 36 ? "1x" : "2x"} />
+ const interactive =
+ SnappingManager.GetIsDragging() || this.isContentActive()
+ ? "-interactive"
+ : "";
+ return (
+ <div
+ className="audiobox-container"
+ onContextMenu={this.specificContextMenu}
+ onClick={
+ !this.path && !this._recorder ? this.recordAudioAnnotation : undefined
+ }
+ style={{
+ pointerEvents:
+ this.props.layerProvider?.(this.layoutDoc) === false
+ ? "none"
+ : undefined,
+ }}
+ >
+ {!this.path ? (
+ <div className="audiobox-buttons">
+ <div className="audiobox-dictation" onClick={this.onFile}>
+ <FontAwesomeIcon
+ style={{
+ width: "30px",
+ background: !this.layoutDoc.dontAutoPlayFollowedLinks
+ ? Colors.LIGHT_BLUE
+ : "rgba(0,0,0,0)",
+ }}
+ icon="file-alt"
+ size={this.props.PanelHeight() < 36 ? "1x" : "2x"}
+ />
+ </div>
+ {this.mediaState === "recording" || this.mediaState === "paused" ? (
+ <div className="recording" onClick={(e) => e.stopPropagation()}>
+ <div className="recording-buttons" onClick={this.recordClick}>
+ <FontAwesomeIcon
+ icon={"stop"}
+ size={this.props.PanelHeight() < 36 ? "1x" : "2x"}
+ />
+ </div>
+ <div
+ className="recording-buttons"
+ onClick={this._paused ? this.recordPlay : this.recordPause}
+ >
+ <FontAwesomeIcon
+ icon={this._paused ? "play" : "pause"}
+ size={this.props.PanelHeight() < 36 ? "1x" : "2x"}
+ />
+ </div>
+ <div className="time">
+ {formatTime(
+ Math.round(NumCast(this.layoutDoc._currentTimecode))
+ )}
+ </div>
+ </div>
+ ) : (
+ <button
+ className={`audiobox-record${interactive}`}
+ style={{ backgroundColor: Colors.DARK_GRAY }}
+ >
+ RECORD
+ </button>
+ )}
</div>
- {this.mediaState === "recording" || this.mediaState === "paused" ?
- <div className="recording" onClick={e => e.stopPropagation()}>
- <div className="buttons" onClick={this.recordClick}>
- <FontAwesomeIcon icon={"stop"} size={this.props.PanelHeight() < 36 ? "1x" : "2x"} />
+ ) : (
+ <div
+ className="audiobox-controls"
+ style={{
+ pointerEvents:
+ this._isAnyChildContentActive || this.isContentActive()
+ ? "all"
+ : "none",
+ }}
+ >
+ <div className="audiobox-dictation" />
+ <div
+ className="audiobox-player"
+ style={{ height: `${AudioBox.heightPercent}%` }}
+ >
+ <div
+ className="audiobox-buttons"
+ title={this.mediaState === "paused" ? "play" : "pause"}
+ onClick={this.mediaState === "paused" ? this.Play : this.Pause}
+ >
+ {" "}
+ <FontAwesomeIcon
+ icon={this.mediaState === "paused" ? "play" : "pause"}
+ size={"1x"}
+ />
</div>
- <div className="buttons" onClick={this._paused ? this.recordPlay : this.recordPause}>
- <FontAwesomeIcon icon={this._paused ? "play" : "pause"} size={this.props.PanelHeight() < 36 ? "1x" : "2x"} />
+ <div
+ className="audiobox-buttons"
+ title={this._trimming ? "finish" : "trim"}
+ onClick={this._trimming ? this.finishTrim : this.startTrim}
+ >
+ <FontAwesomeIcon
+ icon={this._trimming ? "check" : "cut"}
+ size={"1x"}
+ />
+ </div>
+ <div
+ className="audiobox-timeline"
+ style={{
+ top: 0,
+ height: `100%`,
+ left: AudioBox.playheadWidth,
+ width: `calc(100% - ${AudioBox.playheadWidth}px)`,
+ background: "white",
+ }}
+ >
+ {this.renderTimeline}
+ </div>
+ {this.audio}
+ <div className="audioBox-current-time">
+ {this._trimming ?
+ formatTime(Math.round(NumCast(this.layoutDoc._currentTimecode)))
+ : formatTime(Math.round(NumCast(this.layoutDoc._currentTimecode) - NumCast(this._trimStart)))}
+ </div>
+ <div className="audioBox-total-time">
+ {this._trimming || !this._trimEnd ?
+ formatTime(Math.round(NumCast(this.duration)))
+ : formatTime(Math.round(NumCast(this.trimDuration)))}
</div>
- <div className="time">{formatTime(Math.round(NumCast(this.layoutDoc._currentTimecode)))}</div>
- </div>
- :
- <button className={`audiobox-record${interactive}`} style={{ backgroundColor: "black" }}>
- RECORD
- </button>}
- </div> :
- <div className="audiobox-controls" style={{ pointerEvents: this._isAnyChildContentActive || this.isContentActive() ? "all" : "none" }} >
- <div className="audiobox-dictation" />
- <div className="audiobox-player" style={{ height: `${AudioBox.heightPercent}%` }} >
- <div className="audiobox-playhead" style={{ width: AudioBox.playheadWidth }} title={this.mediaState === "paused" ? "play" : "pause"} onClick={this.Play}> <FontAwesomeIcon style={{ width: "100%", position: "absolute", left: "0px", top: "5px", borderWidth: "thin", borderColor: "white" }} icon={this.mediaState === "paused" ? "play" : "pause"} size={"1x"} /></div>
- <div className="audiobox-timeline" style={{ top: 0, height: `100%`, left: AudioBox.playheadWidth, width: `calc(100% - ${AudioBox.playheadWidth}px)`, background: "white" }}>
- {this.renderTimeline}
- </div>
- {this.audio}
- <div className="audioBox-current-time">
- {formatTime(Math.round(NumCast(this.layoutDoc._currentTimecode)))}
- </div>
- <div className="audioBox-total-time">
- {formatTime(Math.round(this.duration))}
</div>
</div>
- </div>
- }
- </div>;
+ )}
+ </div>
+ );
}
-} \ No newline at end of file
+}
diff --git a/src/client/views/nodes/DocumentView.tsx b/src/client/views/nodes/DocumentView.tsx
index ee3266647..2bd79df64 100644
--- a/src/client/views/nodes/DocumentView.tsx
+++ b/src/client/views/nodes/DocumentView.tsx
@@ -91,6 +91,7 @@ export interface DocComponentView {
setFocus?: () => void;
fieldKey?: string;
annotationKey?: string;
+ getTitle?: () => string;
}
export interface DocumentViewSharedProps {
renderDepth: number;
diff --git a/src/client/views/nodes/LabelBox.tsx b/src/client/views/nodes/LabelBox.tsx
index 6a7793ff0..8d665b8a6 100644
--- a/src/client/views/nodes/LabelBox.tsx
+++ b/src/client/views/nodes/LabelBox.tsx
@@ -20,11 +20,26 @@ const LabelSchema = createSchema({});
type LabelDocument = makeInterface<[typeof LabelSchema, typeof documentSchema]>;
const LabelDocument = makeInterface(LabelSchema, documentSchema);
+export interface LabelBoxProps {
+ label?: string
+}
+
@observer
-export class LabelBox extends ViewBoxBaseComponent<FieldViewProps, LabelDocument>(LabelDocument) {
+export class LabelBox extends ViewBoxBaseComponent<(FieldViewProps & LabelBoxProps), LabelDocument>(LabelDocument) {
public static LayoutString(fieldKey: string) { return FieldView.LayoutString(LabelBox, fieldKey); }
+ public static LayoutStringWithTitle(fieldType: { name: string }, fieldStr: string, label: string) {
+ return `<${fieldType.name} fieldKey={'${fieldStr}'} label={'${label}'} {...props} />`; // e.g., "<LabelBox fieldKey={'data'} label={'My Label'} {...props} />"
+ }
private dropDisposer?: DragManager.DragDropDisposer;
+ componentDidMount() {
+ this.props.setContentView?.(this);
+ }
+
+ getTitle() {
+ return this.props.label || "";
+ }
+
protected createDropTarget = (ele: HTMLDivElement) => {
this.dropDisposer?.();
if (ele) {
@@ -65,8 +80,8 @@ export class LabelBox extends ViewBoxBaseComponent<FieldViewProps, LabelDocument
render() {
const params = Cast(this.paramsDoc["onClick-paramFieldKeys"], listSpec("string"), []);
const missingParams = params?.filter(p => !this.paramsDoc[p]);
- params?.map(p => DocListCast(this.paramsDoc[p])); // bcz: really hacky form of prefetching ...
- const label = typeof this.rootDoc[this.fieldKey] === "string" ? StrCast(this.rootDoc[this.fieldKey]) : StrCast(this.rootDoc.title);
+ params?.map(p => DocListCast(this.paramsDoc[p])); // bcz: really hacky form of prefetching ...
+ const label = this.props.label ? this.props.label : typeof this.rootDoc[this.fieldKey] === "string" ? StrCast(this.rootDoc[this.fieldKey]) : StrCast(this.rootDoc.title);
return (
<div className="labelBox-outerDiv"
onMouseLeave={action(() => this._mouseOver = false)}