aboutsummaryrefslogtreecommitdiff
path: root/src
diff options
context:
space:
mode:
Diffstat (limited to 'src')
-rw-r--r--src/client/views/AudioWaveform.tsx24
-rw-r--r--src/client/views/collections/CollectionStackedTimeline.tsx13
-rw-r--r--src/client/views/nodes/AudioBox.tsx78
3 files changed, 56 insertions, 59 deletions
diff --git a/src/client/views/AudioWaveform.tsx b/src/client/views/AudioWaveform.tsx
index 7d83ea3dc..f7b117130 100644
--- a/src/client/views/AudioWaveform.tsx
+++ b/src/client/views/AudioWaveform.tsx
@@ -32,27 +32,25 @@ export class AudioWaveform extends React.Component<AudioWaveformProps> {
@computed get clipStart() { return this.props.clipStart; }
@computed get clipEnd() { return this.props.clipEnd; }
- audioBucketField = (start: number, end: number) => { return "audioBuckets-" + start.toFixed(2) + "-" + end.toFixed(2); }
+ audioBucketField = (start: number, end: number) => { return "audioBuckets/" + start.toFixed(2).replace(".", "_") + "/" + end.toFixed(2).replace(".", "_"); }
@computed get audioBuckets() { return Cast(this.props.layoutDoc[this.audioBucketField(this.clipStart, this.clipEnd)], listSpec("number"), []); }
componentWillUnmount() {
this._disposer?.();
}
componentDidMount() {
- this._disposer = reaction(() => [this.clipStart, this.clipEnd, this.audioBuckets.length],
- (range) => {
- if (range[2] !== AudioWaveform.NUMBER_OF_BUCKETS) {
- if (!this.props.layoutDoc[this.audioBucketField(range[0], range[1])]) {
- // setting these values here serves as a "lock" to prevent multiple attempts to create the waveform at nerly the same time.
- this.props.layoutDoc[this.audioBucketField(range[0], range[1])] = new List<number>(numberRange(AudioWaveform.NUMBER_OF_BUCKETS));
- setTimeout(this.createWaveformBuckets);
- }
+ this._disposer = reaction(() => ({ clipStart: this.clipStart, clipEnd: this.clipEnd, fieldKey: this.audioBucketField(this.clipStart, this.clipEnd) }),
+ ({ clipStart, clipEnd, fieldKey }) => {
+ if (!this.props.layoutDoc[fieldKey]) {
+ // setting these values here serves as a "lock" to prevent multiple attempts to create the waveform at nearly the same time.
+ this.props.layoutDoc[fieldKey] = new List<number>(numberRange(AudioWaveform.NUMBER_OF_BUCKETS));
+ setTimeout(() => this.createWaveformBuckets(fieldKey, clipStart, clipEnd));
}
}, { fireImmediately: true });
}
// decodes the audio file into peaks for generating the waveform
- createWaveformBuckets = async () => {
+ createWaveformBuckets = async (fieldKey: string, clipStart: number, clipEnd: number) => {
axios({ url: this.props.mediaPath, responseType: "arraybuffer" }).then(
(response) => {
const context = new window.AudioContext();
@@ -60,8 +58,8 @@ export class AudioWaveform extends React.Component<AudioWaveformProps> {
response.data,
action((buffer) => {
const rawDecodedAudioData = buffer.getChannelData(0);
- const startInd = this.clipStart / this.props.rawDuration;
- const endInd = this.clipEnd / this.props.rawDuration;
+ const startInd = clipStart / this.props.rawDuration;
+ const endInd = clipEnd / this.props.rawDuration;
const decodedAudioData = rawDecodedAudioData.slice(Math.floor(startInd * rawDecodedAudioData.length), Math.floor(endInd * rawDecodedAudioData.length));
const bucketDataSize = Math.floor(
@@ -78,7 +76,7 @@ export class AudioWaveform extends React.Component<AudioWaveformProps> {
0
) / 2
);
- this.props.layoutDoc[this.audioBucketField(this.clipStart, this.clipEnd)] = new List<number>(bucketList);
+ this.props.layoutDoc[fieldKey] = new List<number>(bucketList);
})
);
}
diff --git a/src/client/views/collections/CollectionStackedTimeline.tsx b/src/client/views/collections/CollectionStackedTimeline.tsx
index e00e66295..7859d3c3f 100644
--- a/src/client/views/collections/CollectionStackedTimeline.tsx
+++ b/src/client/views/collections/CollectionStackedTimeline.tsx
@@ -50,7 +50,6 @@ import { DragManager } from "../../util/DragManager";
type PanZoomDocument = makeInterface<[]>;
const PanZoomDocument = makeInterface();
export type CollectionStackedTimelineProps = {
- clipDuration: number;
Play: () => void;
Pause: () => void;
playLink: (linkDoc: Doc) => void;
@@ -65,6 +64,7 @@ export type CollectionStackedTimelineProps = {
trimming: boolean;
clipStart: number;
clipEnd: number;
+ clipDuration: number;
trimStart: () => number;
trimEnd: () => number;
trimDuration: () => number;
@@ -165,8 +165,8 @@ export class CollectionStackedTimeline extends CollectionSubView<
}
toTimeline = (screen_delta: number, width: number) => {
return Math.max(
- this.trimStart,
- Math.min(this.trimEnd, (screen_delta / width) * this.props.trimDuration() + this.trimStart));
+ this.props.clipStart,
+ Math.min(this.props.clipEnd, (screen_delta / width) * this.props.clipDuration + this.props.clipStart));
}
rangeClickScript = () => CollectionStackedTimeline.RangeScript;
@@ -282,12 +282,7 @@ export class CollectionStackedTimeline extends CollectionSubView<
this.props.isSelected(true) || this.props.isContentActive(),
undefined,
() => {
- !wasPlaying &&
- (this.props.trimming && this.clipDuration ?
- this.props.setTime(((clientX - rect.x) / rect.width) * this.clipDuration)
- :
- this.props.setTime(((clientX - rect.x) / rect.width) * this.props.trimDuration() + this.trimStart)
- );
+ !wasPlaying && this.props.setTime(((clientX - rect.x) / rect.width) * this.clipDuration + this.props.clipStart);
}
);
}
diff --git a/src/client/views/nodes/AudioBox.tsx b/src/client/views/nodes/AudioBox.tsx
index 6a25ffaeb..bfc15cea8 100644
--- a/src/client/views/nodes/AudioBox.tsx
+++ b/src/client/views/nodes/AudioBox.tsx
@@ -16,7 +16,7 @@ import { makeInterface } from "../../../fields/Schema";
import { ComputedField } from "../../../fields/ScriptField";
import { Cast, NumCast } from "../../../fields/Types";
import { AudioField, nullAudio } from "../../../fields/URLField";
-import { emptyFunction, formatTime, OmitKeys } from "../../../Utils";
+import { emptyFunction, formatTime, OmitKeys, setupMoveUpEvents, returnFalse } from "../../../Utils";
import { DocUtils } from "../../documents/Documents";
import { Networking } from "../../Network";
import { CurrentUserUtils } from "../../util/CurrentUserUtils";
@@ -48,13 +48,14 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
ViewBoxAnnotatableProps & FieldViewProps,
AudioDocument
>(AudioDocument) {
- public static LayoutString(fieldKey: string) {
- return FieldView.LayoutString(AudioBox, fieldKey);
- }
+ public static LayoutString(fieldKey: string) { return FieldView.LayoutString(AudioBox, fieldKey); }
public static Enabled = false;
static playheadWidth = 40; // width of playhead
static heightPercent = 75; // height of timeline in percent of height of audioBox.
static Instance: AudioBox;
+ static ScopeAll = 2;
+ static ScopeClip = 1;
+ static ScopeNone = 0;
_disposers: { [name: string]: IReactionDisposer } = {};
_ele: HTMLAudioElement | null = null;
@@ -74,14 +75,19 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
@observable _position: number = 0;
@observable _waveHeight: Opt<number> = this.layoutDoc._height;
@observable _paused: boolean = false;
- @observable _trimming: boolean = false;
+ @observable _trimming: number = AudioBox.ScopeNone;
@observable _trimStart: number = NumCast(this.layoutDoc.clipStart);
@observable _trimEnd: number | undefined = Cast(this.layoutDoc.clipEnd, "number");
- @computed get clipStart() { return NumCast(this.layoutDoc.clipStart); }
- @computed get clipEnd() { return NumCast(this.layoutDoc.clipEnd, this.duration); }
- @computed get trimStart() { return this._trimming ? this._trimStart : NumCast(this.layoutDoc.clipStart); }
+ @computed get clipStart() { return this._trimming === AudioBox.ScopeAll ? 0 : NumCast(this.layoutDoc.clipStart); }
+ @computed get clipDuration() {
+ return this._trimming === AudioBox.ScopeAll ? NumCast(this.dataDoc[`${this.fieldKey}-duration`]) :
+ NumCast(this.layoutDoc.clipEnd, this.clipStart + NumCast(this.dataDoc[`${this.fieldKey}-duration`])) - this.clipStart;
+ }
+ @computed get clipEnd() { return this.clipStart + this.clipDuration; }
+ @computed get trimStart() { return this._trimming !== AudioBox.ScopeNone ? this._trimStart : NumCast(this.layoutDoc.clipStart); }
+ @computed get trimDuration() { return this.trimEnd - this.trimStart; }
@computed get trimEnd() {
- return this._trimming && this._trimEnd !== undefined ? this._trimEnd : NumCast(this.layoutDoc.clipEnd, this.duration);
+ return this._trimming !== AudioBox.ScopeNone && this._trimEnd !== undefined ? this._trimEnd : NumCast(this.layoutDoc.clipEnd, this.clipDuration);
}
@computed get mediaState():
@@ -113,13 +119,6 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
@computed get rawDuration() {
return NumCast(this.dataDoc[`${this.fieldKey}-duration`]);
}
- @computed get duration() {
- return NumCast(this.layoutDoc.clipEnd, NumCast(this.layoutDoc.clipStart) + NumCast(this.dataDoc[`${this.fieldKey}-duration`])) - NumCast(this.layoutDoc.clipStart);
- // NumCast(this.dataDoc[`${this.fieldKey}-duration`]);
- }
- @computed get trimDuration() {
- return this.trimEnd - this.trimStart;
- }
@computed get anchorDocs() {
return DocListCast(this.dataDoc[this.annotationKey]);
}
@@ -269,7 +268,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
this._ele.currentTime = start;
this._ele.play();
runInAction(() => (this.mediaState = "playing"));
- if (endTime !== this.duration) {
+ if (endTime !== this.clipDuration) {
this._play = setTimeout(
() => {
this._ended = fullPlay ? true : this._ended;
@@ -309,7 +308,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
const [{ result }] = await Networking.UploadFilesToServer(e.data);
if (!(result instanceof Error)) {
this.props.Document[this.props.fieldKey] = new AudioField(result.accessPaths.agnostic.client);
- if (this._trimEnd === undefined) this._trimEnd = this.duration;
+ if (this._trimEnd === undefined) this._trimEnd = this.clipDuration;
}
};
this._recordStart = new Date().getTime();
@@ -359,9 +358,9 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
this.dataDoc[this.fieldKey + "-duration"] =
(new Date().getTime() - this._recordStart - this.pauseTime) / 1000;
this.mediaState = "paused";
- this._trimEnd = this.duration;
+ this._trimEnd = this.clipDuration;
this.layoutDoc.clipStart = 0;
- this.layoutDoc.clipEnd = this.duration;
+ this.layoutDoc.clipEnd = this.clipDuration;
this._stream?.getAudioTracks()[0].stop();
const ind = DocUtils.ActiveRecordings.indexOf(this);
ind !== -1 && DocUtils.ActiveRecordings.splice(ind, 1);
@@ -378,7 +377,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
// for play button
Play = (e?: any) => {
let start;
- if (this._ended || this._ele!.currentTime === this.duration) {
+ if (this._ended || this._ele!.currentTime === this.clipDuration) {
start = NumCast(this.layoutDoc.clipStart);
this._ended = false;
}
@@ -494,11 +493,11 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
// shows trim controls
@action
- startTrim = () => {
+ startTrim = (scope: number) => {
if (this.mediaState === "playing") {
this.Pause();
}
- this._trimming = true;
+ this._trimming = scope;
}
// hides trim controls and displays new clip
@@ -510,7 +509,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
this.layoutDoc.clipStart = this.trimStart;
this.layoutDoc.clipEnd = this.trimEnd;
this.setAnchorTime(Math.max(Math.min(this.trimEnd, this._ele!.currentTime), this.trimStart));
- this._trimming = false;
+ this._trimming = AudioBox.ScopeNone;
});
@action
@@ -579,15 +578,15 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
rawDuration={this.rawDuration}
// this edits the entire waveform when trimming is activated
- clipStart={this._trimming ? 0 : this.clipStart}
- clipEnd={this._trimming ? this.rawDuration : this.clipEnd}
- clipDuration={this._trimming ? this.rawDuration : this.duration}
+ clipStart={this._trimming === AudioBox.ScopeAll ? 0 : this.clipStart}
+ clipEnd={this._trimming === AudioBox.ScopeAll ? this.rawDuration : this.clipEnd}
+ clipDuration={this._trimming === AudioBox.ScopeAll ? this.rawDuration : this.clipDuration}
// this edits just the current waveform clip when trimming is activated
// clipStart={this.clipStart}
// clipEnd={this.clipEnd}
// clipDuration={this.duration}
- trimming={this._trimming}
+ trimming={this._trimming !== AudioBox.ScopeNone}
trimStart={this.trimStartFunc}
trimEnd={this.trimEndFunc}
trimDuration={this.trimDurationFunc}
@@ -596,6 +595,15 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
/>
);
}
+ onClipPointerDown = (e: React.PointerEvent) => {
+ setupMoveUpEvents(this, e, returnFalse, returnFalse, action((e: PointerEvent, doubleTap?: boolean) => {
+ if (doubleTap) {
+ this.startTrim(AudioBox.ScopeAll);
+ } else {
+ this._trimming !== AudioBox.ScopeNone ? this.finishTrim() : this.startTrim(AudioBox.ScopeClip);
+ }
+ }));
+ }
render() {
const interactive =
@@ -697,11 +705,11 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
</div>
<div
className="audiobox-buttons"
- title={this._trimming ? "finish" : "trim"}
- onClick={this._trimming ? this.finishTrim : this.startTrim}
+ title={this._trimming !== AudioBox.ScopeNone ? "finish" : "trim"}
+ onPointerDown={this.onClipPointerDown}
>
<FontAwesomeIcon
- icon={this._trimming ? "check" : "cut"}
+ icon={this._trimming !== AudioBox.ScopeNone ? "check" : "cut"}
size={"1x"}
/>
</div>
@@ -719,14 +727,10 @@ export class AudioBox extends ViewBoxAnnotatableComponent<
</div>
{this.audio}
<div className="audioBox-current-time">
- {this._trimming ?
- formatTime(Math.round(NumCast(this.layoutDoc._currentTimecode)))
- : formatTime(Math.round(NumCast(this.layoutDoc._currentTimecode) - NumCast(this.trimStart)))}
+ {formatTime(Math.round(NumCast(this.layoutDoc._currentTimecode) - NumCast(this.clipStart)))}
</div>
<div className="audioBox-total-time">
- {this._trimming || !this._trimEnd ?
- formatTime(Math.round(NumCast(this.duration)))
- : formatTime(Math.round(NumCast(this.trimDuration)))}
+ {formatTime(Math.round(NumCast(this.clipDuration)))}
</div>
</div>
</div>