-rw-r--r--   src/client/documents/Documents.ts                           |  4
-rw-r--r--   src/client/views/SidebarAnnos.tsx                           |  1
-rw-r--r--   src/client/views/collections/CollectionMenu.tsx             |  3
-rw-r--r--   src/client/views/collections/CollectionStackedTimeline.tsx |  4
-rw-r--r--   src/client/views/nodes/AudioBox.tsx                         | 56
-rw-r--r--   src/client/views/nodes/ScreenshotBox.tsx                    |  3
-rw-r--r--   src/client/views/nodes/formattedText/FormattedTextBox.tsx  |  9
7 files changed, 63 insertions, 17 deletions
diff --git a/src/client/documents/Documents.ts b/src/client/documents/Documents.ts
index e413d4389..90c090f29 100644
--- a/src/client/documents/Documents.ts
+++ b/src/client/documents/Documents.ts
@@ -32,7 +32,7 @@ import { ContextMenu } from '../views/ContextMenu';
 import { ContextMenuProps } from '../views/ContextMenuItem';
 import { DFLT_IMAGE_NATIVE_DIM } from '../views/global/globalCssVariables.scss';
 import { ActiveArrowEnd, ActiveArrowStart, ActiveDash, ActiveFillColor, ActiveInkBezierApprox, ActiveInkColor, ActiveInkWidth, ActiveIsInkMask, InkingStroke } from '../views/InkingStroke';
-import { AudioBox } from '../views/nodes/AudioBox';
+import { AudioBox, media_state } from '../views/nodes/AudioBox';
 import { ColorBox } from '../views/nodes/ColorBox';
 import { ComparisonBox } from '../views/nodes/ComparisonBox';
 import { DataVizBox } from '../views/nodes/DataVizBox/DataVizBox';
@@ -269,7 +269,7 @@ export class DocumentOptions {
     _label_maxFontSize?: NUMt = new NumInfo('maximum font size for labelBoxes', false);
     stroke_width?: NUMt = new NumInfo('width of an ink stroke', false);
     icon_label?: STRt = new StrInfo('label to use for a fontIcon doc (otherwise, the title is used)', false);
-    mediaState?: STRt = new StrInfo('status of audio/video media document: "pendingRecording", "recording", "paused", "playing"', false);
+    mediaState?: STRt = new StrInfo(`status of audio/video media document: ${media_state.PendingRecording}, ${media_state.Recording}, ${media_state.Paused}, ${media_state.Playing}`, false);
     recording?: BOOLt = new BoolInfo('whether WebCam is recording or not');
     autoPlayAnchors?: BOOLt = new BoolInfo('whether to play audio/video when an anchor is clicked in a stackedTimeline.');
     dontPlayLinkOnSelect?: BOOLt = new BoolInfo('whether an audio/video should start playing when a link is followed to it.');
diff --git a/src/client/views/SidebarAnnos.tsx b/src/client/views/SidebarAnnos.tsx
index f3452c780..ff347d65f 100644
--- a/src/client/views/SidebarAnnos.tsx
+++ b/src/client/views/SidebarAnnos.tsx
@@ -79,7 +79,6 @@ export class SidebarAnnos extends React.Component<FieldViewProps & ExtraProps> {
             _layout_autoHeight: true,
             _text_fontSize: StrCast(Doc.UserDoc().fontSize),
             _text_fontFamily: StrCast(Doc.UserDoc().fontFamily),
-            target: 'HELLO' as any,
         });
         FormattedTextBox.SelectOnLoad = target[Id];
         FormattedTextBox.DontSelectInitialText = true;
diff --git a/src/client/views/collections/CollectionMenu.tsx b/src/client/views/collections/CollectionMenu.tsx
index 5c9dd2058..78ab0797b 100644
--- a/src/client/views/collections/CollectionMenu.tsx
+++ b/src/client/views/collections/CollectionMenu.tsx
@@ -41,6 +41,7 @@ import { COLLECTION_BORDER_WIDTH } from './CollectionView';
 import { TabDocView } from './TabDocView';
 import { CollectionFreeFormView } from './collectionFreeForm';
 import { CollectionLinearView } from './collectionLinear';
+import { media_state } from '../nodes/AudioBox';
 
 interface CollectionMenuProps {
     panelHeight: () => number;
@@ -579,7 +580,7 @@ export class CollectionViewBaseChrome extends React.Component<CollectionViewMenu
     @undoBatch
     @action
     startRecording = () => {
-        const doc = Docs.Create.ScreenshotDocument({ title: 'screen recording', _layout_fitWidth: true, _width: 400, _height: 200, mediaState: 'pendingRecording' });
+        const doc = Docs.Create.ScreenshotDocument({ title: 'screen recording', _layout_fitWidth: true, _width: 400, _height: 200, mediaState: media_state.PendingRecording });
         CollectionDockingView.AddSplit(doc, OpenWhereMod.right);
     };
 
diff --git a/src/client/views/collections/CollectionStackedTimeline.tsx b/src/client/views/collections/CollectionStackedTimeline.tsx
index 0a5a80936..ad3160a08 100644
--- a/src/client/views/collections/CollectionStackedTimeline.tsx
+++ b/src/client/views/collections/CollectionStackedTimeline.tsx
@@ -161,6 +161,10 @@ export class CollectionStackedTimeline extends CollectionSubView<CollectionStack
         this.layoutDoc.clipEnd = this.trimEnd;
         this._trimming = TrimScope.None;
     }
+    @action
+    public CancelTrimming() {
+        this._trimming = TrimScope.None;
+    }
 
     @action
     public setZoom(zoom: number) {
diff --git a/src/client/views/nodes/AudioBox.tsx b/src/client/views/nodes/AudioBox.tsx
index 50b2432d2..8d80f1364 100644
--- a/src/client/views/nodes/AudioBox.tsx
+++ b/src/client/views/nodes/AudioBox.tsx
@@ -1,9 +1,10 @@
 import React = require('react');
 import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
+import { Tooltip } from '@material-ui/core';
 import { action, computed, IReactionDisposer, observable, runInAction } from 'mobx';
 import { observer } from 'mobx-react';
 import { DateField } from '../../../fields/DateField';
-import { Doc, DocListCast } from '../../../fields/Doc';
+import { Doc } from '../../../fields/Doc';
 import { ComputedField } from '../../../fields/ScriptField';
 import { Cast, DateCast, NumCast } from '../../../fields/Types';
 import { AudioField, nullAudio } from '../../../fields/URLField';
@@ -18,9 +19,9 @@ import { ContextMenu } from '../ContextMenu';
 import { ContextMenuProps } from '../ContextMenuItem';
 import { ViewBoxAnnotatableComponent, ViewBoxAnnotatableProps } from '../DocComponent';
 import './AudioBox.scss';
+import { DocFocusOptions } from './DocumentView';
 import { FieldView, FieldViewProps } from './FieldView';
 import { PinProps, PresBox } from './trails';
-import { DocFocusOptions } from './DocumentView';
 
 /**
  * AudioBox
@@ -67,7 +68,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
     _stream: MediaStream | undefined; // passed to MediaRecorder, records device input audio
     _play: any = null; // timeout for playback
 
-    @observable _stackedTimeline: any; // CollectionStackedTimeline ref
+    @observable _stackedTimeline: CollectionStackedTimeline | null | undefined; // CollectionStackedTimeline ref
     @observable _finished: boolean = false; // has playback reached end of clip
     @observable _volume: number = 1;
     @observable _muted: boolean = false;
@@ -477,8 +478,34 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
     };
 
     // for trim button, double click displays full clip, single displays curr trim bounds
+    onResetPointerDown = (e: React.PointerEvent) => {
+        e.stopPropagation();
+        this.timeline &&
+            setupMoveUpEvents(
+                this,
+                e,
+                returnFalse,
+                returnFalse,
+                action(e => {
+                    if (this.timeline?.IsTrimming !== TrimScope.None) {
+                        this.timeline?.CancelTrimming();
+                    } else {
+                        this.beginEndtime = this.timeline?.trimEnd;
+                        this.beginStarttime = this.timeline?.trimStart;
+                        this.startTrim(TrimScope.All);
+                    }
+                })
+            );
+    };
+
+    beginEndtime: number | undefined;
+    beginStarttime: number | undefined;
+
+    // for trim button, double click displays full clip, single displays curr trim bounds
     onClipPointerDown = (e: React.PointerEvent) => {
         e.stopPropagation();
+        this.beginEndtime = this.timeline?.trimEnd;
+        this.beginStarttime = this.timeline?.trimStart;
         this.timeline &&
             setupMoveUpEvents(
                 this,
@@ -529,7 +556,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
                 r,
                 (e, de) => {
                     const [xp, yp] = this.props.ScreenToLocalTransform().transformPoint(de.x, de.y);
-                    de.complete.docDragData && this.timeline.internalDocDrop(e, de, de.complete.docDragData, xp);
+                    de.complete.docDragData && this.timeline?.internalDocDrop(e, de, de.complete.docDragData, xp);
                 },
                 this.layoutDoc,
                 undefined
@@ -591,9 +618,22 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
                        </div>
 
                        {!this.miniPlayer && (
-                            <div className="audiobox-button" title={this.timeline?.IsTrimming !== TrimScope.None ? 'finish' : 'trim'} onPointerDown={this.onClipPointerDown}>
-                                <FontAwesomeIcon icon={this.timeline?.IsTrimming !== TrimScope.None ? 'check' : 'cut'} size={'1x'} />
-                            </div>
+                            <>
+                                <Tooltip title={<>trim audio</>}>
+                                    <div className="audiobox-button" onPointerDown={this.onClipPointerDown}>
+                                        <FontAwesomeIcon icon={this.timeline?.IsTrimming !== TrimScope.None ? 'check' : 'cut'} size={'1x'} />
+                                    </div>
+                                </Tooltip>
+                                {this.timeline?.IsTrimming == TrimScope.None && !NumCast(this.layoutDoc.clipStart) && NumCast(this.layoutDoc.clipEnd) === this.rawDuration ? (
+                                    <></>
+                                ) : (
+                                    <Tooltip title={<>{this.timeline?.IsTrimming !== TrimScope.None ? 'Cancel trimming' : 'Edit original timeline'}</>}>
+                                        <div className="audiobox-button" onPointerDown={this.onResetPointerDown}>
+                                            <FontAwesomeIcon icon={this.timeline?.IsTrimming !== TrimScope.None ? 'cancel' : 'arrows-left-right'} size={'1x'} />
+                                        </div>
+                                    </Tooltip>
+                                )}
+                            </>
                        )}
                     </div>
                     <div className="controls-right">
@@ -658,7 +698,7 @@ export class AudioBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
     @computed get renderTimeline() {
         return (
             <CollectionStackedTimeline
-                ref={action((r: any) => (this._stackedTimeline = r))}
+                ref={action((r: CollectionStackedTimeline | null) => (this._stackedTimeline = r))}
                 {...this.props}
                 CollectionFreeFormDocumentView={undefined}
                 dataFieldKey={this.fieldKey}
diff --git a/src/client/views/nodes/ScreenshotBox.tsx b/src/client/views/nodes/ScreenshotBox.tsx
index 26ad8b7bb..ebb8a3374 100644
--- a/src/client/views/nodes/ScreenshotBox.tsx
+++ b/src/client/views/nodes/ScreenshotBox.tsx
@@ -27,6 +27,7 @@ import { FormattedTextBox } from './formattedText/FormattedTextBox';
 import './ScreenshotBox.scss';
 import { VideoBox } from './VideoBox';
 import { TrackMovements } from '../../util/TrackMovements';
+import { media_state } from './AudioBox';
 
 declare class MediaRecorder {
     constructor(e: any, options?: any); // whatever MediaRecorder has
@@ -181,7 +182,7 @@ export class ScreenshotBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatabl
                     ref={r => {
                         this._videoRef = r;
                         setTimeout(() => {
-                            if (this.rootDoc.mediaState === 'pendingRecording' && this._videoRef) {
+                            if (this.rootDoc.mediaState === media_state.PendingRecording && this._videoRef) {
                                 this.toggleRecording();
                             }
                         }, 100);
diff --git a/src/client/views/nodes/formattedText/FormattedTextBox.tsx b/src/client/views/nodes/formattedText/FormattedTextBox.tsx
index 58b824159..6fdde4b6b 100644
--- a/src/client/views/nodes/formattedText/FormattedTextBox.tsx
+++ b/src/client/views/nodes/formattedText/FormattedTextBox.tsx
@@ -71,6 +71,7 @@ import { schema } from './schema_rts';
 import { SummaryView } from './SummaryView';
 import applyDevTools = require('prosemirror-dev-tools');
 import React = require('react');
+import { media_state } from '../AudioBox';
 const translateGoogleApi = require('translate-google-api');
 export const GoogleRef = 'googleDocId';
 type PullHandler = (exportState: Opt<GoogleApiClientUtils.Docs.ImportResult>, dataDoc: Doc) => void;
@@ -154,10 +155,10 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<FieldViewProps
         return this.titleHeight + NumCast(this.layoutDoc._layout_autoHeightMargins);
     }
     @computed get _recordingDictation() {
-        return this.dataDoc?.mediaState === 'recording';
+        return this.dataDoc?.mediaState === media_state.Recording;
     }
     set _recordingDictation(value) {
-        !this.dataDoc[`${this.fieldKey}_recordingSource`] && (this.dataDoc.mediaState = value ? 'recording' : undefined);
+        !this.dataDoc[`${this.fieldKey}_recordingSource`] && (this.dataDoc.mediaState = value ? media_state.Recording : undefined);
     }
     @computed get config() {
         this._keymap = buildKeymap(schema, this.props);
@@ -271,7 +272,7 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<FieldViewProps
             if (target) {
                 anchor.followLinkAudio = true;
                 let stopFunc: any;
-                Doc.GetProto(target).mediaState = 'recording';
+                Doc.GetProto(target).mediaState = media_state.Recording;
                 Doc.GetProto(target).audioAnnoState = 'recording';
                 DocumentViewInternal.recordAudioAnnotation(Doc.GetProto(target), Doc.LayoutFieldKey(target), stop => (stopFunc = stop));
                 let reactionDisposer = reaction(
@@ -389,7 +390,7 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<FieldViewProps
         let link;
         LinkManager.Links(this.dataDoc).forEach((l, i) => {
             const anchor = (l.link_anchor_1 as Doc).annotationOn ? (l.link_anchor_1 as Doc) : (l.link_anchor_2 as Doc).annotationOn ? (l.link_anchor_2 as Doc) : undefined;
-            if (anchor && (anchor.annotationOn as Doc).mediaState === 'recording') {
+            if (anchor && (anchor.annotationOn as Doc).mediaState === media_state.Recording) {
                 linkTime = NumCast(anchor._timecodeToShow /* audioStart */);
                 linkAnchor = anchor;
                 link = l;
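
Note: every hunk in this commit swaps a raw media-status string ('pendingRecording', 'recording', 'paused', 'playing') for a member of media_state, which is exported from src/client/views/nodes/AudioBox.tsx but whose declaration is not included in the diff. The sketch below is a minimal guess at its shape, assuming a string-valued TypeScript enum whose members map onto the old literals; the names are taken from the call sites above, the values are inferred and not part of the commit.

// Hypothetical reconstruction -- the real declaration lives in AudioBox.tsx and is only
// imported by the files in this diff. String values are assumed so that interpolations
// such as `${media_state.PendingRecording}` in Documents.ts render the same text as the
// old literals did.
export enum media_state {
    PendingRecording = 'pendingRecording', // media doc created, capture not yet started
    Recording = 'recording', // MediaRecorder is actively capturing audio/video
    Paused = 'paused', // playback is paused
    Playing = 'playing', // clip is playing back
}

// Example usage mirroring the CollectionMenu.tsx hunk above:
// Docs.Create.ScreenshotDocument({ title: 'screen recording', mediaState: media_state.PendingRecording });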