import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
import * as React from 'react';
// import { Canvas } from '@react-three/fiber';
import { computed, makeObservable, observable, runInAction } from 'mobx';
import { observer } from 'mobx-react';
// import { BufferAttribute, Camera, Vector2, Vector3 } from 'three';
import { returnFalse, returnOne, returnZero } from '../../../ClientUtils';
import { emptyFunction } from '../../../Utils';
import { DateField } from '../../../fields/DateField';
import { Doc } from '../../../fields/Doc';
import { DocData } from '../../../fields/DocSymbols';
import { Id } from '../../../fields/FieldSymbols';
import { ComputedField } from '../../../fields/ScriptField';
import { Cast, DocCast, NumCast } from '../../../fields/Types';
import { AudioField, VideoField } from '../../../fields/URLField';
import { TraceMobx } from '../../../fields/util';
import { Networking } from '../../Network';
import { DocUtils } from '../../documents/DocUtils';
import { DocumentType } from '../../documents/DocumentTypes';
import { Docs } from '../../documents/Documents';
import { CaptureManager } from '../../util/CaptureManager';
import { SettingsManager } from '../../util/SettingsManager';
import { Movement, TrackMovements } from '../../util/TrackMovements';
import { ContextMenu } from '../ContextMenu';
import { ViewBoxAnnotatableComponent } from '../DocComponent';
import { DocViewUtils } from '../DocViewUtils';
import { CollectionStackedTimeline } from '../collections/CollectionStackedTimeline';
import { CollectionFreeFormView } from '../collections/collectionFreeForm/CollectionFreeFormView';
import { mediaState } from './AudioBox';
import { FieldView, FieldViewProps } from './FieldView';
import './ScreenshotBox.scss';
import { VideoBox } from './VideoBox';
import { FormattedTextBox } from './formattedText/FormattedTextBox';
import { IconProp } from '@fortawesome/fontawesome-svg-core';

// declare class MediaRecorder {
//     constructor(e: any, options?: any); // whatever MediaRecorder has
// }

// interface VideoTileProps {
//     raised: { coord: Vector2; off: Vector3 }[];
//     setRaised: (r: { coord: Vector2; off: Vector3 }[]) => void;
//     x: number;
//     y: number;
//     doc: Doc;
//     color: string;
// }

// @observer
// export class VideoTile extends React.Component {
//     @observable _videoRef: HTMLVideoElement | undefined = undefined;
//     _mesh: any = undefined;
//     render() {
//         const topLeft = [this._props.x, this._props.y];
//         const raised = this._props.raised;
//         const find = (raised: { coord: Vector2; off: Vector3 }[], what: Vector2) => raised.find(r => r.coord.x === what.x && r.coord.y === what.y);
//         const tl1 = find(raised, new Vector2(topLeft[0], topLeft[1] + 1));
//         const tl2 = find(raised, new Vector2(topLeft[0] + 1, topLeft[1] + 1));
//         const tl3 = find(raised, new Vector2(topLeft[0] + 1, topLeft[1]));
//         const tl4 = find(raised, new Vector2(topLeft[0], topLeft[1]));
//         const quad_indices = [0, 2, 1, 0, 3, 2];
//         const quad_uvs = [0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0];
//         const quad_normals = [0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1];
//         const quad_vertices = [
//             topLeft[0] - 0.0 + (tl1?.off.x || 0), topLeft[1] + 1.0 + (tl1?.off.y || 0), 0.0 + (tl1?.off.z || 0),
//             topLeft[0] + 1.0 + (tl2?.off.x || 0), topLeft[1] + 1.0 + (tl2?.off.y || 0), 0.0 + (tl2?.off.z || 0),
//             topLeft[0] + 1.0 + (tl3?.off.x || 0), topLeft[1] - 0.0 + (tl3?.off.y || 0), 0.0 + (tl3?.off.z || 0),
//             topLeft[0] - 0.0 + (tl4?.off.x || 0), topLeft[1] - 0.0 + (tl4?.off.y || 0), 0.0 + (tl4?.off.z || 0)
//         ];
//         const vertices = new Float32Array(quad_vertices);
//         const normals = new Float32Array(quad_normals);
//         const uvs = new Float32Array(quad_uvs); // each vertex has one uv coordinate for texture mapping
//         const indices = new Uint32Array(quad_indices); // use the four vertices to draw the two triangles that make up the square
//         const popOut = () => NumCast(this.Document.popOut);
//         const popOff = () => NumCast(this.Document.popOff);
//         return (
//             <mesh
//                 onPointerDown={action(() => {
//                     this._props.setRaised([
//                         { coord: new Vector2(topLeft[0], topLeft[1]), off: new Vector3(-popOff(), -popOff(), popOut()) },
//                         { coord: new Vector2(topLeft[0] + 1, topLeft[1]), off: new Vector3(popOff(), -popOff(), popOut()) },
//                         { coord: new Vector2(topLeft[0], topLeft[1] + 1), off: new Vector3(-popOff(), popOff(), popOut()) },
//                         { coord: new Vector2(topLeft[0] + 1, topLeft[1] + 1), off: new Vector3(popOff(), popOff(), popOut()) }
//                     ]);
//                     if (!this._videoRef) {
//                         (navigator.mediaDevices as any).getDisplayMedia({ video: true }).then(action((stream: any) => {
//                             // const videoSettings = stream.getVideoTracks()[0].getSettings();
//                             this._videoRef = document.createElement('video');
//                             Object.assign(this._videoRef, {
//                                 srcObject: stream,
//                                 // height: videoSettings.height,
//                                 // width: videoSettings.width,
//                                 autoplay: true
//                             });
//                         }));
//                     }
//                 })}
//                 ref={(r: any) => (this._mesh = r)}>
//                 <bufferGeometry
//                     ref={(r: any) => {
//                         // itemSize = 3 because there are 3 values (components) per vertex
//                         r?.setAttribute('position', new BufferAttribute(vertices, 3));
//                         r?.setAttribute('normal', new BufferAttribute(normals, 3));
//                         r?.setAttribute('uv', new BufferAttribute(uvs, 2));
//                         r?.setIndex(new BufferAttribute(indices, 1));
//                     }}
//                 />
//                 {!this._videoRef ? … : …}
//             </mesh>
//         );
//     }
// }
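// Geometry note on the (disabled) VideoTile above: quad_vertices lists the tile's
// corners in the order top-left, top-right, bottom-right, bottom-left, each nudged by
// any matching `raised` offset. quad_indices = [0, 2, 1, 0, 3, 2] then splits the quad
// into two counter-clockwise (front-facing) triangles, and quad_uvs assigns one texture
// coordinate per corner so a captured video frame can be stretched across the tile.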
@observer
export class ScreenshotBox extends ViewBoxAnnotatableComponent() {
    public static LayoutString(fieldKey: string) {
        return FieldView.LayoutString(ScreenshotBox, fieldKey);
    }

    private _audioRec: MediaRecorder | undefined;
    private _videoRec: MediaRecorder | undefined;
    @observable private _videoRef: HTMLVideoElement | null = null;
    @observable _screenCapture = false;

    // timestamp (in ms) at which the current recording started, if any
    @computed get recordingStart() {
        return Cast(this.dataDoc[this._props.fieldKey + '_recordingStart'], DateField)?.date.getTime();
    }

    constructor(props: FieldViewProps) {
        super(props);
        makeObservable(this);
        this.setupDictation();
    }

    // creates a 3-second timeline anchor at the current playback (or live recording) time so links can target a moment in the capture
    getAnchor = (addAsAnnotation: boolean) => {
        const startTime = Cast(this.layoutDoc._layout_currentTimecode, 'number', null) || (this._videoRec ? (Date.now() - (this.recordingStart || 0)) / 1000 : undefined);
        return CollectionStackedTimeline.createAnchor(this.Document, this.dataDoc, this.annotationKey, startTime, startTime === undefined ? undefined : startTime + 3, undefined, addAsAnnotation) || this.Document;
    };

    // once video metadata is available, derive the document's native size from the capture's aspect ratio
    videoLoad = () => {
        const aspect = (this._videoRef?.videoWidth || 0) / (this._videoRef?.videoHeight || 1);
        const nativeWidth = Doc.NativeWidth(this.layoutDoc);
        const nativeHeight = Doc.NativeHeight(this.layoutDoc);
        if (!nativeWidth || !nativeHeight) {
            if (!nativeWidth) Doc.SetNativeWidth(this.dataDoc, 1200);
            Doc.SetNativeHeight(this.dataDoc, (nativeWidth || 1200) / aspect);
            this.layoutDoc._height = NumCast(this.layoutDoc._width) / aspect;
        }
    };
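    // A worked example of the sizing math above, with illustrative numbers: a 1920x1080
    // screen capture gives aspect = 1920 / 1080 ≈ 1.78. If the document has no native
    // size yet, the native width is pinned at 1200, so the native height becomes
    // 1200 / 1.78 ≈ 675 and the layout height is rescaled to keep the capture's proportions.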
    componentDidMount() {
        this.dataDoc.nativeWidth = this.dataDoc.nativeHeight = 0;
        // this tells the DocumentView that this Box is the "content" of the document.
        // this allows the DocumentView to indirectly call getAnchor() on the AudioBox when making a link.
        this._props.setContentViewBox?.(this);
        // this.layoutDoc.videoWall && reaction(
        //     () => ({ width: this._props.PanelWidth(), height: this._props.PanelHeight() }),
        //     ({ width, height }) => {
        //         if (this._camera) {
        //             const angle = -Math.abs(1 - width / height);
        //             const xz = [0, (this._numScreens - 2) / Math.abs(1 + angle)];
        //             this._camera.position.set(this._numScreens / 2 + xz[1] * Math.sin(angle), this._numScreens / 2, xz[1] * Math.cos(angle));
        //             this._camera.lookAt(this._numScreens / 2, this._numScreens / 2, 0);
        //             (this._camera as any).updateProjectionMatrix();
        //         }
        //     });
    }

    componentWillUnmount() {
        const ind = DocViewUtils.ActiveRecordings.indexOf(this);
        ind !== -1 && DocViewUtils.ActiveRecordings.splice(ind, 1);
    }

    specificContextMenu = (): void => {
        const subitems = [{ description: 'Screen Capture', event: this.toggleRecording, icon: 'expand-arrows-alt' as IconProp }];
        ContextMenu.Instance.addItem({ description: 'Options...', subitems, icon: 'video' });
    };

    // the live preview element for the screen capture. The exact attributes here are a
    // minimal sketch; what the rest of the class relies on is the _videoRef ref and the
    // videoLoad metadata handler.
    @computed get content() {
        return (
            <video
                ref={r => runInAction(() => (this._videoRef = r))}
                autoPlay
                onLoadedMetadata={this.videoLoad}
                style={{ width: '100%', height: '100%' }}
            />
        );
    }

    // _numScreens = 5;
    // _camera: Camera | undefined;
    // @observable _raised = [] as { coord: Vector2; off: Vector3 }[];
    // @action setRaised = (r: { coord: Vector2; off: Vector3 }[]) => (this._raised = r);
    @computed get threed() {
        // if (this.layoutDoc.videoWall) {
        //     const screens: any[] = [];
        //     const colors = ["yellow", "red", "orange", "brown", "maroon", "gray"];
        //     let count = 0;
        //     numberRange(this._numScreens).forEach(x => numberRange(this._numScreens).forEach(y => screens.push(
        //         … // a <VideoTile> per grid cell, presumably using x, y, colors[count++], and the raised/setRaised props
        //     )));
        //     return <Canvas onCreated={props => {
        //         this._camera = props.camera;
        //         props.camera.position.set(this._numScreens / 2, this._numScreens / 2, this._numScreens - 2);
        //         props.camera.lookAt(this._numScreens / 2, this._numScreens / 2, 0);
        //     }}>
        //         {/* … */}
        //         {screens}
        //     </Canvas>;
        // }
        return null;
    }

    Record = () => !this._screenCapture && this.toggleRecording();
    Pause = () => this._screenCapture && this.toggleRecording();

    // starts (or stops) a combined screen and microphone capture. While recording, two
    // MediaRecorders buffer chunks; on stop, the uploads are written back onto the
    // document and this ScreenshotBox converts itself into a VideoBox over the result.
    toggleRecording = async () => {
        if (!this._screenCapture && this._videoRef) {
            this._audioRec = new MediaRecorder(await navigator.mediaDevices.getUserMedia({ audio: true }));
            const audChunks: Blob[] = [];
            this._audioRec.ondataavailable = e => audChunks.push(e.data);
            this._audioRec.onstop = async () => {
                const [{ result }] = await Networking.UploadFilesToServer(audChunks.map(file => ({ file })));
                if (!(result instanceof Error)) {
                    this.dataDoc[this._props.fieldKey + '_audio'] = new AudioField(result.accessPaths.agnostic.client);
                }
            };
            this._videoRef.srcObject = await navigator.mediaDevices.getDisplayMedia({ video: true });
            this._videoRec = new MediaRecorder(this._videoRef.srcObject);
            const vidChunks: Blob[] = [];
            this._videoRec.onstart = () => {
                if (this.dataDoc[this._props.fieldKey + '_trackScreen']) TrackMovements.Instance.start();
                this.dataDoc[this._props.fieldKey + '_recordingStart'] = new DateField(new Date());
            };
            this._videoRec.ondataavailable = e => vidChunks.push(e.data);
            this._videoRec.onstop = async () => {
                // serialize any tracked document movements alongside the recording, storing docs by their Id
                const presentation = TrackMovements.Instance.yieldPresentation();
                if (presentation?.movements) {
                    const presCopy = { ...presentation };
                    presCopy.movements = presentation.movements.map(movement => ({ ...movement, doc: (movement.doc as Doc)[Id] }) as Movement);
                    this.dataDoc[this.fieldKey + '_presentation'] = JSON.stringify(presCopy);
                }
                TrackMovements.Instance.finish();
                const file = new File(vidChunks, `${this.Document[Id]}.mkv`, { type: vidChunks[0].type, lastModified: Date.now() });
                const [{ result }] = await Networking.UploadFilesToServer({ file });
                this.dataDoc[this.fieldKey + '_duration'] = (new Date().getTime() - this.recordingStart!) / 1000;
                if (!(result instanceof Error)) {
                    // convert this ScreenshotBox into a normal VideoBox over the uploaded capture
                    this.dataDoc.type = DocumentType.VID;
                    this.layoutDoc.layout = VideoBox.LayoutString(this.fieldKey);
                    this.dataDoc.nativeWidth = this.dataDoc.nativeHeight = undefined;
                    this.layoutDoc._layout_fitWidth = undefined;
                    this.dataDoc[this._props.fieldKey] = new VideoField(result.accessPaths.agnostic.client);
                } else alert('video conversion failed');
            };
            this._audioRec.start();
            this._videoRec.start();
            runInAction(() => {
                this._screenCapture = true;
                this.dataDoc.mediaState = 'recording';
            });
            DocViewUtils.ActiveRecordings.push(this);
        } else {
            this._audioRec?.stop();
            this._videoRec?.stop();
            runInAction(() => {
                this._screenCapture = false;
                this.dataDoc.mediaState = 'paused';
            });
            const ind = DocViewUtils.ActiveRecordings.indexOf(this);
            ind !== -1 && DocViewUtils.ActiveRecordings.splice(ind, 1);
            CaptureManager.Instance.open(this.Document);
        }
    };
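    // The recording flow above in miniature (an illustrative sketch; `upload` stands in
    // for Networking.UploadFilesToServer): getUserMedia/getDisplayMedia supply the
    // microphone and screen streams, one MediaRecorder per stream buffers Blob chunks
    // through ondataavailable, and onstop assembles and uploads them:
    //
    //     const stream = await navigator.mediaDevices.getDisplayMedia({ video: true });
    //     const rec = new MediaRecorder(stream);
    //     const chunks: Blob[] = [];
    //     rec.ondataavailable = e => chunks.push(e.data);
    //     rec.onstop = () => upload(new File(chunks, 'capture.mkv', { type: chunks[0].type }));
    //     rec.start();
    //     // ...later, rec.stop() flushes a final ondataavailable and then fires onstop.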
    // creates (once) a companion text document positioned just below this one whose
    // recording-start time and media state mirror this document's, so dictation stays
    // synchronized with the capture
    setupDictation = () => {
        if (this.dataDoc[this.fieldKey + '_dictation']) return;
        const dictationText = DocUtils.GetNewTextDoc(
            'dictation',
            NumCast(this.Document.x),
            NumCast(this.Document.y) + NumCast(this.layoutDoc._height) + 10,
            NumCast(this.layoutDoc._width),
            2 * NumCast(this.layoutDoc._height)
        );
        const textField = Doc.LayoutFieldKey(dictationText);
        dictationText._layout_autoHeight = false;
        const dictationTextProto = dictationText[DocData];
        dictationTextProto[`${textField}_recordingSource`] = this.dataDoc;
        dictationTextProto[`${textField}_recordingStart`] = ComputedField.MakeFunction(`this.${textField}_recordingSource.${this.fieldKey}_recordingStart`);
        dictationTextProto.mediaState = ComputedField.MakeFunction(`this.${textField}_recordingSource.mediaState`);
        this.dataDoc[this.fieldKey + '_dictation'] = dictationText;
    };

    // height of the video panel, scaled from the capture's native proportions to the current panel width
    videoPanelHeight = () =>
        (NumCast(this.dataDoc[this.fieldKey + '_nativeHeight'], NumCast(this.layoutDoc._height)) /
            NumCast(this.dataDoc[this.fieldKey + '_nativeWidth'], NumCast(this.layoutDoc._width))) *
        this._props.PanelWidth();

    // whatever vertical space remains goes to the dictation text panel
    formattedPanelHeight = () => Math.max(0, this._props.PanelHeight() - this.videoPanelHeight());
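    // A worked example of the panel split above, with illustrative numbers: given a
    // native size of 1200x675 and a 600x500 panel, videoPanelHeight() = (675 / 1200) * 600
    // = 337.5, and formattedPanelHeight() = max(0, 500 - 337.5) = 162.5 remains for the
    // dictation text below the video.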
    // note: the JSX below follows the helpers above (3D wall if enabled, video content,
    // an optional dictation panel, and a record toggle when selected); the container
    // markup, class names, icon choice, and FieldView props are a minimal sketch.
    render() {
        TraceMobx();
        return (
            <>
                {this.threed}
                {this.content}
                {!(this.dataDoc[this.fieldKey + '_dictation'] instanceof Doc) ? null : (
                    <div className="screenshotBox-dictation" style={{ position: 'relative', height: this.formattedPanelHeight() }}>
                        <FormattedTextBox {...this._props} Document={DocCast(this.dataDoc[this.fieldKey + '_dictation'])} />
                    </div>
                )}
                {!this._props.isSelected() ? null : (
                    <div className="screenshotBox-recorder" onPointerDown={this.toggleRecording}>
                        <FontAwesomeIcon icon={(this._screenCapture ? 'stop' : 'video') as IconProp} size="lg" />
                    </div>
                )}
            </>
        );
    }
}

Docs.Prototypes.TemplateMap.set(DocumentType.SCREENSHOT, {
    layout: { view: ScreenshotBox, dataField: 'data' },
    options: { acl: '', _layout_nativeDimEditable: true, systemIcon: 'BsCameraFill' },
});
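// Usage sketch (not part of the original file; `makeScreenshotDoc` is hypothetical):
// the registration above routes any document whose type is DocumentType.SCREENSHOT
// through ScreenshotBox, binding its 'data' field as the recording target. This mirrors
// the inverse conversion done in toggleRecording's onstop handler, which rewrites
// type/layout to turn the finished capture into a VideoBox.
//
//     const makeScreenshotDoc = (doc: Doc) => {
//         doc.type = DocumentType.SCREENSHOT;              // resolved via Docs.Prototypes.TemplateMap
//         doc.layout = ScreenshotBox.LayoutString('data'); // render this view bound to the 'data' field
//     };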