Diffstat (limited to 'src')
-rw-r--r-- | src/client/util/CurrentUserUtils.ts       |   5 |
-rw-r--r-- | src/client/views/nodes/ScreenshotBox.scss |   7 |
-rw-r--r-- | src/client/views/nodes/ScreenshotBox.tsx  | 186 |
3 files changed, 164 insertions, 34 deletions
diff --git a/src/client/util/CurrentUserUtils.ts b/src/client/util/CurrentUserUtils.ts
index 3add0b8be..0dc8cc5c0 100644
--- a/src/client/util/CurrentUserUtils.ts
+++ b/src/client/util/CurrentUserUtils.ts
@@ -425,6 +425,10 @@ export class CurrentUserUtils {
         if (doc.emptyScreenshot === undefined) {
             doc.emptyScreenshot = Docs.Create.ScreenshotDocument("", { _fitWidth: true, _width: 400, _height: 200, title: "screen snapshot", system: true, cloneFieldFilter: new List<string>(["system"]) });
         }
+        if (doc.emptyWall === undefined) {
+            doc.emptyWall = Docs.Create.ScreenshotDocument("", { _fitWidth: true, _width: 400, _height: 200, title: "screen snapshot", system: true, cloneFieldFilter: new List<string>(["system"]) });
+            (doc.emptyWall as Doc).videoWall = true;
+        }
         if (doc.emptyAudio === undefined) {
             doc.emptyAudio = Docs.Create.AudioDocument(nullAudio, { _width: 200, title: "audio recording", system: true, cloneFieldFilter: new List<string>(["system"]) });
             ((doc.emptyAudio as Doc).proto as Doc)["dragFactory-count"] = 0;
@@ -454,6 +458,7 @@ export class CurrentUserUtils {
             { toolTip: "Tap to create a cat image in a new pane, drag for a cat image", title: "Image", icon: "cat", click: 'openOnRight(copyDragFactory(this.dragFactory))', drag: 'copyDragFactory(this.dragFactory)', dragFactory: doc.emptyImage as Doc },
             { toolTip: "Tap to create a comparison box in a new pane, drag for a comparison box", title: "Compare", icon: "columns", click: 'openOnRight(copyDragFactory(this.dragFactory))', drag: 'copyDragFactory(this.dragFactory)', dragFactory: doc.emptyComparison as Doc, noviceMode: true },
             { toolTip: "Tap to create a screen grabber in a new pane, drag for a screen grabber", title: "Grab", icon: "photo-video", click: 'openOnRight(copyDragFactory(this.dragFactory))', drag: 'copyDragFactory(this.dragFactory)', dragFactory: doc.emptyScreenshot as Doc, noviceMode: true },
+            { toolTip: "Tap to create a videoWall", title: "Wall", icon: "photo-video", click: 'openOnRight(copyDragFactory(this.dragFactory))', drag: 'copyDragFactory(this.dragFactory)', dragFactory: doc.emptyWall as Doc },
             { toolTip: "Tap to create an audio recorder in a new pane, drag for an audio recorder", title: "Audio", icon: "microphone", click: 'openOnRight(copyDragFactory(this.dragFactory))', drag: 'copyDragFactory(this.dragFactory)', dragFactory: doc.emptyAudio as Doc, noviceMode: true },
             { toolTip: "Tap to create a button in a new pane, drag for a button", title: "Button", icon: "bolt", click: 'openOnRight(copyDragFactory(this.dragFactory))', drag: 'copyDragFactory(this.dragFactory)', dragFactory: doc.emptyButton as Doc },
             { toolTip: "Tap to create a presentation in a new pane, drag for a presentation", title: "Trails", icon: "pres-trail", click: 'openOnRight(Doc.UserDoc().activePresentation = copyDragFactory(this.dragFactory))', drag: `Doc.UserDoc().activePresentation = copyDragFactory(this.dragFactory)`, dragFactory: doc.emptyPresentation as Doc, noviceMode: true },
diff --git a/src/client/views/nodes/ScreenshotBox.scss b/src/client/views/nodes/ScreenshotBox.scss
index ab54cf526..6fb5ea7b3 100644
--- a/src/client/views/nodes/ScreenshotBox.scss
+++ b/src/client/views/nodes/ScreenshotBox.scss
@@ -10,6 +10,13 @@
     // }
 }
 
+#CANCAN {
+    canvas {
+        width:100% !important;
+        height: 100% !important;
+    }
+}
+
 .screenshotBox-content, .screenshotBox-content-interactive, .screenshotBox-cont-fullScreen {
     width: 100%;
     z-index: -1; // 0; // logically this should be 0 (or unset) which would give us transparent brush strokes over videos. However, this makes Chrome crawl to a halt
diff --git a/src/client/views/nodes/ScreenshotBox.tsx b/src/client/views/nodes/ScreenshotBox.tsx
index 8163b652c..862344a94 100644
--- a/src/client/views/nodes/ScreenshotBox.tsx
+++ b/src/client/views/nodes/ScreenshotBox.tsx
@@ -1,9 +1,9 @@
 import React = require("react");
 import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
-import { action, computed, observable } from "mobx";
+import { action, computed, observable, reaction } from "mobx";
 import { observer } from "mobx-react";
 import { DateField } from "../../../fields/DateField";
-import { Doc, WidthSym } from "../../../fields/Doc";
+import { Doc, WidthSym, HeightSym } from "../../../fields/Doc";
 import { documentSchema } from "../../../fields/documentSchemas";
 import { Id } from "../../../fields/FieldSymbols";
 import { InkTool } from "../../../fields/InkField";
@@ -11,7 +11,7 @@ import { makeInterface } from "../../../fields/Schema";
 import { ComputedField } from "../../../fields/ScriptField";
 import { Cast, NumCast } from "../../../fields/Types";
 import { AudioField, VideoField } from "../../../fields/URLField";
-import { emptyFunction, OmitKeys, returnFalse, returnOne, Utils } from "../../../Utils";
+import { emptyFunction, OmitKeys, returnFalse, returnOne, Utils, numberRange } from "../../../Utils";
 import { DocUtils } from "../../documents/Documents";
 import { DocumentType } from "../../documents/DocumentTypes";
 import { Networking } from "../../Network";
@@ -25,6 +25,9 @@ import "./ScreenshotBox.scss";
 import { VideoBox } from "./VideoBox";
 import { TraceMobx } from "../../../fields/util";
 import { FormattedTextBox } from "./formattedText/FormattedTextBox";
+import { Canvas } from 'react-three-fiber';
+import * as THREE from 'three';
+import { Vector3, Vector2, Camera } from "three"
 declare class MediaRecorder {
     constructor(e: any, options?: any); // whatever MediaRecorder has
 }
@@ -32,18 +35,100 @@ declare class MediaRecorder {
 type ScreenshotDocument = makeInterface<[typeof documentSchema]>;
 const ScreenshotDocument = makeInterface(documentSchema);
 
+interface VideoTileProps {
+    raised: { coord: Vector2, off: Vector3 }[];
+    setRaised: (r: { coord: Vector2, off: Vector3 }[]) => void;
+    x: number;
+    y: number;
+    rootDoc: Doc;
+    color: string;
+}
+
+@observer
+export class VideoTile extends React.Component<VideoTileProps> {
+    @observable _videoRef: HTMLVideoElement | undefined;
+    _mesh: any = undefined;
+
+    render() {
+        const topLeft = [this.props.x, this.props.y];
+        const raised = this.props.raised;
+        const find = (raised: { coord: Vector2, off: Vector3 }[], what: Vector2) => raised.find(r => r.coord.x === what.x && r.coord.y === what.y);
+        const tl1 = find(raised, new Vector2(topLeft[0], topLeft[1] + 1));
+        const tl2 = find(raised, new Vector2(topLeft[0] + 1, topLeft[1] + 1));
+        const tl3 = find(raised, new Vector2(topLeft[0] + 1, topLeft[1]));
+        const tl4 = find(raised, new Vector2(topLeft[0], topLeft[1]));
+        const quad_indices = [0, 2, 1, 0, 3, 2];
+        const quad_uvs = [0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0];
+        const quad_normals = [0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1,];
+        const quad_vertices =
+            [
+                topLeft[0] - 0.0 + (tl1?.off.x || 0), topLeft[1] + 1.0 + (tl1?.off.y || 0), 0.0 + (tl1?.off.z || 0),
+                topLeft[0] + 1.0 + (tl2?.off.x || 0), topLeft[1] + 1.0 + (tl2?.off.y || 0), 0.0 + (tl2?.off.z || 0),
+                topLeft[0] + 1.0 + (tl3?.off.x || 0), topLeft[1] - 0.0 + (tl3?.off.y || 0), 0.0 + (tl3?.off.z || 0),
+                topLeft[0] - 0.0 + (tl4?.off.x || 0), topLeft[1] - 0.0 + (tl4?.off.y || 0), 0.0 + (tl4?.off.z || 0)
+            ];
+
+        const vertices = new Float32Array(quad_vertices);
+        const normals = new Float32Array(quad_normals);
+        const uvs = new Float32Array(quad_uvs); // Each vertex has one uv coordinate for texture mapping
+        const indices = new Uint32Array(quad_indices); // Use the four vertices to draw the two triangles that make up the square.
+        const popOut = () => NumCast(this.props.rootDoc.popOut);
+        const popOff = () => NumCast(this.props.rootDoc.popOff);
+        return (
+            <mesh key={`mesh${topLeft[0]}${topLeft[1]}`} onClick={action(async e => {
+                this.props.setRaised([
+                    { coord: new Vector2(topLeft[0], topLeft[1]), off: new Vector3(-popOff(), -popOff(), popOut()) },
+                    { coord: new Vector2(topLeft[0] + 1, topLeft[1]), off: new Vector3(popOff(), -popOff(), popOut()) },
+                    { coord: new Vector2(topLeft[0], topLeft[1] + 1), off: new Vector3(-popOff(), popOff(), popOut()) },
+                    { coord: new Vector2(topLeft[0] + 1, topLeft[1] + 1), off: new Vector3(popOff(), popOff(), popOut()) }
+                ]);
+                if (!this._videoRef) {
+                    (navigator.mediaDevices as any).getDisplayMedia({ video: true }).then(action((stream: any) => {
+                        //const videoSettings = stream.getVideoTracks()[0].getSettings();
+                        this._videoRef = document.createElement("video");
+                        Object.assign(this._videoRef, {
+                            srcObject: stream,
+                            //height: videoSettings.height,
+                            //width: videoSettings.width,
+                            autoplay: true
+                        });
+                    }));
+                }
+            })} ref={(r: any) => this._mesh = r}>
+                <bufferGeometry attach="geometry" ref={(r: any) => {
+                    // itemSize = 3 because there are 3 values (components) per vertex
+                    r?.setAttribute('position', new THREE.BufferAttribute(vertices, 3));
+                    r?.setAttribute('normal', new THREE.BufferAttribute(normals, 3));
+                    r?.setAttribute('uv', new THREE.BufferAttribute(uvs, 2));
+                    r?.setIndex(new THREE.BufferAttribute(indices, 1));
+                }} />
+                {!this._videoRef ? <meshStandardMaterial color={this.props.color} /> :
+                    <meshBasicMaterial >
+                        <videoTexture attach="map" args={[this._videoRef]} />
+                    </meshBasicMaterial>}
+            </mesh>
+        )
+    };
+}
+
 @observer
 export class ScreenshotBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProps & FieldViewProps, ScreenshotDocument>(ScreenshotDocument) {
     public static LayoutString(fieldKey: string) { return FieldView.LayoutString(ScreenshotBox, fieldKey); }
-    private _videoRef = React.createRef<HTMLVideoElement>();
     private _audioRec: any;
     private _videoRec: any;
+    @observable private _videoRef: HTMLVideoElement | undefined;
     @observable _screenCapture = false;
     @computed get recordingStart() { return Cast(this.dataDoc[this.props.fieldKey + "-recordingStart"], DateField)?.date.getTime(); }
 
     constructor(props: any) {
         super(props);
-        this.setupDictation();
+        if (!this.rootDoc.videoWall) this.setupDictation();
+        else {
+            this.rootDoc.nativeWidth = undefined;
+            this.rootDoc.nativeHeight = undefined;
+            this.layoutDoc.popOff = 0;
+            this.layoutDoc.popOut = 1;
+        }
     }
     getAnchor = () => {
         const startTime = Cast(this.layoutDoc._currentTimecode, "number", null) || (this._videoRec ? (Date.now() - (this.recordingStart || 0)) / 1000 : undefined);
@@ -53,7 +138,7 @@ export class ScreenshotBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatabl
     }
 
     videoLoad = () => {
-        const aspect = this._videoRef.current!.videoWidth / this._videoRef.current!.videoHeight;
+        const aspect = this._videoRef!.videoWidth / this._videoRef!.videoHeight;
         const nativeWidth = Doc.NativeWidth(this.layoutDoc);
         const nativeHeight = Doc.NativeHeight(this.layoutDoc);
         if (!nativeWidth || !nativeHeight) {
@@ -66,6 +151,16 @@
     componentDidMount() {
         this.dataDoc.nativeWidth = this.dataDoc.nativeHeight = 0;
         this.props.setContentView?.(this); // this tells the DocumentView that this ScreenshotBox is the "content" of the document. this allows the DocumentView to indirectly call getAnchor() on the AudioBox when making a link.
+        this.rootDoc.videoWall && reaction(() => ({ width: this.props.PanelWidth(), height: this.props.PanelHeight() }),
+            ({ width, height }) => {
+                if (this._camera) {
+                    const angle = -Math.abs(1 - width / height);
+                    const xz = [0, (this._numScreens - 2) / Math.abs(1 + angle)];
+                    this._camera.position.set(this._numScreens / 2 + xz[1] * Math.sin(angle), this._numScreens / 2, xz[1] * Math.cos(angle));
+                    this._camera.lookAt(this._numScreens / 2, this._numScreens / 2, 0);
+                    (this._camera as any).updateProjectionMatrix();
+                }
+            });
     }
     componentWillUnmount() {
         const ind = DocUtils.ActiveRecordings.indexOf(this);
@@ -79,7 +174,7 @@ export class ScreenshotBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatabl
 
     @computed get content() {
         const interactive = CurrentUserUtils.SelectedTool !== InkTool.None || !this.props.isSelected() ? "" : "-interactive";
-        return <video className={"videoBox-content" + interactive} key="video" ref={this._videoRef}
+        return <video className={"videoBox-content" + interactive} key="video" ref={action((r: any) => this._videoRef = r)}
            autoPlay={this._screenCapture}
            style={{ width: this._screenCapture ? "100%" : undefined, height: this._screenCapture ? "100%" : undefined }}
            onCanPlay={this.videoLoad}
"100%" : undefined }} onCanPlay={this.videoLoad} @@ -90,6 +185,27 @@ export class ScreenshotBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatabl </video>; } + _numScreens = 5; + _camera: Camera | undefined; + @observable _raised = [] as { coord: Vector2, off: Vector3 }[]; + @action setRaised = (r: { coord: Vector2, off: Vector3 }[]) => this._raised = r; + @computed get threed() { + if (!this.rootDoc.videoWall) return (null); + const screens: any[] = []; + const colors = ["yellow", "red", "orange", "brown", "maroon", "gray"]; + let count = 0; + numberRange(this._numScreens).forEach(x => numberRange(this._numScreens).forEach(y => screens.push( + <VideoTile rootDoc={this.rootDoc} color={colors[count++ % colors.length]} x={x} y={y} raised={this._raised} setRaised={this.setRaised} />))); + return <Canvas key="canvas" id="CANCAN" style={{ width: this.props.PanelWidth(), height: this.props.PanelHeight() }} gl={{ antialias: false }} colorManagement={false} onCreated={props => { + this._camera = props.camera; + props.camera.position.set(this._numScreens / 2, this._numScreens / 2, this._numScreens - 2); + props.camera.lookAt(this._numScreens / 2, this._numScreens / 2, 0); + }}> + {/* <ambientLight />*/} + <pointLight position={[10, 10, 10]} intensity={1} /> + {screens} + </ Canvas> + }; toggleRecording = action(async () => { this._screenCapture = !this._screenCapture; if (this._screenCapture) { @@ -102,8 +218,8 @@ export class ScreenshotBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatabl this.dataDoc[this.props.fieldKey + "-audio"] = new AudioField(Utils.prepend(result.accessPaths.agnostic.client)); } }; - this._videoRef.current!.srcObject = await (navigator.mediaDevices as any).getDisplayMedia({ video: true }); - this._videoRec = new MediaRecorder(this._videoRef.current!.srcObject); + this._videoRef!.srcObject = await (navigator.mediaDevices as any).getDisplayMedia({ video: true }); + this._videoRec = new MediaRecorder(this._videoRef!.srcObject); const vid_chunks: any = []; this._videoRec.onstart = () => this.dataDoc[this.props.fieldKey + "-recordingStart"] = new DateField(new Date()); this._videoRec.ondataavailable = (e: any) => vid_chunks.push(e.data); @@ -144,14 +260,14 @@ export class ScreenshotBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatabl dictationTextProto.mediaState = ComputedField.MakeFunction("self.recordingSource.mediaState"); this.dataDoc[this.fieldKey + "-dictation"] = dictationText; } - contentFunc = () => [this.content]; + contentFunc = () => [this.threed, this.content]; videoPanelHeight = () => NumCast(this.dataDoc[this.fieldKey + "-nativeHeight"], 1) / NumCast(this.dataDoc[this.fieldKey + "-nativeWidth"], 1) * this.props.PanelWidth(); formattedPanelHeight = () => Math.max(0, this.props.PanelHeight() - this.videoPanelHeight()); render() { TraceMobx(); return <div className="videoBox" onContextMenu={this.specificContextMenu} style={{ width: "100%", height: "100%" }} > <div className="videoBox-viewer" > - <div style={{ position: "relative", height: this.videoPanelHeight() }}> + <div style={{ position: "relative", height: "100%" }}> <CollectionFreeFormView {...OmitKeys(this.props, ["NativeWidth", "NativeHeight"]).omit} PanelHeight={this.videoPanelHeight} PanelWidth={this.props.PanelWidth} @@ -171,29 +287,31 @@ export class ScreenshotBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatabl ContainingCollectionDoc={this.props.ContainingCollectionDoc}> {this.contentFunc} </CollectionFreeFormView></div> - <div style={{ position: "relative", height: 
this.formattedPanelHeight() }}> - <FormattedTextBox {...OmitKeys(this.props, ["NativeWidth", "NativeHeight"]).omit} - Document={this.dataDoc[this.fieldKey + "-dictation"]} - fieldKey={"text"} - PanelHeight={this.formattedPanelHeight} - PanelWidth={this.props.PanelWidth} - focus={this.props.focus} - isSelected={this.props.isSelected} - isAnnotationOverlay={true} - select={emptyFunction} - isContentActive={returnFalse} - scaling={returnOne} - xMargin={25} - yMargin={10} - whenChildContentsActiveChanged={emptyFunction} - removeDocument={returnFalse} - moveDocument={returnFalse} - addDocument={returnFalse} - CollectionView={undefined} - ScreenToLocalTransform={this.props.ScreenToLocalTransform} - renderDepth={this.props.renderDepth + 1} - ContainingCollectionDoc={this.props.ContainingCollectionDoc}> - </FormattedTextBox></div> + {!(this.dataDoc[this.fieldKey + "-dictation"] instanceof Doc) ? (null) : + <div className="videoBox-dictation" style={{ position: "relative", height: this.formattedPanelHeight() }}> + <FormattedTextBox {...OmitKeys(this.props, ["NativeWidth", "NativeHeight"]).omit} + Document={this.dataDoc[this.fieldKey + "-dictation"]} + fieldKey={"text"} + PanelHeight={this.formattedPanelHeight} + PanelWidth={this.props.PanelWidth} + focus={this.props.focus} + isSelected={this.props.isSelected} + isAnnotationOverlay={true} + select={emptyFunction} + isContentActive={returnFalse} + scaling={returnOne} + xMargin={25} + yMargin={10} + whenChildContentsActiveChanged={emptyFunction} + removeDocument={returnFalse} + moveDocument={returnFalse} + addDocument={returnFalse} + CollectionView={undefined} + ScreenToLocalTransform={this.props.ScreenToLocalTransform} + renderDepth={this.props.renderDepth + 1} + ContainingCollectionDoc={this.props.ContainingCollectionDoc}> + </FormattedTextBox> + </div>} </div> {!this.props.isSelected() ? (null) : <div className="screenshotBox-uiButtons"> <div className="screenshotBox-recorder" key="snap" onPointerDown={this.toggleRecording} > |
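
A minimal standalone sketch of the pattern the new VideoTile relies on, for readers trying it outside Dash: a click handler grabs a getDisplayMedia stream into an off-screen <video> element and maps it onto a mesh through react-three-fiber's videoTexture. It assumes the same react-three-fiber/three versions used by this commit; the ScreenTile and Wall names are illustrative and not part of the change.

import * as React from "react";
import { Canvas } from "react-three-fiber";

// Hypothetical single tile: click to start a screen share and use it as the mesh's texture.
function ScreenTile() {
    const [video, setVideo] = React.useState<HTMLVideoElement | undefined>(undefined);
    const grab = () =>
        (navigator.mediaDevices as any).getDisplayMedia({ video: true }).then((stream: MediaStream) => {
            const v = document.createElement("video"); // off-screen element that feeds the texture
            Object.assign(v, { srcObject: stream, autoplay: true });
            setVideo(v);
        });
    return (
        <mesh onClick={grab}>
            <planeBufferGeometry attach="geometry" args={[1, 1]} />
            {!video ? <meshStandardMaterial color="gray" /> :
                <meshBasicMaterial>
                    <videoTexture attach="map" args={[video]} />
                </meshBasicMaterial>}
        </mesh>
    );
}

// Hypothetical wrapper: one tile inside a react-three-fiber Canvas with a light for the placeholder material.
export const Wall = () => (
    <Canvas>
        <pointLight position={[10, 10, 10]} intensity={1} />
        <ScreenTile />
    </Canvas>
);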