From d0f130d21c3e029592f376ff205b6f82b76b4e6e Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Tue, 4 Jun 2019 20:04:19 -0400 Subject: initial commit --- src/client/views/MainView.tsx | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) (limited to 'src') diff --git a/src/client/views/MainView.tsx b/src/client/views/MainView.tsx index a093ffdec..408d454f4 100644 --- a/src/client/views/MainView.tsx +++ b/src/client/views/MainView.tsx @@ -52,6 +52,15 @@ export class MainView extends React.Component { } } + componentWillMount() { + document.removeEventListener("keydown", this.globalKeyHandler); + document.addEventListener("keydown", this.globalKeyHandler); + } + + componentWillUnMount() { + document.removeEventListener("keydown", this.globalKeyHandler); + } + constructor(props: Readonly<{}>) { super(props); MainView.Instance = this; @@ -299,6 +308,19 @@ export class MainView extends React.Component { this.isSearchVisible = !this.isSearchVisible; } + globalKeyHandler = (e: KeyboardEvent) => { + if (e.key === "Control" || !e.ctrlKey) return; + + e.preventDefault(); + e.stopPropagation(); + + switch (e.key) { + case "ArrowRight": + CollectionDockingView.Instance.AddRightSplit(this.mainContainer!); + console.log("split screen right"); + } + } + render() { return (
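A minimal sketch, assumed and not the project's actual code, of the global key-handler lifecycle this first commit sets up. Two details are worth noting: React only invokes a cleanup method named exactly componentWillUnmount, so the componentWillUnMount spelling above is never called and the keydown listener would leak; and registering in componentDidMount avoids the deprecated componentWillMount while keeping the remove-before-add guard against double registration.

import * as React from "react";

export class GlobalKeyHandlerExample extends React.Component {
    componentDidMount() {
        // remove first so a remount never registers the same handler twice
        document.removeEventListener("keydown", this.globalKeyHandler);
        document.addEventListener("keydown", this.globalKeyHandler);
    }

    componentWillUnmount() {
        document.removeEventListener("keydown", this.globalKeyHandler);
    }

    globalKeyHandler = (e: KeyboardEvent) => {
        // ignore the bare Control keypress and any chord without Control held
        if (e.key === "Control" || !e.ctrlKey) return;
        switch (e.key) {
            case "ArrowRight":
                e.preventDefault();
                e.stopPropagation();
                // the split-screen-right behavior would be triggered here
                break;
        }
    }

    render() {
        return null;
    }
}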
-- cgit v1.2.3-70-g09d2 From a30cbfd90f3b5207fc790a1c8dc61e58f69f4e38 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Wed, 5 Jun 2019 22:13:34 -0400 Subject: tab focus shifting on tab drag over and beginnings of global key handling, including splitting with control + rightarrow --- src/client/views/MainView.tsx | 36 +++++++++++++++++++--- .../views/collections/CollectionDockingView.tsx | 20 ++++++++++++ 2 files changed, 52 insertions(+), 4 deletions(-) (limited to 'src') diff --git a/src/client/views/MainView.tsx b/src/client/views/MainView.tsx index 408d454f4..67a026897 100644 --- a/src/client/views/MainView.tsx +++ b/src/client/views/MainView.tsx @@ -26,13 +26,14 @@ import { PreviewCursor } from './PreviewCursor'; import { SearchBox } from './SearchBox'; import { SelectionManager } from '../util/SelectionManager'; import { FieldResult, Field, Doc, Opt, DocListCast } from '../../new_fields/Doc'; -import { Cast, FieldValue, StrCast } from '../../new_fields/Types'; +import { Cast, FieldValue, StrCast, PromiseValue } from '../../new_fields/Types'; import { DocServer } from '../DocServer'; import { listSpec } from '../../new_fields/Schema'; import { Id } from '../../new_fields/FieldSymbols'; import { HistoryUtil } from '../util/History'; import { CollectionBaseView } from './collections/CollectionBaseView'; - +import { timingSafeEqual } from 'crypto'; +import * as _ from "lodash"; @observer export class MainView extends React.Component { @@ -43,6 +44,14 @@ export class MainView extends React.Component { @computed private get mainContainer(): Opt { return FieldValue(Cast(CurrentUserUtils.UserDocument.activeWorkspace, Doc)); } + @computed private get mainFreeform(): Opt { + let docs = DocListCast(this.mainContainer!.data); + return (docs && docs.length > 1) ? docs[1] : undefined; + } + private globalDisplayFlags = observable({ + jumpToVisible: false + }); + private set mainContainer(doc: Opt) { if (doc) { if (!("presentationView" in doc)) { @@ -308,6 +317,7 @@ export class MainView extends React.Component { this.isSearchVisible = !this.isSearchVisible; } + @action globalKeyHandler = (e: KeyboardEvent) => { if (e.key === "Control" || !e.ctrlKey) return; @@ -316,11 +326,28 @@ export class MainView extends React.Component { switch (e.key) { case "ArrowRight": - CollectionDockingView.Instance.AddRightSplit(this.mainContainer!); - console.log("split screen right"); + if (this.mainFreeform) { + CollectionDockingView.Instance.AddRightSplit(this.mainFreeform!); + } + break; + case "ArrowLeft": + if (this.mainFreeform) { + CollectionDockingView.Instance.CloseRightSplit(this.mainFreeform!); + } + break; + case "o": + this.globalDisplayFlags.jumpToVisible = true; + break; + case "escape": + _.mapValues(this.globalDisplayFlags, () => false) + break; } } + renderJumpTo = () => { + return
JUMP TO
; + } + render() { return (
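Two behavioral notes on the key handler above, with a sketch (assumed names, not the project's code) of the intended flag reset: KeyboardEvent.key reports the escape key as "Escape", not "escape", so that case never matches; and lodash's mapValues returns a new object rather than mutating its argument, so discarding its result leaves the observable flags unchanged. Mutating each key inside a MobX action does what the case intends.

import { action, observable } from "mobx";

const globalDisplayFlags = observable({ jumpToVisible: false });

// reset every flag in place so MobX observers actually see the change
const resetDisplayFlags = action(() => {
    Object.keys(globalDisplayFlags).forEach(key =>
        (globalDisplayFlags as any)[key] = false);
});

document.addEventListener("keydown", e => {
    if (e.key === "Escape") resetDisplayFlags();
});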
@@ -332,6 +359,7 @@ export class MainView extends React.Component { {this.miscButtons} + {this.globalDisplayFlags.jumpToVisible ? this.renderJumpTo() : (null)}
); } diff --git a/src/client/views/collections/CollectionDockingView.tsx b/src/client/views/collections/CollectionDockingView.tsx index dcc1bd95d..4b7868e81 100644 --- a/src/client/views/collections/CollectionDockingView.tsx +++ b/src/client/views/collections/CollectionDockingView.tsx @@ -44,6 +44,7 @@ export class CollectionDockingView extends React.Component(); private _flush: boolean = false; private _ignoreStateChange = ""; + private _isPointerDown = false; constructor(props: SubCollectionViewProps) { super(props); @@ -247,6 +248,7 @@ export class CollectionDockingView extends React.Component { + this._isPointerDown = false; if (this._flush) { this._flush = false; setTimeout(() => this.stateChanged(), 10); @@ -254,6 +256,7 @@ export class CollectionDockingView extends React.Component { + this._isPointerDown = true; var className = (e.target as any).className; if (className === "messageCounter") { e.stopPropagation(); @@ -334,6 +337,23 @@ export class CollectionDockingView extends React.Component { + if (!this._isPointerDown) return; + var activeContentItem = tab.header.parent.getActiveContentItem(); + if (tab.contentItem !== activeContentItem) { + tab.header.parent.setActiveContentItem(tab.contentItem); + } + tab.setActive(true); + } + // tab.element[0].ondragenter = (e: any) => { + // console.log("DRAGGING OVER DETECTED!"); + // console.log(e); + // } + // tab.element[0].ondrag = (e: any) => { + // console.log("DRAGGING!"); + // console.log(e); + // } ReactDOM.render( CollectionDockingView.Instance.AddTab(stack, doc)} />, upDiv); tab.reactComponents = [upDiv]; tab.element.append(upDiv); -- cgit v1.2.3-70-g09d2 From 79b37db46fda36cd779645256b03d9d074141eb6 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Wed, 5 Jun 2019 22:13:49 -0400 Subject: cleanup and documentation --- src/client/views/collections/CollectionDockingView.tsx | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) (limited to 'src') diff --git a/src/client/views/collections/CollectionDockingView.tsx b/src/client/views/collections/CollectionDockingView.tsx index 4b7868e81..1adb73bcf 100644 --- a/src/client/views/collections/CollectionDockingView.tsx +++ b/src/client/views/collections/CollectionDockingView.tsx @@ -337,7 +337,7 @@ export class CollectionDockingView extends React.Component { if (!this._isPointerDown) return; var activeContentItem = tab.header.parent.getActiveContentItem(); @@ -346,14 +346,6 @@ export class CollectionDockingView extends React.Component { - // console.log("DRAGGING OVER DETECTED!"); - // console.log(e); - // } - // tab.element[0].ondrag = (e: any) => { - // console.log("DRAGGING!"); - // console.log(e); - // } ReactDOM.render( CollectionDockingView.Instance.AddTab(stack, doc)} />, upDiv); tab.reactComponents = [upDiv]; tab.element.append(upDiv); -- cgit v1.2.3-70-g09d2 From c789df5ae7a9e364f0d95b54f4a2f330b536a393 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Tue, 11 Jun 2019 13:29:48 -0400 Subject: some inline documentation and new template skeletons --- src/client/views/Templates.tsx | 12 +++- .../views/collections/CollectionBaseView.tsx | 13 ++-- .../caption_toggle/DetailedCaptionToggle.tsx | 72 ++++++++++++++++++++++ .../document_templates/image_card/ImageCard.tsx | 18 ++++++ src/client/views/nodes/DocumentContentsView.tsx | 3 +- src/client/views/nodes/FieldView.tsx | 1 + src/client/views/nodes/FormattedTextBox.tsx | 1 - src/documentation/collection_hierarchies.txt | 50 +++++++++++++++ 8 files changed, 161 insertions(+), 9 deletions(-) create mode 100644 
src/client/views/document_templates/caption_toggle/DetailedCaptionToggle.tsx create mode 100644 src/client/views/document_templates/image_card/ImageCard.tsx create mode 100644 src/documentation/collection_hierarchies.txt (limited to 'src') diff --git a/src/client/views/Templates.tsx b/src/client/views/Templates.tsx index 0cd367bcb..df53284ed 100644 --- a/src/client/views/Templates.tsx +++ b/src/client/views/Templates.tsx @@ -39,12 +39,18 @@ export class Template { export namespace Templates { // export const BasicLayout = new Template("Basic layout", "{layout}"); + // export const Caption = new Template("Caption", TemplatePosition.OutterBottom, + // `
+ //
{layout}
+ //
+ // + //
+ //
` ); + export const Caption = new Template("Caption", TemplatePosition.OutterBottom, `
{layout}
-
- -
+
` ); export const TitleOverlay = new Template("TitleOverlay", TemplatePosition.InnerTop, diff --git a/src/client/views/collections/CollectionBaseView.tsx b/src/client/views/collections/CollectionBaseView.tsx index 734669893..a3019f23e 100644 --- a/src/client/views/collections/CollectionBaseView.tsx +++ b/src/client/views/collections/CollectionBaseView.tsx @@ -106,14 +106,19 @@ export class CollectionBaseView extends React.Component { } if (!this.createsCycle(doc, props.Document)) { //TODO This won't create the field if it doesn't already exist - const value = Cast(props.Document[props.fieldKey], listSpec(Doc)); + const childDocs = DocListCast(props.Document[props.fieldKey]); let alreadyAdded = true; - if (value !== undefined) { - if (allowDuplicates || !value.some(v => v instanceof Doc && v[Id] === doc[Id])) { + if (childDocs !== undefined) { + // if this is not the first document added to the collection + if (allowDuplicates || !childDocs.some(v => v instanceof Doc && v[Id] === doc[Id])) { alreadyAdded = false; - value.push(doc); + childDocs.push(doc); } + // if we're here, we've tried to add a duplicate } else { + // if we are the first, set up a new list for this and all + // future child documents stored in the associated collection document at the fieldKey (likely .data) + // passed in via props alreadyAdded = false; Doc.SetOnPrototype(this.props.Document, this.props.fieldKey, new List([doc])); } diff --git a/src/client/views/document_templates/caption_toggle/DetailedCaptionToggle.tsx b/src/client/views/document_templates/caption_toggle/DetailedCaptionToggle.tsx new file mode 100644 index 000000000..2172f2852 --- /dev/null +++ b/src/client/views/document_templates/caption_toggle/DetailedCaptionToggle.tsx @@ -0,0 +1,72 @@ +import * as React from 'react'; +import { FontWeightProperty, FontStyleProperty, FontSizeProperty, ColorProperty } from 'csstype'; +import { observer } from 'mobx-react'; +import { observable, action, runInAction } from 'mobx'; +import { FormattedTextBox, FormattedTextBoxProps } from '../../nodes/FormattedTextBox'; +import { FieldViewProps } from '../../nodes/FieldView'; + +interface DetailedCaptionDataProps { + captionFieldKey?: string, + detailsFieldKey?: string, +} + +interface DetailedCaptionStylingProps { + sharedFontColor?: ColorProperty; + captionFontStyle?: FontStyleProperty + detailsFontStyle?: FontStyleProperty + toggleSize?: number +} + +@observer +export default class DetailedCaptionToggle extends React.Component { + @observable loaded: boolean = false; + @observable detailsExpanded: boolean = false; + + @action toggleDetails = (e: React.MouseEvent) => { + e.preventDefault(); + e.stopPropagation(); + this.detailsExpanded = !this.detailsExpanded; + } + + componentDidMount() { + runInAction(() => this.loaded = true); + } + + render() { + let size = this.props.toggleSize || 20; + return ( +
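The comments added to CollectionBaseView.addDocument in the hunk above describe a first-child versus duplicate branch: create the backing list on the first add, otherwise append only when the document is not already present. A generic sketch of that logic, with assumed names rather than the project's Doc and Id types:

// push a child unless it is already present; create the list on the first add
function addChild<T extends { id: string }>(
    existing: T[] | undefined,
    doc: T,
    allowDuplicates = false
): { children: T[], added: boolean } {
    if (existing === undefined) {
        // first document added to the collection: set up the backing list
        return { children: [doc], added: true };
    }
    if (allowDuplicates || !existing.some(child => child.id === doc.id)) {
        existing.push(doc);
        return { children: existing, added: true };
    }
    // duplicate: leave the list untouched
    return { children: existing, added: false };
}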
+ {/* caption */} +
+ +
+ {/* details */} +
+ +
+ {/* toggle */} +
+ +
+
+ ); + } + +} diff --git a/src/client/views/document_templates/image_card/ImageCard.tsx b/src/client/views/document_templates/image_card/ImageCard.tsx new file mode 100644 index 000000000..9931515f3 --- /dev/null +++ b/src/client/views/document_templates/image_card/ImageCard.tsx @@ -0,0 +1,18 @@ +import * as React from 'react'; +import { DocComponent } from '../../DocComponent'; +import { FieldViewProps } from '../../nodes/FieldView'; +import { createSchema, makeInterface } from '../../../../new_fields/Schema'; +import { createInterface } from 'readline'; +import { ImageBox } from '../../nodes/ImageBox'; + +export default class ImageCard extends React.Component { + + render() { + return ( +
+ +
+ ); + } + +} \ No newline at end of file diff --git a/src/client/views/nodes/DocumentContentsView.tsx b/src/client/views/nodes/DocumentContentsView.tsx index 02396c3af..b6c150854 100644 --- a/src/client/views/nodes/DocumentContentsView.tsx +++ b/src/client/views/nodes/DocumentContentsView.tsx @@ -23,6 +23,7 @@ import { FieldViewProps } from "./FieldView"; import { Without, OmitKeys } from "../../../Utils"; import { Cast, StrCast, NumCast } from "../../../new_fields/Types"; import { List } from "../../../new_fields/List"; +import DetailedCaptionToggle from "../document_templates/caption_toggle/DetailedCaptionToggle"; const JsxParser = require('react-jsx-parser').default; //TODO Why does this need to be imported like this? type BindingProps = Without; @@ -103,7 +104,7 @@ export class DocumentContentsView extends React.Component DragManager.SetupDrag() +DragManager.SetupDrag.onRowMove() => DragManager.StartDocumentDrag() +DragManager.StartDrag() + +... (USER IS DRAGGING DOCUMENT AROUND VIA BUTTON) +... (USER DROPS THE DOCUMENT IN THE TARGET COLLECTION) + +CollectionSubView.drop() + + + { + Nodes themselves, both base types and collections, are actually always rendered by using a JSXParser to parse a stringified JSX element layout (see + FieldView.LayoutString()). Typically, way back in the initial drag phase, where the buttons maintained document creation + functions like Documents.ImageDocument(), the layout string will have always been set, because of the way that new node + documents are created. The ImageDocument() function creates a delegate from the imageProto (image document prototype) which is itself created at the time + Dash is loaded. Since the delegate inherits the prototype's layout string, the layoutKey field will be set and effectively always, the JSXParser will + parse the existing layout string to return the appropriate JSX element to be rendered as a child of the collection sub view. On the off chance that this + layout field has not been set, the layout() getter just returns a generic FieldView element to the JSXParser, and internally, this component decides based + on the nature of the document it receives, which node view to assign. This is basically a fallback. + } + + + // all of the below extend + + + + + + \ No newline at end of file -- cgit v1.2.3-70-g09d2 From c0aa7c79258ea8409611da710dc802e3481c34d8 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Tue, 11 Jun 2019 14:19:51 -0400 Subject: fixed the temporary inability to add documents :( --- src/client/views/Templates.tsx | 18 +++++++++--------- src/client/views/collections/CollectionBaseView.tsx | 2 +- src/client/views/nodes/DocumentContentsView.tsx | 2 +- 3 files changed, 11 insertions(+), 11 deletions(-) (limited to 'src') diff --git a/src/client/views/Templates.tsx b/src/client/views/Templates.tsx index df53284ed..e4a9258b7 100644 --- a/src/client/views/Templates.tsx +++ b/src/client/views/Templates.tsx @@ -39,20 +39,20 @@ export class Template { export namespace Templates { // export const BasicLayout = new Template("Basic layout", "{layout}"); - // export const Caption = new Template("Caption", TemplatePosition.OutterBottom, - // `
- //
{layout}
- //
- // - //
- //
` ); - export const Caption = new Template("Caption", TemplatePosition.OutterBottom, `
{layout}
- +
+ +
` ); + // export const Caption = new Template("Caption", TemplatePosition.OutterBottom, + // `
+ //
{layout}
+ // + //
` ); + export const TitleOverlay = new Template("TitleOverlay", TemplatePosition.InnerTop, `
{layout}
diff --git a/src/client/views/collections/CollectionBaseView.tsx b/src/client/views/collections/CollectionBaseView.tsx index a3019f23e..4d6721dc1 100644 --- a/src/client/views/collections/CollectionBaseView.tsx +++ b/src/client/views/collections/CollectionBaseView.tsx @@ -106,7 +106,7 @@ export class CollectionBaseView extends React.Component { } if (!this.createsCycle(doc, props.Document)) { //TODO This won't create the field if it doesn't already exist - const childDocs = DocListCast(props.Document[props.fieldKey]); + const childDocs = Cast(props.Document[props.fieldKey], listSpec(Doc)); let alreadyAdded = true; if (childDocs !== undefined) { // if this is not the first document added to the collection diff --git a/src/client/views/nodes/DocumentContentsView.tsx b/src/client/views/nodes/DocumentContentsView.tsx index b6c150854..27843d33a 100644 --- a/src/client/views/nodes/DocumentContentsView.tsx +++ b/src/client/views/nodes/DocumentContentsView.tsx @@ -104,7 +104,7 @@ export class DocumentContentsView extends React.Component Date: Wed, 12 Jun 2019 23:22:58 -0400 Subject: beginnings of nested golden layout --- src/client/documents/Documents.ts | 18 +++ src/client/util/DocumentManager.ts | 4 +- src/client/util/DragManager.ts | 8 +- src/client/util/TooltipTextMenu.tsx | 2 +- src/client/views/MainOverlayTextBox.tsx | 4 +- src/client/views/MainView.tsx | 20 ++- .../views/collections/CollectionBaseView.tsx | 2 +- .../views/collections/CollectionDockingView.tsx | 162 ++++++++++++++------- .../views/collections/CollectionTreeView.tsx | 2 +- .../views/collections/ParentDocumentSelector.tsx | 4 +- src/client/views/nodes/DocumentView.tsx | 12 +- 11 files changed, 158 insertions(+), 80 deletions(-) (limited to 'src') diff --git a/src/client/documents/Documents.ts b/src/client/documents/Documents.ts index ab61b915c..b346e1570 100644 --- a/src/client/documents/Documents.ts +++ b/src/client/documents/Documents.ts @@ -35,6 +35,7 @@ import { dropActionType } from "../util/DragManager"; import { DateField } from "../../new_fields/DateField"; import { UndoManager } from "../util/UndoManager"; import { RouteStore } from "../../server/RouteStore"; +import { CollectionDockingView } from "../views/collections/CollectionDockingView"; var requestImageSize = require('request-image-size'); var path = require('path'); @@ -315,6 +316,23 @@ export namespace Docs { export function DockDocument(documents: Array, config: string, options: DocumentOptions, id?: string) { return CreateInstance(collProto, new List(documents), { ...options, viewType: CollectionViewType.Docking, dockingConfig: config }, id); } + export type DocConfig = { + doc: Doc, + initialWidth?: number + } + export function StandardCollectionDockingDocument(configs: Array, options: DocumentOptions, id?: string, type: string = "row") { + let layoutConfig = { + content: [ + { + type: type, + content: [ + ...configs.map(config => CollectionDockingView.makeDocumentConfig(config.doc, config.initialWidth)) + ] + } + ] + }; + return DockDocument(configs.map(c => c.doc), JSON.stringify(layoutConfig), options, id); + } export function CaptionDocument(doc: Doc) { const captionDoc = Doc.MakeAlias(doc); diff --git a/src/client/util/DocumentManager.ts b/src/client/util/DocumentManager.ts index 65c4b9e4b..72d9f7e76 100644 --- a/src/client/util/DocumentManager.ts +++ b/src/client/util/DocumentManager.ts @@ -134,7 +134,7 @@ export class DocumentManager { const actualDoc = Doc.MakeAlias(docDelegate); actualDoc.libraryBrush = true; if (linkPage !== undefined) 
actualDoc.curPage = linkPage; - (dockFunc || CollectionDockingView.Instance.AddRightSplit)(actualDoc); + (dockFunc || CollectionDockingView.AddRightSplit)(actualDoc); } else { let contextView: DocumentView | null; docDelegate.libraryBrush = true; @@ -142,7 +142,7 @@ export class DocumentManager { contextDoc.panTransformType = "Ease"; contextView.props.focus(contextDoc); } else { - (dockFunc || CollectionDockingView.Instance.AddRightSplit)(contextDoc); + (dockFunc || CollectionDockingView.AddRightSplit)(contextDoc); } } } diff --git a/src/client/util/DragManager.ts b/src/client/util/DragManager.ts index 1e84a0db0..7625b0463 100644 --- a/src/client/util/DragManager.ts +++ b/src/client/util/DragManager.ts @@ -26,8 +26,8 @@ export function SetupDrag(_reference: React.RefObject, docFunc: () // if (this.props.isSelected() || this.props.isTopMost) { if (e.button === 0) { e.stopPropagation(); - if (e.shiftKey && CollectionDockingView.Instance) { - CollectionDockingView.Instance.StartOtherDrag([await docFunc()], e); + if (e.shiftKey && CollectionDockingView.TopLevel) { + CollectionDockingView.TopLevel.StartOtherDrag([await docFunc()], e); } else { document.addEventListener("pointermove", onRowMove); document.addEventListener("pointerup", onRowUp); @@ -264,9 +264,9 @@ export namespace DragManager { if (dragData instanceof DocumentDragData) { dragData.userDropAction = e.ctrlKey || e.altKey ? "alias" : undefined; } - if (e.shiftKey && CollectionDockingView.Instance) { + if (e.shiftKey && CollectionDockingView.TopLevel) { AbortDrag(); - CollectionDockingView.Instance.StartOtherDrag(docs, { + CollectionDockingView.TopLevel.StartOtherDrag(docs, { pageX: e.pageX, pageY: e.pageY, preventDefault: emptyFunction, diff --git a/src/client/util/TooltipTextMenu.tsx b/src/client/util/TooltipTextMenu.tsx index f517f757a..fa2483db5 100644 --- a/src/client/util/TooltipTextMenu.tsx +++ b/src/client/util/TooltipTextMenu.tsx @@ -194,7 +194,7 @@ export class TooltipTextMenu { if (DocumentManager.Instance.getDocumentView(f)) { DocumentManager.Instance.getDocumentView(f)!.props.focus(f); } - else if (CollectionDockingView.Instance) CollectionDockingView.Instance.AddRightSplit(f); + else if (CollectionDockingView.TopLevel) CollectionDockingView.AddRightSplit(f); } })); } diff --git a/src/client/views/MainOverlayTextBox.tsx b/src/client/views/MainOverlayTextBox.tsx index 24327b995..718979123 100644 --- a/src/client/views/MainOverlayTextBox.tsx +++ b/src/client/views/MainOverlayTextBox.tsx @@ -86,7 +86,7 @@ export class MainOverlayTextBox extends React.Component addDocTab = (doc: Doc, location: string) => { if (true) { // location === "onRight") { need to figure out stack to add "inTab" - CollectionDockingView.Instance.AddRightSplit(doc); + CollectionDockingView.AddRightSplit(doc); } } render() { @@ -102,6 +102,6 @@ export class MainOverlayTextBox extends React.Component
; } - else return (null); Z + else return (null); } } \ No newline at end of file diff --git a/src/client/views/MainView.tsx b/src/client/views/MainView.tsx index 67a026897..426e2440a 100644 --- a/src/client/views/MainView.tsx +++ b/src/client/views/MainView.tsx @@ -1,5 +1,5 @@ import { IconName, library } from '@fortawesome/fontawesome-svg-core'; -import { faFilePdf, faFilm, faFont, faGlobeAsia, faImage, faMusic, faObjectGroup, faPenNib, faRedoAlt, faTable, faTree, faUndoAlt, faBell } from '@fortawesome/free-solid-svg-icons'; +import { faClone, faFilePdf, faFilm, faFont, faGlobeAsia, faImage, faMusic, faObjectGroup, faPenNib, faRedoAlt, faTable, faTree, faUndoAlt, faBell } from '@fortawesome/free-solid-svg-icons'; import { FontAwesomeIcon } from '@fortawesome/react-fontawesome'; import { action, computed, configure, observable, runInAction, trace } from 'mobx'; import { observer } from 'mobx-react'; @@ -106,6 +106,7 @@ export class MainView extends React.Component { library.add(faFilm); library.add(faMusic); library.add(faTree); + library.add(faClone) this.initEventListeners(); this.initAuthenticationRouters(); } @@ -151,8 +152,11 @@ export class MainView extends React.Component { const list = Cast(CurrentUserUtils.UserDocument.data, listSpec(Doc)); if (list) { let freeformDoc = Docs.FreeformDocument([], { x: 0, y: 400, width: this.pwidth * .7, height: this.pheight, title: `WS collection ${list.length + 1}` }); - var dockingLayout = { content: [{ type: 'row', content: [CollectionDockingView.makeDocumentConfig(CurrentUserUtils.UserDocument, 150), CollectionDockingView.makeDocumentConfig(freeformDoc, 600)] }] }; - let mainDoc = Docs.DockDocument([CurrentUserUtils.UserDocument, freeformDoc], JSON.stringify(dockingLayout), { title: `Workspace ${list.length + 1}` }, id); + let configs = [ + { doc: CurrentUserUtils.UserDocument, initialWidth: 150 }, + { doc: freeformDoc, initialWidth: 600 } + ] + let mainDoc = Docs.StandardCollectionDockingDocument(configs, { title: `Workspace ${list.length + 1}` }, id); list.push(mainDoc); // bcz: strangely, we need a timeout to prevent exceptions/issues initializing GoldenLayout (the rendering engine for Main Container) setTimeout(() => { @@ -183,8 +187,8 @@ export class MainView extends React.Component { } openNotifsCol = () => { - if (this._notifsCol && CollectionDockingView.Instance) { - CollectionDockingView.Instance.AddRightSplit(this._notifsCol); + if (this._notifsCol && CollectionDockingView.TopLevel) { + CollectionDockingView.AddRightSplit(this._notifsCol); } } @@ -240,6 +244,7 @@ export class MainView extends React.Component { let addTextNode = action(() => Docs.TextDocument({ borderRounding: -1, width: 200, height: 200, title: "a text note" })); let addColNode = action(() => Docs.FreeformDocument([], { width: this.pwidth * .7, height: this.pheight, title: "a freeform collection" })); + let addDockingNode = action(() => Docs.StandardCollectionDockingDocument([{ doc: addColNode(), initialWidth: 200 }], { width: 200, height: 200, title: "a nested docking freeform collection" })); let addSchemaNode = action(() => Docs.SchemaDocument(["title"], [], { width: 200, height: 200, title: "a schema collection" })); let addTreeNode = action(() => CurrentUserUtils.UserDocument); //let addTreeNode = action(() => Docs.TreeDocument([CurrentUserUtils.UserDocument], { width: 250, height: 400, title: "Library:" + CurrentUserUtils.email, dropAction: "alias" })); @@ -260,6 +265,7 @@ export class MainView extends React.Component { [React.createRef(), "object-group", "Add 
Collection", addColNode], [React.createRef(), "tree", "Add Tree", addTreeNode], [React.createRef(), "table", "Add Schema", addSchemaNode], + [React.createRef(), "clone", "Add Docking Frame", addDockingNode] ]; return < div id="add-nodes-menu" > @@ -327,12 +333,12 @@ export class MainView extends React.Component { switch (e.key) { case "ArrowRight": if (this.mainFreeform) { - CollectionDockingView.Instance.AddRightSplit(this.mainFreeform!); + CollectionDockingView.AddRightSplit(this.mainFreeform!); } break; case "ArrowLeft": if (this.mainFreeform) { - CollectionDockingView.Instance.CloseRightSplit(this.mainFreeform!); + CollectionDockingView.CloseRightSplit(this.mainFreeform!); } break; case "o": diff --git a/src/client/views/collections/CollectionBaseView.tsx b/src/client/views/collections/CollectionBaseView.tsx index 4d6721dc1..096c65092 100644 --- a/src/client/views/collections/CollectionBaseView.tsx +++ b/src/client/views/collections/CollectionBaseView.tsx @@ -16,7 +16,7 @@ export enum CollectionViewType { Schema, Docking, Tree, - Stacking + Stacking, } export interface CollectionRenderProps { diff --git a/src/client/views/collections/CollectionDockingView.tsx b/src/client/views/collections/CollectionDockingView.tsx index 1adb73bcf..dfb8fac35 100644 --- a/src/client/views/collections/CollectionDockingView.tsx +++ b/src/client/views/collections/CollectionDockingView.tsx @@ -1,9 +1,9 @@ import 'golden-layout/src/css/goldenlayout-base.css'; import 'golden-layout/src/css/goldenlayout-dark-theme.css'; -import { action, observable, reaction, Lambda } from "mobx"; +import { action, observable, reaction, Lambda, IReactionDisposer } from "mobx"; import { observer } from "mobx-react"; import * as ReactDOM from 'react-dom'; -import Measure from "react-measure"; +import Measure, { ContentRect } from "react-measure"; import * as GoldenLayout from "../../../client/goldenLayout"; import { Doc, Field, Opt, DocListCast } from "../../../new_fields/Doc"; import { FieldId } from "../../../new_fields/RefField"; @@ -26,7 +26,7 @@ import { CurrentUserUtils } from '../../../server/authentication/models/current_ @observer export class CollectionDockingView extends React.Component { - public static Instance: CollectionDockingView; + public static TopLevel: CollectionDockingView; public static makeDocumentConfig(document: Doc, width?: number) { return { type: 'react-component', @@ -35,11 +35,16 @@ export class CollectionDockingView extends React.Component { + const config = CollectionDockingView.makeDocumentConfig(document, width); + (config.props as any).parent = this; + return config; + } + private _goldenLayout: any = null; private _containerRef = React.createRef(); private _flush: boolean = false; @@ -48,7 +53,7 @@ export class CollectionDockingView extends React.Component - this.AddRightSplit(dragDoc, true).contentItems[0].tab._dragListener. + CollectionDockingView.AddRightSplit(dragDoc, true).contentItems[0].tab._dragListener. 
onMouseDown({ pageX: e.pageX, pageY: e.pageY, preventDefault: emptyFunction, button: 0 })); } - @action - public OpenFullScreen(document: Doc) { + private openFullScreen = (document: Doc) => { let newItemStackConfig = { type: 'stack', - content: [CollectionDockingView.makeDocumentConfig(document)] + content: [this.makeDocConfig(document)] }; var docconfig = this._goldenLayout.root.layoutManager.createContentItem(newItemStackConfig, this._goldenLayout); this._goldenLayout.root.contentItems[0].addChild(docconfig); docconfig.callDownwards('_$init'); this._goldenLayout._$maximiseItem(docconfig); - this._ignoreStateChange = JSON.stringify(this._goldenLayout.toConfig()); + this._ignoreStateChange = this.retrieveConfiguration(); this.stateChanged(); } + @action + public static OpenFullScreen(document: Doc, dockingView: CollectionDockingView = CollectionDockingView.TopLevel) { + dockingView.openFullScreen(document); + } + + initializeConfiguration = (configText: string) => { + let configuration: any = JSON.parse(configText); + this.injectParentProp(configuration.content); + this._goldenLayout = new GoldenLayout(configuration); + } + + retrieveConfiguration = () => { + let configuration: any = this._goldenLayout.toConfig(); + this.injectParentProp(configuration.content, true); + return JSON.stringify(configuration); + } + + injectParentProp = (contentArray: any[], reverse: boolean = false) => { + if (!contentArray || contentArray.length == 0) return; + contentArray.forEach(member => { + let baseCase = Object.keys(member).includes("props"); + if (!baseCase) { + this.injectParentProp(member.content, reverse) + } else { + reverse ? delete member.props.parent : member.props.parent = this; + } + }); + } + @undoBatch @action - public CloseRightSplit = (document: Doc): boolean => { + public static CloseRightSplit = (document: Doc, dockingView: CollectionDockingView = CollectionDockingView.TopLevel): boolean => { let retVal = false; - if (this._goldenLayout.root.contentItems[0].isRow) { - retVal = Array.from(this._goldenLayout.root.contentItems[0].contentItems).some((child: any) => { + if (dockingView._goldenLayout.root.contentItems[0].isRow) { + retVal = Array.from(dockingView._goldenLayout.root.contentItems[0].contentItems).some((child: any) => { if (child.contentItems.length === 1 && child.contentItems[0].config.component === "DocumentFrameRenderer" && Doc.AreProtosEqual(DocumentManager.Instance.getDocumentViewById(child.contentItems[0].config.props.documentId)!.Document, document)) { child.contentItems[0].remove(); - this.layoutChanged(document); + dockingView.layoutChanged(document); return true; } else { Array.from(child.contentItems).filter((tab: any) => tab.config.component === "DocumentFrameRenderer").some((tab: any, j: number) => { if (Doc.AreProtosEqual(DocumentManager.Instance.getDocumentViewById(tab.config.props.documentId)!.Document, document)) { child.contentItems[j].remove(); child.config.activeItemIndex = Math.max(child.contentItems.length - 1, 0); - let docs = Cast(this.props.Document.data, listSpec(Doc)); + let docs = Cast(dockingView.props.Document.data, listSpec(Doc)); docs && docs.indexOf(document) !== -1 && docs.splice(docs.indexOf(document), 1); return true; } @@ -103,7 +136,7 @@ export class CollectionDockingView extends React.Component { + public static AddRightSplit = (document: Doc, minimize: boolean = false, dockingView: CollectionDockingView = CollectionDockingView.TopLevel) => { + return dockingView.addRightSplit(document, minimize); + } + + private addRightSplit(document: 
Doc, minimize = false) { let docs = Cast(this.props.Document.data, listSpec(Doc)); if (docs) { docs.push(document); } let newItemStackConfig = { type: 'stack', - content: [CollectionDockingView.makeDocumentConfig(document)] + content: [this.makeDocConfig(document)] }; var newContentItem = this._goldenLayout.root.layoutManager.createContentItem(newItemStackConfig, this._goldenLayout); @@ -157,13 +193,17 @@ export class CollectionDockingView extends React.Component { + + public static AddTab = (stack: any, document: Doc, dockingView: CollectionDockingView = CollectionDockingView.TopLevel) => { + dockingView.addTab(stack, document); + } + + private addTab = (stack: any, document: Doc) => { let docs = Cast(this.props.Document.data, listSpec(Doc)); if (docs) { docs.push(document); } - let docContentConfig = CollectionDockingView.makeDocumentConfig(document); + let docContentConfig = this.makeDocConfig(document); var newContentItem = stack.layoutManager.createContentItem(docContentConfig, this._goldenLayout); stack.addChild(newContentItem.contentItems[0], undefined); this.layoutChanged(); @@ -173,10 +213,10 @@ export class CollectionDockingView extends React.Component void = () => { if (this._containerRef.current) { this.reactionDisposer = reaction( () => StrCast(this.props.Document.dockingConfig), () => { - if (!this._goldenLayout || this._ignoreStateChange !== JSON.stringify(this._goldenLayout.toConfig())) { + if (!this._goldenLayout || this._ignoreStateChange !== this.retrieveConfiguration()) { // Because this is in a set timeout, if this component unmounts right after mounting, // we will leak a GoldenLayout, because we try to destroy it before we ever create it setTimeout(() => this.setupGoldenLayout(), 1); @@ -218,7 +258,7 @@ export class CollectionDockingView extends React.Component void = () => { @@ -232,18 +272,19 @@ export class CollectionDockingView extends React.Component { - var cur = this._containerRef.current; - + onResize = (size: ContentRect) => { // bcz: since GoldenLayout isn't a React component itself, we need to notify it to resize when its document container's size has changed - this._goldenLayout.updateSize(cur!.getBoundingClientRect().width, cur!.getBoundingClientRect().height); + // this._goldenLayout.updateSize(cur!.getBoundingClientRect().width, cur!.getBoundingClientRect().height); + if (this._goldenLayout) { + this._goldenLayout.updateSize(size.offset!.width, size.offset!.height); + } } @action @@ -301,12 +342,12 @@ export class CollectionDockingView extends React.Component { - let docs = Cast(CollectionDockingView.Instance.props.Document.data, listSpec(Doc)); - CollectionDockingView.Instance._removedDocs.map(theDoc => + let docs = Cast(CollectionDockingView.TopLevel.props.Document.data, listSpec(Doc)); + CollectionDockingView.TopLevel._removedDocs.map(theDoc => docs && docs.indexOf(theDoc) !== -1 && docs.splice(docs.indexOf(theDoc), 1)); - CollectionDockingView.Instance._removedDocs.length = 0; - var json = JSON.stringify(this._goldenLayout.toConfig()); + CollectionDockingView.TopLevel._removedDocs.length = 0; + var json = this.retrieveConfiguration(); this.props.Document.dockingConfig = json; if (this.undohack && !this.hack) { this.undohack.end(); @@ -315,18 +356,18 @@ export class CollectionDockingView extends React.Component { + private itemDropped = () => { this.stateChanged(); } - htmlToElement(html: string) { + private htmlToElement(html: string) { var template = document.createElement('template'); html = html.trim(); // Never return a text node of whitespace 
as the result template.innerHTML = html; return template.content.firstChild; } - tabCreated = async (tab: any) => { + private tabCreated = async (tab: any) => { if (tab.hasOwnProperty("contentItem") && tab.contentItem.config.type !== "stack") { if (tab.contentItem.config.fixed) { tab.contentItem.parent.config.fixed = true; @@ -346,7 +387,7 @@ export class CollectionDockingView extends React.Component CollectionDockingView.Instance.AddTab(stack, doc)} />, upDiv); + ReactDOM.render( CollectionDockingView.AddTab(stack, doc)} />, upDiv); tab.reactComponents = [upDiv]; tab.element.append(upDiv); counter.DashDocId = tab.contentItem.config.props.documentId; @@ -368,13 +409,13 @@ export class CollectionDockingView extends React.Component { + private tabDestroyed = (tab: any) => { if (tab.reactComponents) { for (const ele of tab.reactComponents) { ReactDOM.unmountComponentAtNode(ele); @@ -383,7 +424,7 @@ export class CollectionDockingView extends React.Component { + private stackCreated = (stack: any) => { //stack.header.controlsContainer.find('.lm_popout').hide(); stack.header.controlsContainer.find('.lm_close') //get the close icon .off('click') //unbind the current click handler @@ -394,7 +435,7 @@ export class CollectionDockingView extends React.Component + + {({ measureRef }) => ( +
+ + )} + ); } @@ -419,16 +466,23 @@ export class CollectionDockingView extends React.Component { _mainCont = React.createRef(); @observable private _panelWidth = 0; @observable private _panelHeight = 0; @observable private _document: Opt; + private get parentProps(): SubCollectionViewProps { + return this.props.parent.props; + } + get _stack(): any { - let parent = (this.props as any).glContainer.parent.parent; + let parent = this.props.glContainer.parent.parent; if (this._document && this._document.excludeFromLibrary && parent.parent && parent.parent.contentItems.length > 1) return parent.parent.contentItems[1]; return parent; @@ -451,7 +505,7 @@ export class DockedFrameRenderer extends React.Component { if (this._mainCont.current && this._mainCont.current.children) { let { scale, translateX, translateY } = Utils.GetScreenTransform(this._mainCont.current.children[0].firstChild as HTMLElement); scale = Utils.GetScreenTransform(this._mainCont.current).scale; - return CollectionDockingView.Instance.props.ScreenToLocalTransform().translate(-translateX, -translateY).scale(1 / this.contentScaling() / scale); + return this.parentProps.ScreenToLocalTransform().translate(-translateX, -translateY).scale(1 / this.contentScaling() / scale); } return Transform.Identity(); } @@ -469,9 +523,9 @@ export class DockedFrameRenderer extends React.Component { addDocTab = (doc: Doc, location: string) => { if (location === "onRight") { - CollectionDockingView.Instance.AddRightSplit(doc); + CollectionDockingView.AddRightSplit(doc); } else { - CollectionDockingView.Instance.AddTab(this._stack, doc); + CollectionDockingView.AddTab(this._stack, doc); } } get content() { diff --git a/src/client/views/collections/CollectionTreeView.tsx b/src/client/views/collections/CollectionTreeView.tsx index 48da52ffa..b0a310ec1 100644 --- a/src/client/views/collections/CollectionTreeView.tsx +++ b/src/client/views/collections/CollectionTreeView.tsx @@ -123,7 +123,7 @@ class TreeView extends React.Component { return true; }} />); - let dataDocs = CollectionDockingView.Instance ? Cast(CollectionDockingView.Instance.props.Document.data, listSpec(Doc), []) : []; + let dataDocs = CollectionDockingView.TopLevel ? Cast(CollectionDockingView.TopLevel.props.Document.data, listSpec(Doc), []) : []; let openRight = dataDocs && dataDocs.indexOf(this.props.document) !== -1 ? (null) : (
diff --git a/src/client/views/collections/ParentDocumentSelector.tsx b/src/client/views/collections/ParentDocumentSelector.tsx index f11af04a3..95183bd2c 100644 --- a/src/client/views/collections/ParentDocumentSelector.tsx +++ b/src/client/views/collections/ParentDocumentSelector.tsx @@ -29,8 +29,8 @@ export class SelectorContextMenu extends React.Component { allDocs.forEach((docs, index) => docs.forEach(doc => map.set(doc, aliases[index]))); docs.forEach(doc => map.delete(doc)); runInAction(() => { - this._docs = docs.filter(doc => !Doc.AreProtosEqual(doc, CollectionDockingView.Instance.props.Document)).map(doc => ({ col: doc, target: this.props.Document })); - this._otherDocs = Array.from(map.entries()).filter(entry => !Doc.AreProtosEqual(entry[0], CollectionDockingView.Instance.props.Document)).map(([col, target]) => ({ col, target })); + this._docs = docs.filter(doc => !Doc.AreProtosEqual(doc, CollectionDockingView.TopLevel.props.Document)).map(doc => ({ col: doc, target: this.props.Document })); + this._otherDocs = Array.from(map.entries()).filter(entry => !Doc.AreProtosEqual(entry[0], CollectionDockingView.TopLevel.props.Document)).map(([col, target]) => ({ col, target })); }); } diff --git a/src/client/views/nodes/DocumentView.tsx b/src/client/views/nodes/DocumentView.tsx index 7750b9173..efba26c2c 100644 --- a/src/client/views/nodes/DocumentView.tsx +++ b/src/client/views/nodes/DocumentView.tsx @@ -238,11 +238,11 @@ export class DocumentView extends DocComponent(Docu expandedProtoDocs.forEach(maxDoc => maxDoc.isMinimized = wasMinimized); } } - if (maxLocation && maxLocation !== "inPlace" && CollectionDockingView.Instance) { - let dataDocs = DocListCast(CollectionDockingView.Instance.props.Document.data); + if (maxLocation && maxLocation !== "inPlace" && CollectionDockingView.TopLevel) { + let dataDocs = DocListCast(CollectionDockingView.TopLevel.props.Document.data); if (dataDocs) { expandedDocs.forEach(maxDoc => - (!CollectionDockingView.Instance.CloseRightSplit(Doc.GetProto(maxDoc)) && + (!CollectionDockingView.CloseRightSplit(Doc.GetProto(maxDoc)) && this.props.addDocTab(getDispDoc(maxDoc), maxLocation))); } } else { @@ -270,8 +270,8 @@ export class DocumentView extends DocComponent(Docu this._downX = e.clientX; this._downY = e.clientY; this._hitExpander = DocListCast(this.props.Document.subBulletDocs).length > 0; - if (e.shiftKey && e.buttons === 1 && CollectionDockingView.Instance) { - CollectionDockingView.Instance.StartOtherDrag([Doc.MakeAlias(this.props.Document)], e); + if (e.shiftKey && e.buttons === 1 && CollectionDockingView.TopLevel) { + CollectionDockingView.TopLevel.StartOtherDrag([Doc.MakeAlias(this.props.Document)], e); e.stopPropagation(); } else { if (this.active) e.stopPropagation(); // events stop at the lowest document that is active. 
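The initializeConfiguration, retrieveConfiguration, and injectParentProp changes earlier in this commit keep a non-serializable back-reference out of the persisted golden-layout JSON: the owning view is injected into every leaf's props after parsing and stripped again before stringifying. A generic sketch of that recursive walk, with an assumed node shape rather than golden-layout's exact config types:

interface LayoutNode {
    content?: LayoutNode[];
    props?: { [key: string]: any };
}

// inject a runtime-only prop into every leaf after JSON.parse, or strip it
// again (remove = true) before JSON.stringify
function setRuntimeProp(nodes: LayoutNode[] | undefined, key: string, value: any, remove = false): void {
    if (!nodes || nodes.length === 0) return;
    for (const node of nodes) {
        if (node.props) {
            if (remove) delete node.props[key];
            else node.props[key] = value;
        } else {
            setRuntimeProp(node.content, key, value, remove);
        }
    }
}

// usage sketch:
// const config = JSON.parse(configText);
// setRuntimeProp(config.content, "parent", dockingView);       // after loading
// setRuntimeProp(config.content, "parent", undefined, true);   // before saving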
@@ -316,7 +316,7 @@ export class DocumentView extends DocComponent(Docu } } fullScreenClicked = (): void => { - CollectionDockingView.Instance && CollectionDockingView.Instance.OpenFullScreen(Doc.MakeCopy(this.props.Document, false)); + CollectionDockingView.TopLevel && CollectionDockingView.OpenFullScreen(Doc.MakeCopy(this.props.Document, false)); SelectionManager.DeselectAll(); } -- cgit v1.2.3-70-g09d2 From d2c9550f23c4e5654822ac01b973bb965e3f6dec Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Fri, 14 Jun 2019 20:49:12 -0400 Subject: cleaned up Docs namespace and thoroughly documented DocServer.GetRefFields --- .vscode/settings.json | 3 +- src/client/DocServer.ts | 80 ++- src/client/documents/Documents.ts | 758 ++++++++++++--------- src/client/views/DocumentDecorations.tsx | 2 +- src/client/views/Main.tsx | 2 +- src/client/views/MainView.tsx | 24 +- src/client/views/SearchBox.tsx | 2 +- .../views/collections/CollectionDockingView.tsx | 386 ++++------- .../views/collections/CollectionSchemaView.tsx | 2 +- src/client/views/collections/CollectionSubView.tsx | 16 +- .../views/collections/CollectionTreeView.tsx | 2 +- .../views/collections/CollectionVideoView.tsx | 2 +- .../views/collections/DockedFrameRenderer.tsx | 116 ++++ .../collections/collectionFreeForm/MarqueeView.tsx | 18 +- src/client/views/nodes/DocumentView.tsx | 4 +- src/mobile/ImageUpload.tsx | 4 +- src/new_fields/Doc.ts | 12 + src/new_fields/util.ts | 1 + .../authentication/models/current_user_utils.ts | 8 +- 19 files changed, 835 insertions(+), 607 deletions(-) create mode 100644 src/client/views/collections/DockedFrameRenderer.tsx (limited to 'src') diff --git a/.vscode/settings.json b/.vscode/settings.json index fc315ffaf..5df697fee 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -9,5 +9,6 @@ "editor.formatOnSave": true, "editor.detectIndentation": false, "typescript.format.insertSpaceAfterOpeningAndBeforeClosingNonemptyBrackets": false, - "typescript.format.insertSpaceAfterOpeningAndBeforeClosingNonemptyBraces": true + "typescript.format.insertSpaceAfterOpeningAndBeforeClosingNonemptyBraces": true, + "search.usePCRE2": true } \ No newline at end of file diff --git a/src/client/DocServer.ts b/src/client/DocServer.ts index cbcf751ee..d759b4757 100644 --- a/src/client/DocServer.ts +++ b/src/client/DocServer.ts @@ -47,36 +47,81 @@ export namespace DocServer { } } + /** + * Given a list of Doc GUIDs, this utility function will asynchronously attempt to fetch each document + * associated with a given input id, first looking in the RefField cache and then communicating with + * the server if the document was not found there. + * + * @param ids the ids that map to the reqested documents + */ export async function GetRefFields(ids: string[]): Promise<{ [id: string]: Opt }> { const requestedIds: string[] = []; const waitingIds: string[] = []; const promises: Promise>[] = []; const map: { [id: string]: Opt } = {}; + + // 1) An initial pass through the cache to determine which documents need to be fetched, + // which are already in the process of being fetched and which already exist in the + // cache for (const id of ids) { const cached = _cache[id]; + if (cached === undefined) { + // NOT CACHED => we'll have to send a request to the server requestedIds.push(id); } else if (cached instanceof Promise) { + // BEING CACHED => someone else previously (likely recently) called GetRefFields, + // and requested one of the documents I'm looking for. 
Shouldn't fetch again, just + // wait until this promise is resolved (see the second to last line of the function) promises.push(cached); waitingIds.push(id); } else { + // CACHED => great, let's just add it to the field map map[id] = cached; } } - const prom = Utils.EmitCallback(_socket, MessageStore.GetRefFields, requestedIds).then(fields => { + + // 2) Synchronously, we emit a single callback to the server requesting the documents for the given ids. + // This returns a promise, which, when resolved, indicates that all the JSON serialized versions of + // the fields have been returned from the server + const fieldsReceived: Promise = Utils.EmitCallback(_socket, MessageStore.GetRefFields, requestedIds); + + // 3) When the serialized RefFields have been received, go head and begin deserializing them into objects. + // Here, once deserialized, we also invoke .proto to 'load' the documents' prototypes, which ensures that all + // future .proto calls won't have to go farther than the cache to get their actual value. + const fieldsDeserialized = fieldsReceived.then(async fields => { const fieldMap: { [id: string]: RefField } = {}; + const deserializedFields: any = []; for (const field of fields) { if (field !== undefined) { - fieldMap[field.id] = SerializationHelper.Deserialize(field); + // deserialize + let deserialized: any = SerializationHelper.Deserialize(field); + fieldMap[field.id] = deserialized; + deserializedFields.push(deserialized.proto); } } - + // this actually handles the loeading of prototypes + await Promise.all(deserializedFields); return fieldMap; }); - requestedIds.forEach(id => _cache[id] = prom.then(fields => fields[id])); - const fields = await prom; + + // 4) Here, for each of the documents we've requested *ourselves* (i.e. weren't promises or found in the cache) + // we set the value at the field's id to a promise that will resolve to the field. + // When we find that promises exist at keys in the cache, THIS is where they were set, just by some other caller (method). + requestedIds.forEach(id => _cache[id] = fieldsDeserialized.then(fields => fields[id])); + + // 5) At this point, all fields have a) been returned from the server and b) been deserialized into actual Field objects whose + // prototype documents, if any, have also been fetched and cached. + const fields = await fieldsDeserialized; + + // 6) With this confidence, we can now go through and update the cache at the ids of the fields that + // we explicitly had to fetch. To finish it off, we add whatever value we've come up with for a given + // id to the soon to be returned field mapping. requestedIds.forEach(id => { const field = fields[id]; + // either way, overwrite or delete any promises that we inserted as flags + // to indicate that the field was in the process of being fetched. Now everything + // should be an actual value within or entirely absent from the cache. 
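The numbered comments above describe a cache whose entries are either resolved fields or in-flight promises, so that concurrent callers requesting the same id share a single server round trip. A simplified, generic sketch of that shape, with assumed names rather than the DocServer internals:

type Fetch<T> = (ids: string[]) => Promise<{ [id: string]: T | undefined }>;

class RequestCache<T> {
    private cache: { [id: string]: T | Promise<T | undefined> | undefined } = {};

    constructor(private fetchFromServer: Fetch<T>) { }

    async getMany(ids: string[]): Promise<{ [id: string]: T | undefined }> {
        const result: { [id: string]: T | undefined } = {};
        const missing: string[] = [];
        const waiting: [string, Promise<T | undefined>][] = [];

        for (const id of ids) {
            const entry = this.cache[id];
            if (entry === undefined) missing.push(id);                    // we must fetch it ourselves
            else if (entry instanceof Promise) waiting.push([id, entry]); // someone else is already fetching it
            else result[id] = entry;                                      // already cached
        }

        const request: Promise<{ [id: string]: T | undefined }> =
            missing.length ? this.fetchFromServer(missing) : Promise.resolve({} as { [id: string]: T | undefined });
        // publish in-flight promises so concurrent callers wait on this request
        missing.forEach(id => this.cache[id] = request.then(fields => fields[id]));

        const fetched = await request;
        missing.forEach(id => {
            const field = fetched[id];
            if (field === undefined) delete this.cache[id];
            else this.cache[id] = field;
            result[id] = field;
        });

        // collect the ids other callers were already fetching
        for (const [id, promise] of waiting) result[id] = await promise;
        return result;
    }
}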
if (field !== undefined) { _cache[id] = field; } else { @@ -84,14 +129,23 @@ export namespace DocServer { } map[id] = field; }); - await Promise.all(requestedIds.map(async id => { - const field = fields[id]; - if (field) { - await (field as any).proto; - } - })); - const otherFields = await Promise.all(promises); - waitingIds.forEach((id, index) => map[id] = otherFields[index]); + + // 7) Those promises we encountered in the else if of 1), which represent + // other callers having already submitted a request to the server for (a) document(s) + // in which we're interested, must still be awaited so that we can return the proper + // values for those as well. + // + // Fortunately, those other callers will also hit their own version of 6) and clean up + // the shared cache when these promises resolve, so all we have to do is... + const otherCallersFetching = await Promise.all(promises); + // ...extract the RefFields returned from the resolution of those promises and add them to our + // own map. + waitingIds.forEach((id, index) => map[id] = otherCallersFetching[index]); + + // Now, we return our completed mapping from all of the ids that were passed into the method + // to their actual RefField | undefined values. This return value either becomes the input + // argument to the caller's promise (i.e. GetRefFields.then(map => //do something with map...)) + // or it is the direct return result if the promise is awaited. return map; } diff --git a/src/client/documents/Documents.ts b/src/client/documents/Documents.ts index b346e1570..b10954636 100644 --- a/src/client/documents/Documents.ts +++ b/src/client/documents/Documents.ts @@ -18,7 +18,6 @@ import { action } from "mobx"; import { ColumnAttributeModel } from "../northstar/core/attribute/AttributeModel"; import { AttributeTransformationModel } from "../northstar/core/attribute/AttributeTransformationModel"; import { AggregateFunction } from "../northstar/model/idea/idea"; -import { Template } from "../views/Templates"; import { MINIMIZED_ICON_SIZE } from "../views/globalCssVariables.scss"; import { IconBox } from "../views/nodes/IconBox"; import { Field, Doc, Opt } from "../../new_fields/Doc"; @@ -30,7 +29,7 @@ import { Cast, NumCast } from "../../new_fields/Types"; import { IconField } from "../../new_fields/IconField"; import { listSpec } from "../../new_fields/Schema"; import { DocServer } from "../DocServer"; -import { StrokeData, InkField } from "../../new_fields/InkField"; +import { InkField } from "../../new_fields/InkField"; import { dropActionType } from "../util/DragManager"; import { DateField } from "../../new_fields/DateField"; import { UndoManager } from "../util/UndoManager"; @@ -67,345 +66,486 @@ export interface DocumentOptions { dbDoc?: Doc; // [key: string]: Opt; } -const delegateKeys = ["x", "y", "width", "height", "panX", "panY"]; -export namespace DocUtils { - export function MakeLink(source: Doc, target: Doc) { - let protoSrc = source.proto ? source.proto : source; - let protoTarg = target.proto ? 
target.proto : target; - UndoManager.RunInBatch(() => { - let linkDoc = Docs.TextDocument({ width: 100, height: 30, borderRounding: -1 }); - //let linkDoc = new Doc; - linkDoc.proto!.title = "-link name-"; - linkDoc.proto!.linkDescription = ""; - linkDoc.proto!.linkTags = "Default"; +export namespace Docs { - linkDoc.proto!.linkedTo = target; - linkDoc.proto!.linkedToPage = target.curPage; - linkDoc.proto!.linkedFrom = source; - linkDoc.proto!.linkedFromPage = source.curPage; + export namespace Prototypes { - let linkedFrom = Cast(protoTarg.linkedFromDocs, listSpec(Doc)); - if (!linkedFrom) { - protoTarg.linkedFromDocs = linkedFrom = new List(); - } - linkedFrom.push(linkDoc); + // the complete list of document prototypes and their ids + export let textProto: Doc; const textProtoId = "textProto"; + export let histoProto: Doc; const histoProtoId = "histoProto"; + export let imageProto: Doc; const imageProtoId = "imageProto"; + export let webProto: Doc; const webProtoId = "webProto"; + export let collProto: Doc; const collProtoId = "collectionProto"; + export let kvpProto: Doc; const kvpProtoId = "kvpProto"; + export let videoProto: Doc; const videoProtoId = "videoProto"; + export let audioProto: Doc; const audioProtoId = "audioProto"; + export let pdfProto: Doc; const pdfProtoId = "pdfProto"; + export let iconProto: Doc; const iconProtoId = "iconProto"; - let linkedTo = Cast(protoSrc.linkedToDocs, listSpec(Doc)); - if (!linkedTo) { - protoSrc.linkedToDocs = linkedTo = new List(); + /** + * This function loads or initializes the prototype for each docment type. + * + * This is an asynchronous function because it has to attempt + * to fetch the prototype documents from the server. + * + * Once we have this object that maps the prototype ids to a potentially + * undefined document, we either initialize our private prototype + * variables with the document returned from the server or, if prototypes + * haven't been initialized, the newly initialized prototype document. + */ + export async function initialize(): Promise { + // non-guid string ids for each document prototype + let protoIds = [textProtoId, histoProtoId, collProtoId, imageProtoId, webProtoId, kvpProtoId, videoProtoId, audioProtoId, pdfProtoId, iconProtoId] + // fetch the actual prototype documents from the server + let actualProtos = await DocServer.GetRefFields(protoIds); + + // initialize prototype documents + textProto = actualProtos[textProtoId] as Doc || CreateTextProto(); + histoProto = actualProtos[histoProtoId] as Doc || CreateHistogramProto(); + collProto = actualProtos[collProtoId] as Doc || CreateCollectionProto(); + imageProto = actualProtos[imageProtoId] as Doc || CreateImageProto(); + webProto = actualProtos[webProtoId] as Doc || CreateWebProto(); + kvpProto = actualProtos[kvpProtoId] as Doc || CreateKVPProto(); + videoProto = actualProtos[videoProtoId] as Doc || CreateVideoProto(); + audioProto = actualProtos[audioProtoId] as Doc || CreateAudioProto(); + pdfProto = actualProtos[pdfProtoId] as Doc || CreatePdfProto(); + iconProto = actualProtos[iconProtoId] as Doc || CreateIconProto(); + } + + /** + * This is a convenience method that is used to initialize + * prototype documents for the first time. 
+ * + * @param protoId the id of the prototype, indicating the specific prototype + * to initialize (see the *protoId list at the top of the namespace) + * @param title the prototype document's title, follows *-PROTO + * @param layout the layout key for this prototype and thus the + * layout key that all delegates will inherit + * @param options any value specified in the DocumentOptions object likewise + * becomes the default value for that key for all delegates + */ + function buildPrototype(protoId: string, title: string, layout: string, options: DocumentOptions): Doc { + return Doc.assign(new Doc(protoId, true), { ...options, title: title, layout: layout, baseLayout: layout }); + } + + // INDIVIDUAL INITIALIZERS + + function CreateImageProto(): Doc { + let defaultAttrs = { + x: 0, + y: 0, + nativeWidth: 600, + width: 300, + backgroundLayout: ImageBox.LayoutString(), + curPage: 0 + }; + return buildPrototype(imageProtoId, "IMAGE_PROTO", CollectionView.LayoutString("annotations"), defaultAttrs); + } + + function CreateHistogramProto(): Doc { + let defaultAttrs = { + x: 0, + y: 0, + width: 300, + height: 300, + backgroundColor: "black", + backgroundLayout: + HistogramBox.LayoutString() + }; + return buildPrototype(histoProtoId, "HISTO PROTO", CollectionView.LayoutString("annotations"), defaultAttrs); + } + + function CreateIconProto(): Doc { + let defaultAttrs = { + x: 0, + y: 0, + width: Number(MINIMIZED_ICON_SIZE), + height: Number(MINIMIZED_ICON_SIZE) + }; + return buildPrototype(iconProtoId, "ICON_PROTO", IconBox.LayoutString(), defaultAttrs); + } + + function CreateTextProto(): Doc { + let defaultAttrs = { + x: 0, + y: 0, + width: 300, + height: 150, + backgroundColor: "#f1efeb" + }; + return buildPrototype(textProtoId, "TEXT_PROTO", FormattedTextBox.LayoutString(), defaultAttrs); + } + + function CreatePdfProto(): Doc { + let defaultAttrs = { + x: 0, + y: 0, + nativeWidth: 1200, + width: 300, + backgroundLayout: PDFBox.LayoutString(), + curPage: 1 + }; + return buildPrototype(pdfProtoId, "PDF_PROTO", CollectionPDFView.LayoutString("annotations"), defaultAttrs); + } + + function CreateWebProto(): Doc { + let defaultAttrs = { + x: 0, + y: 0, + width: 300, + height: 300 + }; + return buildPrototype(webProtoId, "WEB_PROTO", WebBox.LayoutString(), defaultAttrs); + } + + function CreateCollectionProto(): Doc { + let defaultAttrs = { + panX: 0, + panY: 0, + scale: 1, + width: 500, + height: 500 + }; + return buildPrototype(collProtoId, "COLLECTION_PROTO", CollectionView.LayoutString("data"), defaultAttrs); + } + + function CreateKVPProto(): Doc { + let defaultAttrs = { + x: 0, + y: 0, + width: 300, + height: 150 + }; + return buildPrototype(kvpProtoId, "KVP_PROTO", KeyValueBox.LayoutString(), defaultAttrs); + } + + function CreateVideoProto(): Doc { + let defaultAttrs = { + x: 0, + y: 0, + nativeWidth: 600, + width: 300, + backgroundLayout: VideoBox.LayoutString(), + curPage: 0 + }; + return buildPrototype(videoProtoId, "VIDEO_PROTO", CollectionVideoView.LayoutString("annotations"), defaultAttrs); + } + + function CreateAudioProto(): Doc { + let defaultAttrs = { + x: 0, + y: 0, + width: 300, + height: 150 } - linkedTo.push(linkDoc); - return linkDoc; - }, "make link"); + return buildPrototype(audioProtoId, "AUDIO_PROTO", AudioBox.LayoutString(), defaultAttrs); + } } + /** + * Encapsulates the factory used to create new document instances + * delegated from top-level prototypes + */ + export namespace Create { -} + const delegateKeys = ["x", "y", "width", "height", "panX", "panY"]; -export 
namespace Docs { - let textProto: Doc; - let histoProto: Doc; - let imageProto: Doc; - let webProto: Doc; - let collProto: Doc; - let kvpProto: Doc; - let videoProto: Doc; - let audioProto: Doc; - let pdfProto: Doc; - let iconProto: Doc; - const textProtoId = "textProto"; - const histoProtoId = "histoProto"; - const pdfProtoId = "pdfProto"; - const imageProtoId = "imageProto"; - const webProtoId = "webProto"; - const collProtoId = "collectionProto"; - const kvpProtoId = "kvpProto"; - const videoProtoId = "videoProto"; - const audioProtoId = "audioProto"; - const iconProtoId = "iconProto"; - - export function initProtos(): Promise { - return DocServer.GetRefFields([textProtoId, histoProtoId, collProtoId, imageProtoId, webProtoId, kvpProtoId, videoProtoId, audioProtoId, pdfProtoId, iconProtoId]).then(fields => { - textProto = fields[textProtoId] as Doc || CreateTextPrototype(); - histoProto = fields[histoProtoId] as Doc || CreateHistogramPrototype(); - collProto = fields[collProtoId] as Doc || CreateCollectionPrototype(); - imageProto = fields[imageProtoId] as Doc || CreateImagePrototype(); - webProto = fields[webProtoId] as Doc || CreateWebPrototype(); - kvpProto = fields[kvpProtoId] as Doc || CreateKVPPrototype(); - videoProto = fields[videoProtoId] as Doc || CreateVideoPrototype(); - audioProto = fields[audioProtoId] as Doc || CreateAudioPrototype(); - pdfProto = fields[pdfProtoId] as Doc || CreatePdfPrototype(); - iconProto = fields[iconProtoId] as Doc || CreateIconPrototype(); - }); - } + /** + * This function receives the relevant document prototype and uses + * it to create a new delegate of that base-level prototype, i.e. the + * underlying data document, which it then delegates again + * to create the view document. + * + * It also takes the opportunity to register the user + * that created the document and the time of creation. + * + * @param proto the specific document prototype off of which to model + * this new instance (textProto, imageProto, etc.) + * @param data the Field to store at this new instance's data key + * @param options any initial values to provide for this new instance + * @param delegId if applicable, an existing document id. If undefined, Doc's + * constructor just generates a new GUID. This is currently used + * only when creating a DockDocument from the current user's already existing + * main document. 
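The layering this comment describes can be sketched minimally as follows, assuming Prototypes.textProto as the prototype and a plain string as the data; all of the names are taken from this patch:

    const dataDoc = Doc.MakeDelegate(Prototypes.textProto);   // the underlying data document
    dataDoc.data = "hello";                                    // the Field stored at the data key
    dataDoc.author = CurrentUserUtils.email;                   // bookkeeping recorded on the data document
    const viewDoc = Doc.MakeDelegate(dataDoc);                 // the view document delegated from it
    viewDoc.x = 100;                                           // delegate keys (x, y, width, ...) stay on the view document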
+ */ + function CreateInstanceFromProto(proto: Doc, data: Field, options: DocumentOptions, delegId?: string) { + const { omit: protoProps, extract: delegateProps } = OmitKeys(options, delegateKeys); - function setupPrototypeOptions(protoId: string, title: string, layout: string, options: DocumentOptions): Doc { - return Doc.assign(new Doc(protoId, true), { ...options, title: title, layout: layout, baseLayout: layout }); - } - function SetInstanceOptions(doc: Doc, options: DocumentOptions, value: U) { - const deleg = Doc.MakeDelegate(doc); - deleg.data = value; - return Doc.assign(deleg, options); - } - function SetDelegateOptions(doc: Doc, options: DocumentOptions, id?: string) { - const deleg = Doc.MakeDelegate(doc, id); - return Doc.assign(deleg, options); - } + if (!("author" in protoProps)) { + protoProps.author = CurrentUserUtils.email; + } - function CreateImagePrototype(): Doc { - let imageProto = setupPrototypeOptions(imageProtoId, "IMAGE_PROTO", CollectionView.LayoutString("annotations"), - { x: 0, y: 0, nativeWidth: 600, width: 300, backgroundLayout: ImageBox.LayoutString(), curPage: 0 }); - return imageProto; - } + if (!("creationDate" in protoProps)) { + protoProps.creationDate = new DateField; + } - function CreateHistogramPrototype(): Doc { - let histoProto = setupPrototypeOptions(histoProtoId, "HISTO PROTO", CollectionView.LayoutString("annotations"), - { x: 0, y: 0, width: 300, height: 300, backgroundColor: "black", backgroundLayout: HistogramBox.LayoutString() }); - return histoProto; - } - function CreateIconPrototype(): Doc { - let iconProto = setupPrototypeOptions(iconProtoId, "ICON_PROTO", IconBox.LayoutString(), - { x: 0, y: 0, width: Number(MINIMIZED_ICON_SIZE), height: Number(MINIMIZED_ICON_SIZE) }); - return iconProto; - } - function CreateTextPrototype(): Doc { - let textProto = setupPrototypeOptions(textProtoId, "TEXT_PROTO", FormattedTextBox.LayoutString(), - { x: 0, y: 0, width: 300, height: 150, backgroundColor: "#f1efeb" }); - return textProto; - } - function CreatePdfPrototype(): Doc { - let pdfProto = setupPrototypeOptions(pdfProtoId, "PDF_PROTO", CollectionPDFView.LayoutString("annotations"), - { x: 0, y: 0, nativeWidth: 1200, width: 300, backgroundLayout: PDFBox.LayoutString(), curPage: 1 }); - return pdfProto; - } - function CreateWebPrototype(): Doc { - let webProto = setupPrototypeOptions(webProtoId, "WEB_PROTO", WebBox.LayoutString(), - { x: 0, y: 0, width: 300, height: 300 }); - return webProto; - } - function CreateCollectionPrototype(): Doc { - let collProto = setupPrototypeOptions(collProtoId, "COLLECTION_PROTO", CollectionView.LayoutString("data"), - { panX: 0, panY: 0, scale: 1, width: 500, height: 500 }); - return collProto; - } + protoProps.isPrototype = true; - function CreateKVPPrototype(): Doc { - let kvpProto = setupPrototypeOptions(kvpProtoId, "KVP_PROTO", KeyValueBox.LayoutString(), - { x: 0, y: 0, width: 300, height: 150 }); - return kvpProto; - } - function CreateVideoPrototype(): Doc { - let videoProto = setupPrototypeOptions(videoProtoId, "VIDEO_PROTO", CollectionVideoView.LayoutString("annotations"), - { x: 0, y: 0, nativeWidth: 600, width: 300, backgroundLayout: VideoBox.LayoutString(), curPage: 0 }); - return videoProto; - } - function CreateAudioPrototype(): Doc { - let audioProto = setupPrototypeOptions(audioProtoId, "AUDIO_PROTO", AudioBox.LayoutString(), - { x: 0, y: 0, width: 300, height: 150 }); - return audioProto; - } + let dataDoc = MakeDataDelegate(proto, protoProps, data); + let viewDoc = Doc.MakeDelegate(dataDoc, 
delegId); - function CreateInstance(proto: Doc, data: Field, options: DocumentOptions, delegId?: string) { - const { omit: protoProps, extract: delegateProps } = OmitKeys(options, delegateKeys); - if (!("author" in protoProps)) { - protoProps.author = CurrentUserUtils.email; + return Doc.assign(viewDoc, delegateProps); } - if (!("creationDate" in protoProps)) { - protoProps.creationDate = new DateField; + + /** + * This function receives the relevant top-level document prototype + * and models a new instance by delegating from it. + * + * Note that it stores the data it receives at the delegate's data key, + * and applies any document options to this new delegate / instance. + * @param proto the prototype from which to model this new delegate + * @param options initial values to apply to this new delegate + * @param value the data to store in this new delegate + */ + function MakeDataDelegate(proto: Doc, options: DocumentOptions, value: D) { + const deleg = Doc.MakeDelegate(proto); + deleg.data = value; + return Doc.assign(deleg, options); } - protoProps.isPrototype = true; - return SetDelegateOptions(SetInstanceOptions(proto, protoProps, data), delegateProps, delegId); - } + export function ImageDocument(url: string, options: DocumentOptions = {}) { + let inst = CreateInstanceFromProto(Prototypes.imageProto, new ImageField(new URL(url)), { title: path.basename(url), ...options }); + requestImageSize(window.origin + RouteStore.corsProxy + "/" + url) + .then((size: any) => { + let aspect = size.height / size.width; + if (!inst.proto!.nativeWidth) { + inst.proto!.nativeWidth = size.width; + } + inst.proto!.nativeHeight = Number(inst.proto!.nativeWidth!) * aspect; + inst.proto!.height = NumCast(inst.proto!.width) * aspect; + }) + .catch((err: any) => console.log(err)); + return inst; - export function ImageDocument(url: string, options: DocumentOptions = {}) { - let inst = CreateInstance(imageProto, new ImageField(new URL(url)), { title: path.basename(url), ...options }); - requestImageSize(window.origin + RouteStore.corsProxy + "/" + url) - .then((size: any) => { - let aspect = size.height / size.width; - if (!inst.proto!.nativeWidth) { - inst.proto!.nativeWidth = size.width; + // let doc = SetInstanceOptions(GetImagePrototype(), { ...options, layoutKeys: [KeyStore.Data, KeyStore.Annotations, KeyStore.Caption] }, + // [new URL(url), ImageField]); + // doc.SetText(KeyStore.Caption, "my caption..."); + // doc.SetText(KeyStore.BackgroundLayout, EmbeddedCaption()); + // doc.SetText(KeyStore.OverlayLayout, FixedCaption()); + // return doc; + } + + export function VideoDocument(url: string, options: DocumentOptions = {}) { + return CreateInstanceFromProto(Prototypes.videoProto, new VideoField(new URL(url)), options); + } + + export function AudioDocument(url: string, options: DocumentOptions = {}) { + return CreateInstanceFromProto(Prototypes.audioProto, new AudioField(new URL(url)), options); + } + + export function HistogramDocument(histoOp: HistogramOperation, options: DocumentOptions = {}) { + return CreateInstanceFromProto(Prototypes.histoProto, new HistogramField(histoOp), options); + } + + export function TextDocument(options: DocumentOptions = {}) { + return CreateInstanceFromProto(Prototypes.textProto, "", options); + } + + export function IconDocument(icon: string, options: DocumentOptions = {}) { + return CreateInstanceFromProto(Prototypes.iconProto, new IconField(icon), options); + } + + export function PdfDocument(url: string, options: DocumentOptions = {}) { + return 
CreateInstanceFromProto(Prototypes.pdfProto, new PdfField(new URL(url)), options); + } + + export async function DBDocument(url: string, options: DocumentOptions = {}, columnOptions: DocumentOptions = {}) { + let schemaName = options.title ? options.title : "-no schema-"; + let ctlog = await Gateway.Instance.GetSchema(url, schemaName); + if (ctlog && ctlog.schemas) { + let schema = ctlog.schemas[0]; + let schemaDoc = Docs.Create.TreeDocument([], { ...options, nativeWidth: undefined, nativeHeight: undefined, width: 150, height: 100, title: schema.displayName! }); + let schemaDocuments = Cast(schemaDoc.data, listSpec(Doc), []); + if (!schemaDocuments) { + return; } - inst.proto!.nativeHeight = Number(inst.proto!.nativeWidth!) * aspect; - inst.proto!.height = NumCast(inst.proto!.width) * aspect; - }) - .catch((err: any) => console.log(err)); - return inst; - // let doc = SetInstanceOptions(GetImagePrototype(), { ...options, layoutKeys: [KeyStore.Data, KeyStore.Annotations, KeyStore.Caption] }, - // [new URL(url), ImageField]); - // doc.SetText(KeyStore.Caption, "my caption..."); - // doc.SetText(KeyStore.BackgroundLayout, EmbeddedCaption()); - // doc.SetText(KeyStore.OverlayLayout, FixedCaption()); - // return doc; - } - export function VideoDocument(url: string, options: DocumentOptions = {}) { - return CreateInstance(videoProto, new VideoField(new URL(url)), options); - } - export function AudioDocument(url: string, options: DocumentOptions = {}) { - return CreateInstance(audioProto, new AudioField(new URL(url)), options); - } + CurrentUserUtils.AddNorthstarSchema(schema, schemaDoc); + const docs = schemaDocuments; + CurrentUserUtils.GetAllNorthstarColumnAttributes(schema).map(attr => { + DocServer.GetRefField(attr.displayName! + ".alias").then(action((field: Opt) => { + if (field instanceof Doc) { + docs.push(field); + } else { + var atmod = new ColumnAttributeModel(attr); + let histoOp = new HistogramOperation(schema.displayName!, + new AttributeTransformationModel(atmod, AggregateFunction.None), + new AttributeTransformationModel(atmod, AggregateFunction.Count), + new AttributeTransformationModel(atmod, AggregateFunction.Count)); + docs.push(Docs.Create.HistogramDocument(histoOp, { ...columnOptions, width: 200, height: 200, title: attr.displayName! })); + } + })); + }); + return schemaDoc; + } + return Docs.Create.TreeDocument([], { width: 50, height: 100, title: schemaName }); + } - export function HistogramDocument(histoOp: HistogramOperation, options: DocumentOptions = {}) { - return CreateInstance(histoProto, new HistogramField(histoOp), options); - } - export function TextDocument(options: DocumentOptions = {}) { - return CreateInstance(textProto, "", options); - } - export function IconDocument(icon: string, options: DocumentOptions = {}) { - return CreateInstance(iconProto, new IconField(icon), options); - } - export function PdfDocument(url: string, options: DocumentOptions = {}) { - return CreateInstance(pdfProto, new PdfField(new URL(url)), options); - } + export function WebDocument(url: string, options: DocumentOptions = {}) { + return CreateInstanceFromProto(Prototypes.webProto, new WebField(new URL(url)), options); + } + + export function HtmlDocument(html: string, options: DocumentOptions = {}) { + return CreateInstanceFromProto(Prototypes.webProto, new HtmlField(html), options); + } - export async function DBDocument(url: string, options: DocumentOptions = {}, columnOptions: DocumentOptions = {}) { - let schemaName = options.title ? 
options.title : "-no schema-"; - let ctlog = await Gateway.Instance.GetSchema(url, schemaName); - if (ctlog && ctlog.schemas) { - let schema = ctlog.schemas[0]; - let schemaDoc = Docs.TreeDocument([], { ...options, nativeWidth: undefined, nativeHeight: undefined, width: 150, height: 100, title: schema.displayName! }); - let schemaDocuments = Cast(schemaDoc.data, listSpec(Doc), []); - if (!schemaDocuments) { - return; + export function KVPDocument(document: Doc, options: DocumentOptions = {}) { + return CreateInstanceFromProto(Prototypes.kvpProto, document, { title: document.title + ".kvp", ...options }); + } + + export function FreeformDocument(documents: Array, options: DocumentOptions, makePrototype: boolean = true) { + if (!makePrototype) { + return MakeDataDelegate(Prototypes.collProto, { ...options, viewType: CollectionViewType.Freeform }, new List(documents)); } - CurrentUserUtils.AddNorthstarSchema(schema, schemaDoc); - const docs = schemaDocuments; - CurrentUserUtils.GetAllNorthstarColumnAttributes(schema).map(attr => { - DocServer.GetRefField(attr.displayName! + ".alias").then(action((field: Opt) => { - if (field instanceof Doc) { - docs.push(field); - } else { - var atmod = new ColumnAttributeModel(attr); - let histoOp = new HistogramOperation(schema.displayName!, - new AttributeTransformationModel(atmod, AggregateFunction.None), - new AttributeTransformationModel(atmod, AggregateFunction.Count), - new AttributeTransformationModel(atmod, AggregateFunction.Count)); - docs.push(Docs.HistogramDocument(histoOp, { ...columnOptions, width: 200, height: 200, title: attr.displayName! })); + return CreateInstanceFromProto(Prototypes.collProto, new List(documents), { schemaColumns: new List(["title"]), ...options, viewType: CollectionViewType.Freeform }); + } + + export function SchemaDocument(schemaColumns: string[], documents: Array, options: DocumentOptions) { + return CreateInstanceFromProto(Prototypes.collProto, new List(documents), { schemaColumns: new List(schemaColumns), ...options, viewType: CollectionViewType.Schema }); + } + + export function TreeDocument(documents: Array, options: DocumentOptions) { + return CreateInstanceFromProto(Prototypes.collProto, new List(documents), { schemaColumns: new List(["title"]), ...options, viewType: CollectionViewType.Tree }); + } + + export function StackingDocument(documents: Array, options: DocumentOptions) { + return CreateInstanceFromProto(Prototypes.collProto, new List(documents), { schemaColumns: new List(["title"]), ...options, viewType: CollectionViewType.Stacking }); + } + + export function DockDocument(documents: Array, config: string, options: DocumentOptions, id?: string) { + return CreateInstanceFromProto(Prototypes.collProto, new List(documents), { ...options, viewType: CollectionViewType.Docking, dockingConfig: config }, id); + } + + export type DocConfig = { + doc: Doc, + initialWidth?: number + } + + export function StandardCollectionDockingDocument(configs: Array, options: DocumentOptions, id?: string, type: string = "row") { + let layoutConfig = { + content: [ + { + type: type, + content: [ + ...configs.map(config => CollectionDockingView.makeDocumentConfig(config.doc, config.initialWidth)) + ] } - })); - }); - return schemaDoc; + ] + }; + return DockDocument(configs.map(c => c.doc), JSON.stringify(layoutConfig), options, id); } - return Docs.TreeDocument([], { width: 50, height: 100, title: schemaName }); - } - export function WebDocument(url: string, options: DocumentOptions = {}) { - return CreateInstance(webProto, new 
WebField(new URL(url)), options); - } - export function HtmlDocument(html: string, options: DocumentOptions = {}) { - return CreateInstance(webProto, new HtmlField(html), options); - } - export function KVPDocument(document: Doc, options: DocumentOptions = {}) { - return CreateInstance(kvpProto, document, { title: document.title + ".kvp", ...options }); - } - export function FreeformDocument(documents: Array, options: DocumentOptions, makePrototype: boolean = true) { - if (!makePrototype) { - return SetInstanceOptions(collProto, { ...options, viewType: CollectionViewType.Freeform }, new List(documents)); + + export function CaptionDocument(doc: Doc) { + const captionDoc = Doc.MakeAlias(doc); + captionDoc.overlayLayout = Templating.FixedCaption(); + captionDoc.width = Cast(doc.width, "number", 0); + captionDoc.height = Cast(doc.height, "number", 0); + return captionDoc; } - return CreateInstance(collProto, new List(documents), { schemaColumns: new List(["title"]), ...options, viewType: CollectionViewType.Freeform }); - } - export function SchemaDocument(schemaColumns: string[], documents: Array, options: DocumentOptions) { - return CreateInstance(collProto, new List(documents), { schemaColumns: new List(schemaColumns), ...options, viewType: CollectionViewType.Schema }); - } - export function TreeDocument(documents: Array, options: DocumentOptions) { - return CreateInstance(collProto, new List(documents), { schemaColumns: new List(["title"]), ...options, viewType: CollectionViewType.Tree }); - } - export function StackingDocument(documents: Array, options: DocumentOptions) { - return CreateInstance(collProto, new List(documents), { schemaColumns: new List(["title"]), ...options, viewType: CollectionViewType.Stacking }); - } - export function DockDocument(documents: Array, config: string, options: DocumentOptions, id?: string) { - return CreateInstance(collProto, new List(documents), { ...options, viewType: CollectionViewType.Docking, dockingConfig: config }, id); - } - export type DocConfig = { - doc: Doc, - initialWidth?: number - } - export function StandardCollectionDockingDocument(configs: Array, options: DocumentOptions, id?: string, type: string = "row") { - let layoutConfig = { - content: [ - { - type: type, - content: [ - ...configs.map(config => CollectionDockingView.makeDocumentConfig(config.doc, config.initialWidth)) - ] - } - ] - }; - return DockDocument(configs.map(c => c.doc), JSON.stringify(layoutConfig), options, id); } - export function CaptionDocument(doc: Doc) { - const captionDoc = Doc.MakeAlias(doc); - captionDoc.overlayLayout = FixedCaption(); - captionDoc.width = Cast(doc.width, "number", 0); - captionDoc.height = Cast(doc.height, "number", 0); - return captionDoc; - } + export namespace Templating { - // example of custom display string for an image that shows a caption. - function EmbeddedCaption() { - return `
-
` - + ImageBox.LayoutString() + - `
-
` - + FormattedTextBox.LayoutString("caption") + - `
-
`; - } - export function FixedCaption(fieldName: string = "caption") { - return `
-
` - + FormattedTextBox.LayoutString(fieldName) + - `
-
`; - } + // example of custom display string for an image that shows a caption. + export function EmbeddedCaption() { + return `
+
` + + ImageBox.LayoutString() + + `
+
` + + FormattedTextBox.LayoutString("caption") + + `
+
`; + } - function OuterCaption() { - return (` -
-
- {layout} -
-
- -
-
- `); - } - function InnerCaption() { - return (` -
-
- {layout} -
-
- -
-
+ export function FixedCaption(fieldName: string = "caption") { + return `
+
` + + FormattedTextBox.LayoutString(fieldName) + + `
+
`; + } + + export function OuterCaption() { + return (` +
+
+ {layout} +
+
+ +
+
`); - } + } - /* + export function InnerCaption() { + return (` +
+
+ {layout} +
+
+ +
+
+ `); + } - this template requires an additional style setting on the collectionView-cont to make the layout relative - -.collectionView-cont { - position: relative; - width: 100%; - height: 100%; -} - */ - function Percentaption() { - return (` -
-
- {layout} -
-
- -
-
+ /* + this template requires an additional style setting on the collectionView-cont to make the layout relative + .collectionView-cont { + position: relative; + width: 100%; + height: 100%; + } + */ + export function PercentCaption() { + return (` +
+
+ {layout} +
+
+ +
+
`); + } + + } +} + +export namespace DocUtils { + + export function MakeLink(source: Doc, target: Doc) { + let protoSrc = source.proto ? source.proto : source; + let protoTarg = target.proto ? target.proto : target; + UndoManager.RunInBatch(() => { + let linkDoc = Docs.Create.TextDocument({ width: 100, height: 30, borderRounding: -1 }); + //let linkDoc = new Doc; + linkDoc.proto!.title = "-link name-"; + linkDoc.proto!.linkDescription = ""; + linkDoc.proto!.linkTags = "Default"; + + linkDoc.proto!.linkedTo = target; + linkDoc.proto!.linkedToPage = target.curPage; + linkDoc.proto!.linkedFrom = source; + linkDoc.proto!.linkedFromPage = source.curPage; + + let linkedFrom = Cast(protoTarg.linkedFromDocs, listSpec(Doc)); + if (!linkedFrom) { + protoTarg.linkedFromDocs = linkedFrom = new List(); + } + linkedFrom.push(linkDoc); + + let linkedTo = Cast(protoSrc.linkedToDocs, listSpec(Doc)); + if (!linkedTo) { + protoSrc.linkedToDocs = linkedTo = new List(); + } + linkedTo.push(linkDoc); + return linkDoc; + }, "make link"); } + } \ No newline at end of file diff --git a/src/client/views/DocumentDecorations.tsx b/src/client/views/DocumentDecorations.tsx index da9b1253e..787033455 100644 --- a/src/client/views/DocumentDecorations.tsx +++ b/src/client/views/DocumentDecorations.tsx @@ -274,7 +274,7 @@ export class DocumentDecorations extends React.Component<{}, { value: string }> @undoBatch @action createIcon = (selected: DocumentView[], layoutString: string): Doc => { let doc = selected[0].props.Document; - let iconDoc = Docs.IconDocument(layoutString); + let iconDoc = Docs.Create.IconDocument(layoutString); iconDoc.isButton = true; iconDoc.proto!.title = selected.length > 1 ? "-multiple-.icon" : StrCast(doc.title) + ".icon"; iconDoc.labelField = selected.length > 1 ? 
undefined : this._fieldKey; diff --git a/src/client/views/Main.tsx b/src/client/views/Main.tsx index 3d9750a85..98b14f9c8 100644 --- a/src/client/views/Main.tsx +++ b/src/client/views/Main.tsx @@ -5,7 +5,7 @@ import * as ReactDOM from 'react-dom'; import * as React from 'react'; (async () => { - await Docs.initProtos(); + await Docs.Prototypes.initialize(); await CurrentUserUtils.loadCurrentUser(); ReactDOM.render(, document.getElementById('root')); })(); diff --git a/src/client/views/MainView.tsx b/src/client/views/MainView.tsx index 426e2440a..984db0426 100644 --- a/src/client/views/MainView.tsx +++ b/src/client/views/MainView.tsx @@ -55,7 +55,7 @@ export class MainView extends React.Component { private set mainContainer(doc: Opt) { if (doc) { if (!("presentationView" in doc)) { - doc.presentationView = Docs.TreeDocument([], { title: "Presentation" }); + doc.presentationView = Docs.Create.TreeDocument([], { title: "Presentation" }); } CurrentUserUtils.UserDocument.activeWorkspace = doc; } @@ -151,12 +151,12 @@ export class MainView extends React.Component { createNewWorkspace = async (id?: string) => { const list = Cast(CurrentUserUtils.UserDocument.data, listSpec(Doc)); if (list) { - let freeformDoc = Docs.FreeformDocument([], { x: 0, y: 400, width: this.pwidth * .7, height: this.pheight, title: `WS collection ${list.length + 1}` }); + let freeformDoc = Docs.Create.FreeformDocument([], { x: 0, y: 400, width: this.pwidth * .7, height: this.pheight, title: `WS collection ${list.length + 1}` }); let configs = [ { doc: CurrentUserUtils.UserDocument, initialWidth: 150 }, { doc: freeformDoc, initialWidth: 600 } ] - let mainDoc = Docs.StandardCollectionDockingDocument(configs, { title: `Workspace ${list.length + 1}` }, id); + let mainDoc = Docs.Create.StandardCollectionDockingDocument(configs, { title: `Workspace ${list.length + 1}` }, id); list.push(mainDoc); // bcz: strangely, we need a timeout to prevent exceptions/issues initializing GoldenLayout (the rendering engine for Main Container) setTimeout(() => { @@ -242,18 +242,18 @@ export class MainView extends React.Component { let audiourl = "http://techslides.com/demos/samples/sample.mp3"; let videourl = "http://techslides.com/demos/sample-videos/small.mp4"; - let addTextNode = action(() => Docs.TextDocument({ borderRounding: -1, width: 200, height: 200, title: "a text note" })); - let addColNode = action(() => Docs.FreeformDocument([], { width: this.pwidth * .7, height: this.pheight, title: "a freeform collection" })); - let addDockingNode = action(() => Docs.StandardCollectionDockingDocument([{ doc: addColNode(), initialWidth: 200 }], { width: 200, height: 200, title: "a nested docking freeform collection" })); - let addSchemaNode = action(() => Docs.SchemaDocument(["title"], [], { width: 200, height: 200, title: "a schema collection" })); + let addTextNode = action(() => Docs.Create.TextDocument({ borderRounding: -1, width: 200, height: 200, title: "a text note" })); + let addColNode = action(() => Docs.Create.FreeformDocument([], { width: this.pwidth * .7, height: this.pheight, title: "a freeform collection" })); + let addDockingNode = action(() => Docs.Create.StandardCollectionDockingDocument([{ doc: addColNode(), initialWidth: 200 }], { width: 200, height: 200, title: "a nested docking freeform collection" })); + let addSchemaNode = action(() => Docs.Create.SchemaDocument(["title"], [], { width: 200, height: 200, title: "a schema collection" })); let addTreeNode = action(() => CurrentUserUtils.UserDocument); //let addTreeNode = 
action(() => Docs.TreeDocument([CurrentUserUtils.UserDocument], { width: 250, height: 400, title: "Library:" + CurrentUserUtils.email, dropAction: "alias" })); // let addTreeNode = action(() => Docs.TreeDocument(this._northstarSchemas, { width: 250, height: 400, title: "northstar schemas", dropAction: "copy" })); - let addVideoNode = action(() => Docs.VideoDocument(videourl, { width: 200, title: "video node" })); - let addPDFNode = action(() => Docs.PdfDocument(pdfurl, { width: 200, height: 200, title: "a pdf doc" })); - let addImageNode = action(() => Docs.ImageDocument(imgurl, { width: 200, title: "an image of a cat" })); - let addWebNode = action(() => Docs.WebDocument(weburl, { width: 200, height: 200, title: "a sample web page" })); - let addAudioNode = action(() => Docs.AudioDocument(audiourl, { width: 200, height: 200, title: "audio node" })); + let addVideoNode = action(() => Docs.Create.VideoDocument(videourl, { width: 200, title: "video node" })); + let addPDFNode = action(() => Docs.Create.PdfDocument(pdfurl, { width: 200, height: 200, title: "a pdf doc" })); + let addImageNode = action(() => Docs.Create.ImageDocument(imgurl, { width: 200, title: "an image of a cat" })); + let addWebNode = action(() => Docs.Create.WebDocument(weburl, { width: 200, height: 200, title: "a sample web page" })); + let addAudioNode = action(() => Docs.Create.AudioDocument(audiourl, { width: 200, height: 200, title: "audio node" })); let btns: [React.RefObject, IconName, string, () => Doc][] = [ [React.createRef(), "font", "Add Textbox", addTextNode], diff --git a/src/client/views/SearchBox.tsx b/src/client/views/SearchBox.tsx index 63d2065e2..7164d98a4 100644 --- a/src/client/views/SearchBox.tsx +++ b/src/client/views/SearchBox.tsx @@ -166,7 +166,7 @@ export class SearchBox extends React.Component { y += 300; } } - return Docs.FreeformDocument(docs, { width: 400, height: 400, panX: 175, panY: 175, backgroundColor: "grey", title: `Search Docs: "${this.searchString}"` }); + return Docs.Create.FreeformDocument(docs, { width: 400, height: 400, panX: 175, panY: 175, backgroundColor: "grey", title: `Search Docs: "${this.searchString}"` }); } // Useful queries: diff --git a/src/client/views/collections/CollectionDockingView.tsx b/src/client/views/collections/CollectionDockingView.tsx index dfb8fac35..e2bcb10ec 100644 --- a/src/client/views/collections/CollectionDockingView.tsx +++ b/src/client/views/collections/CollectionDockingView.tsx @@ -6,50 +6,32 @@ import * as ReactDOM from 'react-dom'; import Measure, { ContentRect } from "react-measure"; import * as GoldenLayout from "../../../client/goldenLayout"; import { Doc, Field, Opt, DocListCast } from "../../../new_fields/Doc"; -import { FieldId } from "../../../new_fields/RefField"; import { listSpec } from "../../../new_fields/Schema"; import { Cast, NumCast, StrCast } from "../../../new_fields/Types"; import { emptyFunction, returnTrue, Utils } from "../../../Utils"; import { DocServer } from "../../DocServer"; import { DragLinksAsDocuments, DragManager } from "../../util/DragManager"; -import { Transform } from '../../util/Transform'; import { undoBatch, UndoManager } from "../../util/UndoManager"; -import { DocumentView } from "../nodes/DocumentView"; import "./CollectionDockingView.scss"; import { SubCollectionViewProps } from "./CollectionSubView"; import React = require("react"); import { ParentDocSelector } from './ParentDocumentSelector'; import { DocumentManager } from '../../util/DocumentManager'; -import { CollectionViewType } from 
'./CollectionBaseView'; import { Id } from '../../../new_fields/FieldSymbols'; -import { CurrentUserUtils } from '../../../server/authentication/models/current_user_utils'; +import { DockedFrameRenderer } from './DockedFrameRenderer'; @observer export class CollectionDockingView extends React.Component { public static TopLevel: CollectionDockingView; - public static makeDocumentConfig(document: Doc, width?: number) { - return { - type: 'react-component', - component: 'DocumentFrameRenderer', - title: document.title, - width: width, - props: { - documentId: document[Id], - } - }; - } - - private makeDocConfig = (document: Doc, width?: number) => { - const config = CollectionDockingView.makeDocumentConfig(document, width); - (config.props as any).parent = this; - return config; - } - private _goldenLayout: any = null; private _containerRef = React.createRef(); + reactionDisposer?: IReactionDisposer; + _removedDocs: Doc[] = []; private _flush: boolean = false; private _ignoreStateChange = ""; private _isPointerDown = false; + hack: boolean = false; + undohack: any = null; constructor(props: SubCollectionViewProps) { super(props); @@ -57,32 +39,93 @@ export class CollectionDockingView extends React.Component - CollectionDockingView.AddRightSplit(dragDoc, true).contentItems[0].tab._dragListener. - onMouseDown({ pageX: e.pageX, pageY: e.pageY, preventDefault: emptyFunction, button: 0 })); + + componentDidMount: () => void = () => { + if (this._containerRef.current) { + this.reactionDisposer = reaction( + () => StrCast(this.props.Document.dockingConfig), + () => { + if (!this._goldenLayout || this._ignoreStateChange !== this.retrieveConfiguration()) { + // Because this is in a set timeout, if this component unmounts right after mounting, + // we will leak a GoldenLayout, because we try to destroy it before we ever create it + setTimeout(() => this.setupGoldenLayout(), 1); + } + this._ignoreStateChange = ""; + }, { fireImmediately: true }); + + // window.addEventListener('resize', this.onResize); // bcz: would rather add this event to the parent node, but resize events only come from Window + } } - private openFullScreen = (document: Doc) => { - let newItemStackConfig = { - type: 'stack', - content: [this.makeDocConfig(document)] - }; - var docconfig = this._goldenLayout.root.layoutManager.createContentItem(newItemStackConfig, this._goldenLayout); - this._goldenLayout.root.contentItems[0].addChild(docconfig); - docconfig.callDownwards('_$init'); - this._goldenLayout._$maximiseItem(docconfig); - this._ignoreStateChange = this.retrieveConfiguration(); - this.stateChanged(); + componentWillUnmount: () => void = () => { + try { + this._goldenLayout.unbind('itemDropped', this.itemDropped); + this._goldenLayout.unbind('tabCreated', this.tabCreated); + this._goldenLayout.unbind('stackCreated', this.stackCreated); + this._goldenLayout.unbind('tabDestroyed', this.tabDestroyed); + } catch (e) { + console.log("Unable to unbind Golden Layout event listener...", e); + } + if (this._goldenLayout) this._goldenLayout.destroy(); + this._goldenLayout = null; + + if (this.reactionDisposer) { + this.reactionDisposer(); + } } - @action - public static OpenFullScreen(document: Doc, dockingView: CollectionDockingView = CollectionDockingView.TopLevel) { - dockingView.openFullScreen(document); + setupGoldenLayout() { + var config = StrCast(this.props.Document.dockingConfig); + if (config) { + if (!this._goldenLayout) { + this.initializeConfiguration(config); + } + else { + if (config === this.retrieveConfiguration()) { + 
return; + } + try { + this._goldenLayout.unbind('itemDropped', this.itemDropped); + this._goldenLayout.unbind('tabCreated', this.tabCreated); + this._goldenLayout.unbind('tabDestroyed', this.tabDestroyed); + this._goldenLayout.unbind('stackCreated', this.stackCreated); + } catch (e) { } + this._goldenLayout.destroy(); + this.initializeConfiguration(config); + } + this._goldenLayout.on('itemDropped', this.itemDropped); + this._goldenLayout.on('tabCreated', this.tabCreated); + this._goldenLayout.on('tabDestroyed', this.tabDestroyed); + this._goldenLayout.on('stackCreated', this.stackCreated); + this._goldenLayout.registerComponent('DocumentFrameRenderer', DockedFrameRenderer); + this._goldenLayout.container = this._containerRef.current; + if (this._goldenLayout.config.maximisedItemId === '__glMaximised') { + try { + this._goldenLayout.config.root.getItemsById(this._goldenLayout.config.maximisedItemId)[0].toggleMaximise(); + } catch (e) { + this._goldenLayout.config.maximisedItemId = null; + } + } + this._goldenLayout.init(); + } + } + + private makeDocConfig = (document: Doc, width?: number) => { + const config = CollectionDockingView.makeDocumentConfig(document, width); + (config.props as any).parent = this; + return config; + } + + public static makeDocumentConfig(document: Doc, width?: number) { + return { + type: 'react-component', + component: 'DocumentFrameRenderer', + title: document.title, + width: width, + props: { + documentId: document[Id], + } + }; } initializeConfiguration = (configText: string) => { @@ -109,44 +152,29 @@ export class CollectionDockingView extends React.Component { - let retVal = false; - if (dockingView._goldenLayout.root.contentItems[0].isRow) { - retVal = Array.from(dockingView._goldenLayout.root.contentItems[0].contentItems).some((child: any) => { - if (child.contentItems.length === 1 && child.contentItems[0].config.component === "DocumentFrameRenderer" && - Doc.AreProtosEqual(DocumentManager.Instance.getDocumentViewById(child.contentItems[0].config.props.documentId)!.Document, document)) { - child.contentItems[0].remove(); - dockingView.layoutChanged(document); - return true; - } else { - Array.from(child.contentItems).filter((tab: any) => tab.config.component === "DocumentFrameRenderer").some((tab: any, j: number) => { - if (Doc.AreProtosEqual(DocumentManager.Instance.getDocumentViewById(tab.config.props.documentId)!.Document, document)) { - child.contentItems[j].remove(); - child.config.activeItemIndex = Math.max(child.contentItems.length - 1, 0); - let docs = Cast(dockingView.props.Document.data, listSpec(Doc)); - docs && docs.indexOf(document) !== -1 && docs.splice(docs.indexOf(document), 1); - return true; - } - return false; - }); - } - return false; - }); - } - if (retVal) { - dockingView.stateChanged(); - } - return retVal; + public StartOtherDrag(dragDocs: Doc[], e: any) { + this.hack = true; + this.undohack = UndoManager.StartBatch("goldenDrag"); + dragDocs.map(dragDoc => + CollectionDockingView.AddRightSplit(dragDoc, true).contentItems[0].tab._dragListener. 
+ onMouseDown({ pageX: e.pageX, pageY: e.pageY, preventDefault: emptyFunction, button: 0 })); } @action - layoutChanged(removed?: Doc) { - this._goldenLayout.root.callDownwards('setSize', [this._goldenLayout.width, this._goldenLayout.height]); - this._goldenLayout.emit('stateChanged'); + public static OpenFullScreen(document: Doc, dockingView: CollectionDockingView = CollectionDockingView.TopLevel) { + dockingView.openFullScreen(document); + } + + private openFullScreen = (document: Doc) => { + let newItemStackConfig = { + type: 'stack', + content: [this.makeDocConfig(document)] + }; + var docconfig = this._goldenLayout.root.layoutManager.createContentItem(newItemStackConfig, this._goldenLayout); + this._goldenLayout.root.contentItems[0].addChild(docconfig); + docconfig.callDownwards('_$init'); + this._goldenLayout._$maximiseItem(docconfig); this._ignoreStateChange = this.retrieveConfiguration(); - if (removed) CollectionDockingView.TopLevel._removedDocs.push(removed); this.stateChanged(); } @@ -209,75 +237,47 @@ export class CollectionDockingView extends React.Component { + let retVal = false; + if (dockingView._goldenLayout.root.contentItems[0].isRow) { + retVal = Array.from(dockingView._goldenLayout.root.contentItems[0].contentItems).some((child: any) => { + if (child.contentItems.length === 1 && child.contentItems[0].config.component === "DocumentFrameRenderer" && + Doc.AreProtosEqual(DocumentManager.Instance.getDocumentViewById(child.contentItems[0].config.props.documentId)!.Document, document)) { + child.contentItems[0].remove(); + dockingView.layoutChanged(document); + return true; + } else { + Array.from(child.contentItems).filter((tab: any) => tab.config.component === "DocumentFrameRenderer").some((tab: any, j: number) => { + if (Doc.AreProtosEqual(DocumentManager.Instance.getDocumentViewById(tab.config.props.documentId)!.Document, document)) { + child.contentItems[j].remove(); + child.config.activeItemIndex = Math.max(child.contentItems.length - 1, 0); + let docs = Cast(dockingView.props.Document.data, listSpec(Doc)); + docs && docs.indexOf(document) !== -1 && docs.splice(docs.indexOf(document), 1); + return true; + } + return false; + }); } - } - this._goldenLayout.init(); + return false; + }); } - } - reactionDisposer?: IReactionDisposer; - componentDidMount: () => void = () => { - if (this._containerRef.current) { - this.reactionDisposer = reaction( - () => StrCast(this.props.Document.dockingConfig), - () => { - if (!this._goldenLayout || this._ignoreStateChange !== this.retrieveConfiguration()) { - // Because this is in a set timeout, if this component unmounts right after mounting, - // we will leak a GoldenLayout, because we try to destroy it before we ever create it - setTimeout(() => this.setupGoldenLayout(), 1); - } - this._ignoreStateChange = ""; - }, { fireImmediately: true }); - - // window.addEventListener('resize', this.onResize); // bcz: would rather add this event to the parent node, but resize events only come from Window + if (retVal) { + dockingView.stateChanged(); } + return retVal; } - componentWillUnmount: () => void = () => { - try { - this._goldenLayout.unbind('itemDropped', this.itemDropped); - this._goldenLayout.unbind('tabCreated', this.tabCreated); - this._goldenLayout.unbind('stackCreated', this.stackCreated); - this._goldenLayout.unbind('tabDestroyed', this.tabDestroyed); - } catch (e) { - } - if (this._goldenLayout) this._goldenLayout.destroy(); - this._goldenLayout = null; - // window.removeEventListener('resize', this.onResize); - - if 
(this.reactionDisposer) { - this.reactionDisposer(); - } + @action + layoutChanged(removed?: Doc) { + this._goldenLayout.root.callDownwards('setSize', [this._goldenLayout.width, this._goldenLayout.height]); + this._goldenLayout.emit('stateChanged'); + this._ignoreStateChange = this.retrieveConfiguration(); + if (removed) CollectionDockingView.TopLevel._removedDocs.push(removed); + this.stateChanged(); } + @action onResize = (size: ContentRect) => { // bcz: since GoldenLayout isn't a React component itself, we need to notify it to resize when its document container's size has changed @@ -387,6 +387,9 @@ export class CollectionDockingView extends React.Component { + console.log("DROPPPP THE BASS!", e); + } ReactDOM.render( CollectionDockingView.AddTab(stack, doc)} />, upDiv); tab.reactComponents = [upDiv]; tab.element.append(upDiv); @@ -422,7 +425,6 @@ export class CollectionDockingView extends React.Component { //stack.header.controlsContainer.find('.lm_popout').hide(); @@ -462,102 +464,4 @@ export class CollectionDockingView extends React.Component { - _mainCont = React.createRef(); - @observable private _panelWidth = 0; - @observable private _panelHeight = 0; - @observable private _document: Opt; - private get parentProps(): SubCollectionViewProps { - return this.props.parent.props; - } - - get _stack(): any { - let parent = this.props.glContainer.parent.parent; - if (this._document && this._document.excludeFromLibrary && parent.parent && parent.parent.contentItems.length > 1) - return parent.parent.contentItems[1]; - return parent; - } - constructor(props: any) { - super(props); - DocServer.GetRefField(this.props.documentId).then(action((f: Opt) => this._document = f as Doc)); - } - - nativeWidth = () => NumCast(this._document!.nativeWidth, this._panelWidth); - nativeHeight = () => NumCast(this._document!.nativeHeight, this._panelHeight); - contentScaling = () => { - const nativeH = this.nativeHeight(); - const nativeW = this.nativeWidth(); - let wscale = this._panelWidth / nativeW; - return wscale * nativeH > this._panelHeight ? this._panelHeight / nativeH : wscale; - } - - ScreenToLocalTransform = () => { - if (this._mainCont.current && this._mainCont.current.children) { - let { scale, translateX, translateY } = Utils.GetScreenTransform(this._mainCont.current.children[0].firstChild as HTMLElement); - scale = Utils.GetScreenTransform(this._mainCont.current).scale; - return this.parentProps.ScreenToLocalTransform().translate(-translateX, -translateY).scale(1 / this.contentScaling() / scale); - } - return Transform.Identity(); - } - get scaleToFitMultiplier() { - let docWidth = NumCast(this._document!.width); - let docHeight = NumCast(this._document!.height); - if (NumCast(this._document!.nativeWidth) || !docWidth || !this._panelWidth || !this._panelHeight) return 1; - if (StrCast(this._document!.layout).indexOf("Collection") === -1 || - NumCast(this._document!.viewType) !== CollectionViewType.Freeform) return 1; - let scaling = Math.max(1, this._panelWidth / docWidth * docHeight > this._panelHeight ? - this._panelHeight / docHeight : this._panelWidth / docWidth); - return scaling; - } - get previewPanelCenteringOffset() { return (this._panelWidth - this.nativeWidth() * this.contentScaling()) / 2; } - - addDocTab = (doc: Doc, location: string) => { - if (location === "onRight") { - CollectionDockingView.AddRightSplit(doc); - } else { - CollectionDockingView.AddTab(this._stack, doc); - } - } - get content() { - if (!this._document) { - return (null); - } - return ( -
- -
); - } - - render() { - let theContent = this.content; - return !this._document ? (null) : - { this._panelWidth = r.offset.width; this._panelHeight = r.offset.height; })}> - {({ measureRef }) =>
{theContent}
} -
; - } } \ No newline at end of file diff --git a/src/client/views/collections/CollectionSchemaView.tsx b/src/client/views/collections/CollectionSchemaView.tsx index 11d71d023..477879b79 100644 --- a/src/client/views/collections/CollectionSchemaView.tsx +++ b/src/client/views/collections/CollectionSchemaView.tsx @@ -261,7 +261,7 @@ export class CollectionSchemaView extends CollectionSubView(doc => doc) { let dbName = StrCast(this.props.Document.title); let res = await Gateway.Instance.PostSchema(csv, dbName); if (self.props.CollectionView.props.addDocument) { - let schemaDoc = await Docs.DBDocument("https://www.cs.brown.edu/" + dbName, { title: dbName }, { dbDoc: self.props.Document }); + let schemaDoc = await Docs.Create.DBDocument("https://www.cs.brown.edu/" + dbName, { title: dbName }, { dbDoc: self.props.Document }); if (schemaDoc) { //self.props.CollectionView.props.addDocument(schemaDoc, false); self.props.Document.schemaDoc = schemaDoc; diff --git a/src/client/views/collections/CollectionSubView.tsx b/src/client/views/collections/CollectionSubView.tsx index be37efd3d..440a2410b 100644 --- a/src/client/views/collections/CollectionSubView.tsx +++ b/src/client/views/collections/CollectionSubView.tsx @@ -113,20 +113,20 @@ export function CollectionSubView(schemaCtor: (doc: Doc) => T) { protected async getDocumentFromType(type: string, path: string, options: DocumentOptions): Promise> { let ctor: ((path: string, options: DocumentOptions) => (Doc | Promise)) | undefined = undefined; if (type.indexOf("image") !== -1) { - ctor = Docs.ImageDocument; + ctor = Docs.Create.ImageDocument; } if (type.indexOf("video") !== -1) { - ctor = Docs.VideoDocument; + ctor = Docs.Create.VideoDocument; } if (type.indexOf("audio") !== -1) { - ctor = Docs.AudioDocument; + ctor = Docs.Create.AudioDocument; } if (type.indexOf("pdf") !== -1) { - ctor = Docs.PdfDocument; + ctor = Docs.Create.PdfDocument; options.nativeWidth = 1200; } if (type.indexOf("excel") !== -1) { - ctor = Docs.DBDocument; + ctor = Docs.Create.DBDocument; options.dropAction = "copy"; } if (type.indexOf("html") !== -1) { @@ -145,7 +145,7 @@ export function CollectionSubView(schemaCtor: (doc: Doc) => T) { }); return undefined; } - ctor = Docs.WebDocument; + ctor = Docs.Create.WebDocument; options = { height: options.width, ...options, title: path, nativeWidth: undefined }; } return ctor ? 
ctor(path, options) : undefined; @@ -175,13 +175,13 @@ export function CollectionSubView(schemaCtor: (doc: Doc) => T) { return; } if (html && html.indexOf(" { onWorkspaceContextMenu = (e: React.MouseEvent): void => { if (!e.isPropagationStopped()) { // need to test this because GoldenLayout causes a parallel hierarchy in the React DOM for its children and the main document view7 ContextMenu.Instance.addItem({ description: "Open as Workspace", event: undoBatch(() => MainView.Instance.openWorkspace(this.props.document)) }); - ContextMenu.Instance.addItem({ description: "Open Fields", event: () => this.props.addDocTab(Docs.KVPDocument(this.props.document, { width: 300, height: 300 }), "onRight"), icon: "layer-group" }); + ContextMenu.Instance.addItem({ description: "Open Fields", event: () => this.props.addDocTab(Docs.Create.KVPDocument(this.props.document, { width: 300, height: 300 }), "onRight"), icon: "layer-group" }); if (NumCast(this.props.document.viewType) !== CollectionViewType.Docking) { ContextMenu.Instance.addItem({ description: "Open Tab", event: () => this.props.addDocTab(this.props.document, "inTab"), icon: "folder" }); ContextMenu.Instance.addItem({ description: "Open Right", event: () => this.props.addDocTab(this.props.document, "onRight"), icon: "caret-square-right" }); diff --git a/src/client/views/collections/CollectionVideoView.tsx b/src/client/views/collections/CollectionVideoView.tsx index 7853544d5..bd5cd5450 100644 --- a/src/client/views/collections/CollectionVideoView.tsx +++ b/src/client/views/collections/CollectionVideoView.tsx @@ -98,7 +98,7 @@ export class CollectionVideoView extends React.Component { SearchBox.convertDataUri(dataUrl, filename).then((returnedFilename) => { if (returnedFilename) { let url = DocServer.prepend(returnedFilename); - let imageSummary = Docs.ImageDocument(url, { + let imageSummary = Docs.Create.ImageDocument(url, { x: NumCast(this.props.Document.x) + width, y: NumCast(this.props.Document.y), width: 150, height: height / width * 150, title: "--snapshot" + NumCast(this.props.Document.curPage) + " image-" }); diff --git a/src/client/views/collections/DockedFrameRenderer.tsx b/src/client/views/collections/DockedFrameRenderer.tsx new file mode 100644 index 000000000..25d4b2a49 --- /dev/null +++ b/src/client/views/collections/DockedFrameRenderer.tsx @@ -0,0 +1,116 @@ +import 'golden-layout/src/css/goldenlayout-base.css'; +import 'golden-layout/src/css/goldenlayout-dark-theme.css'; +import { action, observable, reaction, Lambda, IReactionDisposer } from "mobx"; +import { observer } from "mobx-react"; +import Measure, { ContentRect } from "react-measure"; +import { Doc, Field, Opt, DocListCast } from "../../../new_fields/Doc"; +import { FieldId } from "../../../new_fields/RefField"; +import { Cast, NumCast, StrCast } from "../../../new_fields/Types"; +import { emptyFunction, returnTrue, Utils } from "../../../Utils"; +import { DocServer } from "../../DocServer"; +import { Transform } from '../../util/Transform'; +import { DocumentView } from "../nodes/DocumentView"; +import "./CollectionDockingView.scss"; +import { SubCollectionViewProps } from "./CollectionSubView"; +import React = require("react"); +import { CollectionViewType } from './CollectionBaseView'; +import { Id } from '../../../new_fields/FieldSymbols'; +import { CollectionDockingView } from './CollectionDockingView'; + +interface DockedFrameProps { + documentId: FieldId; + glContainer: any; + glEventHub: any; + parent: CollectionDockingView; +} + +@observer +export class 
DockedFrameRenderer extends React.Component { + _mainCont = React.createRef(); + @observable private _panelWidth = 0; + @observable private _panelHeight = 0; + @observable private _document: Opt; + private get parentProps(): SubCollectionViewProps { + return this.props.parent.props; + } + + get _stack(): any { + let parent = this.props.glContainer.parent.parent; + if (this._document && this._document.excludeFromLibrary && parent.parent && parent.parent.contentItems.length > 1) + return parent.parent.contentItems[1]; + return parent; + } + constructor(props: any) { + super(props); + DocServer.GetRefField(this.props.documentId).then(action((f: Opt) => this._document = f as Doc)); + } + + nativeWidth = () => NumCast(this._document!.nativeWidth, this._panelWidth); + nativeHeight = () => NumCast(this._document!.nativeHeight, this._panelHeight); + contentScaling = () => { + const nativeH = this.nativeHeight(); + const nativeW = this.nativeWidth(); + let wscale = this._panelWidth / nativeW; + return wscale * nativeH > this._panelHeight ? this._panelHeight / nativeH : wscale; + } + + ScreenToLocalTransform = () => { + if (this._mainCont.current && this._mainCont.current.children) { + let { scale, translateX, translateY } = Utils.GetScreenTransform(this._mainCont.current.children[0].firstChild as HTMLElement); + scale = Utils.GetScreenTransform(this._mainCont.current).scale; + return this.parentProps.ScreenToLocalTransform().translate(-translateX, -translateY).scale(1 / this.contentScaling() / scale); + } + return Transform.Identity(); + } + get scaleToFitMultiplier() { + let docWidth = NumCast(this._document!.width); + let docHeight = NumCast(this._document!.height); + if (NumCast(this._document!.nativeWidth) || !docWidth || !this._panelWidth || !this._panelHeight) return 1; + if (StrCast(this._document!.layout).indexOf("Collection") === -1 || + NumCast(this._document!.viewType) !== CollectionViewType.Freeform) return 1; + let scaling = Math.max(1, this._panelWidth / docWidth * docHeight > this._panelHeight ? + this._panelHeight / docHeight : this._panelWidth / docWidth); + return scaling; + } + get previewPanelCenteringOffset() { return (this._panelWidth - this.nativeWidth() * this.contentScaling()) / 2; } + + addDocTab = (doc: Doc, location: string) => { + if (location === "onRight") { + CollectionDockingView.AddRightSplit(doc); + } else { + CollectionDockingView.AddTab(this._stack, doc); + } + } + get content() { + if (!this._document) { + return (null); + } + return ( +
+ +
); + } + + render() { + let theContent = this.content; + return !this._document ? (null) : + { this._panelWidth = r.offset.width; this._panelHeight = r.offset.height; })}> + {({ measureRef }) =>
{theContent}
} +
; + } +} \ No newline at end of file diff --git a/src/client/views/collections/collectionFreeForm/MarqueeView.tsx b/src/client/views/collections/collectionFreeForm/MarqueeView.tsx index 29734fa19..cd386abfa 100644 --- a/src/client/views/collections/collectionFreeForm/MarqueeView.tsx +++ b/src/client/views/collections/collectionFreeForm/MarqueeView.tsx @@ -79,7 +79,7 @@ export class MarqueeView extends React.Component } ns.map(line => { let indent = line.search(/\S|$/); - let newBox = Docs.TextDocument({ width: 200, height: 35, x: x + indent / 3 * 10, y: y, documentText: "@@@" + line, title: line }); + let newBox = Docs.Create.TextDocument({ width: 200, height: 35, x: x + indent / 3 * 10, y: y, documentText: "@@@" + line, title: line }); this.props.addDocument(newBox, false); y += 40 * this.props.getTransform().Scale; }); @@ -89,13 +89,13 @@ export class MarqueeView extends React.Component navigator.clipboard.readText().then(text => { let ns = text.split("\n").filter(t => t.trim() !== "\r" && t.trim() !== ""); if (ns.length === 1 && text.startsWith("http")) { - this.props.addDocument(Docs.ImageDocument(text, { nativeWidth: 300, width: 300, x: x, y: y }), false);// paste an image from its URL in the paste buffer + this.props.addDocument(Docs.Create.ImageDocument(text, { nativeWidth: 300, width: 300, x: x, y: y }), false);// paste an image from its URL in the paste buffer } else { this.pasteTable(ns, x, y); } }); } else { - let newBox = Docs.TextDocument({ width: 200, height: 100, x: x, y: y, title: "-typed text-" }); + let newBox = Docs.Create.TextDocument({ width: 200, height: 100, x: x, y: y, title: "-typed text-" }); this.props.addLiveTextDocument(newBox); } e.stopPropagation(); @@ -136,7 +136,7 @@ export class MarqueeView extends React.Component doc.width = 200; docList.push(doc); } - let newCol = Docs.SchemaDocument([...(groupAttr ? ["_group"] : []), ...columns.filter(c => c)], docList, { x: x, y: y, title: "droppedTable", width: 300, height: 100 }); + let newCol = Docs.Create.SchemaDocument([...(groupAttr ? ["_group"] : []), ...columns.filter(c => c)], docList, { x: x, y: y, title: "droppedTable", width: 300, height: 100 }); this.props.addDocument(newCol, false); } @@ -259,7 +259,7 @@ export class MarqueeView extends React.Component let ink = Cast(this.props.container.props.Document.ink, InkField); let inkData = ink ? 
ink.inkData : undefined; let zoomBasis = NumCast(this.props.container.props.Document.scale, 1); - let newCollection = Docs.FreeformDocument(selected, { + let newCollection = Docs.Create.FreeformDocument(selected, { x: bounds.left, y: bounds.top, panX: 0, @@ -283,14 +283,14 @@ export class MarqueeView extends React.Component d.page = -1; return d; }); - let summary = Docs.TextDocument({ x: bounds.left, y: bounds.top, width: 300, height: 100, backgroundColor: "#e2ad32" /* yellow */, title: "-summary-" }); + let summary = Docs.Create.TextDocument({ x: bounds.left, y: bounds.top, width: 300, height: 100, backgroundColor: "#e2ad32" /* yellow */, title: "-summary-" }); newCollection.proto!.summaryDoc = summary; selected = [newCollection]; newCollection.x = bounds.left + bounds.width; summary.proto!.subBulletDocs = new List(selected); //summary.proto!.maximizeLocation = "inTab"; // or "inPlace", or "onRight" summary.templates = new List([Templates.Bullet.Layout]); - let container = Docs.FreeformDocument([summary, newCollection], { x: bounds.left, y: bounds.top, width: 300, height: 200, title: "-summary-" }); + let container = Docs.Create.FreeformDocument([summary, newCollection], { x: bounds.left, y: bounds.top, width: 300, height: 200, title: "-summary-" }); container.viewType = CollectionViewType.Stacking; this.props.addLiveTextDocument(container); // }); @@ -303,11 +303,11 @@ export class MarqueeView extends React.Component d.page = -1; return d; }); - let summary = Docs.TextDocument({ x: bounds.left, y: bounds.top, width: 300, height: 100, backgroundColor: "#e2ad32" /* yellow */, title: "-summary-" }); + let summary = Docs.Create.TextDocument({ x: bounds.left, y: bounds.top, width: 300, height: 100, backgroundColor: "#e2ad32" /* yellow */, title: "-summary-" }); SearchBox.convertDataUri(dataUrl, "icon" + summary[Id] + "_image").then((returnedFilename) => { if (returnedFilename) { let url = DocServer.prepend(returnedFilename); - let imageSummary = Docs.ImageDocument(url, { + let imageSummary = Docs.Create.ImageDocument(url, { x: bounds.left, y: bounds.top + 100 / zoomBasis, width: 150, height: bounds.height / bounds.width * 150, title: "-summary image-" }); diff --git a/src/client/views/nodes/DocumentView.tsx b/src/client/views/nodes/DocumentView.tsx index efba26c2c..16e40000d 100644 --- a/src/client/views/nodes/DocumentView.tsx +++ b/src/client/views/nodes/DocumentView.tsx @@ -302,7 +302,7 @@ export class DocumentView extends DocComponent(Docu } deleteClicked = (): void => { this.props.removeDocument && this.props.removeDocument(this.props.Document); } - fieldsClicked = (): void => { this.props.addDocTab(Docs.KVPDocument(this.props.Document, { width: 300, height: 300 }), "onRight") }; + fieldsClicked = (): void => { this.props.addDocTab(Docs.Create.KVPDocument(this.props.Document, { width: 300, height: 300 }), "onRight") }; makeBtnClicked = (): void => { let doc = Doc.GetProto(this.props.Document); doc.isButton = !BoolCast(doc.isButton, false); @@ -418,7 +418,7 @@ export class DocumentView extends DocComponent(Docu cm.addItem({ description: "Find aliases", event: async () => { const aliases = await SearchUtil.GetAliasesOfDocument(this.props.Document); - this.props.addDocTab && this.props.addDocTab(Docs.SchemaDocument(["title"], aliases, {}), "onRight"); + this.props.addDocTab && this.props.addDocTab(Docs.Create.SchemaDocument(["title"], aliases, {}), "onRight"); }, icon: "search" }); cm.addItem({ description: "Center View", event: () => this.props.focus(this.props.Document), icon: 
"crosshairs" }); diff --git a/src/mobile/ImageUpload.tsx b/src/mobile/ImageUpload.tsx index bfc1738fc..a8f94b746 100644 --- a/src/mobile/ImageUpload.tsx +++ b/src/mobile/ImageUpload.tsx @@ -33,7 +33,7 @@ class Uploader extends React.Component { onClick = async () => { try { this.status = "initializing protos"; - await Docs.initProtos(); + await Docs.Prototypes.initialize(); let imgPrev = document.getElementById("img_preview"); if (imgPrev) { let files: FileList | null = inputRef.current!.files; @@ -53,7 +53,7 @@ class Uploader extends React.Component { const json = await res.json(); json.map(async (file: any) => { let path = window.location.origin + file; - var doc = Docs.ImageDocument(path, { nativeWidth: 200, width: 200, title: name }); + var doc = Docs.Create.ImageDocument(path, { nativeWidth: 200, width: 200, title: name }); this.status = "getting user document"; diff --git a/src/new_fields/Doc.ts b/src/new_fields/Doc.ts index 7f7263cf1..af65f5482 100644 --- a/src/new_fields/Doc.ts +++ b/src/new_fields/Doc.ts @@ -172,6 +172,18 @@ export namespace Doc { } return protos; } + + /** + * This function is intended to model Object.assign({}, {}) [https://mzl.la/1Mo3l21], which copies + * the values of the properties of a source object into the target. + * + * This is just a specific, Dash-authored version that serves the same role for our + * Doc class. + * + * @param doc the target document into which you'd like to insert the new fields + * @param fields the fields to project onto the target. Its type signature defines a mapping from some string key + * to a potentially undefined field, where each entry in this mapping is optional. + */ export function assign(doc: Doc, fields: Partial>>) { for (const key in fields) { if (fields.hasOwnProperty(key)) { diff --git a/src/new_fields/util.ts b/src/new_fields/util.ts index 2b304c373..8caceb063 100644 --- a/src/new_fields/util.ts +++ b/src/new_fields/util.ts @@ -60,6 +60,7 @@ export function getter(target: any, prop: string | symbol | number, receiver: an } return getField(target, prop); } + function getProtoField(protoField: Doc | undefined, prop: string | number, cb?: (field: Field | undefined) => void) { if (!protoField) return undefined; let field = protoField[prop]; diff --git a/src/server/authentication/models/current_user_utils.ts b/src/server/authentication/models/current_user_utils.ts index e5b7a025b..169be3b99 100644 --- a/src/server/authentication/models/current_user_utils.ts +++ b/src/server/authentication/models/current_user_utils.ts @@ -33,8 +33,8 @@ export class CurrentUserUtils { doc.title = this.email; doc.data = new List(); doc.excludeFromLibrary = true; - doc.optionalRightCollection = Docs.StackingDocument([], { title: "New mobile uploads" }); - // doc.library = Docs.TreeDocument([doc], { title: `Library: ${CurrentUserUtils.email}` }); + doc.optionalRightCollection = Docs.Create.StackingDocument([], { title: "New mobile uploads" }); + // doc.library = Docs.Create.TreeDocument([doc], { title: `Library: ${CurrentUserUtils.email}` }); // (doc.library as Doc).excludeFromLibrary = true; return doc; } @@ -94,12 +94,12 @@ export class CurrentUserUtils { // new AttributeTransformationModel(atmod, AggregateFunction.None), // new AttributeTransformationModel(atmod, AggregateFunction.Count), // new AttributeTransformationModel(atmod, AggregateFunction.Count)); - // schemaDocuments.push(Docs.HistogramDocument(histoOp, { width: 200, height: 200, title: attr.displayName! 
})); + // schemaDocuments.push(Docs.Create.HistogramDocument(histoOp, { width: 200, height: 200, title: attr.displayName! })); // } // }))); // return promises; // }, [] as Promise[])); - // return CurrentUserUtils._northstarSchemas.push(Docs.TreeDocument(schemaDocuments, { width: 50, height: 100, title: schema.displayName! })); + // return CurrentUserUtils._northstarSchemas.push(Docs.Create.TreeDocument(schemaDocuments, { width: 50, height: 100, title: schema.displayName! })); // }); // } } -- cgit v1.2.3-70-g09d2 From a3e1f7332e0cb96dae0abd80a2972ae74ac31104 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Mon, 17 Jun 2019 13:30:37 -0400 Subject: Doc server documentation completed --- src/Utils.ts | 10 +- src/client/DocServer.ts | 285 +++++++++++++++++++++++++-------- src/client/util/SerializationHelper.ts | 4 +- src/new_fields/Doc.ts | 4 +- src/new_fields/util.ts | 4 +- src/server/index.ts | 12 +- 6 files changed, 235 insertions(+), 84 deletions(-) (limited to 'src') diff --git a/src/Utils.ts b/src/Utils.ts index 611c61135..657bd673e 100644 --- a/src/Utils.ts +++ b/src/Utils.ts @@ -73,14 +73,14 @@ export class Utils { }; } - public static Emit(socket: Socket | SocketIOClient.Socket, message: Message, args: T) { + public static emit(socket: Socket | SocketIOClient.Socket, message: Message, args: T) { this.log("Emit", message.Name, args, false); socket.emit(message.Message, args); } - public static EmitCallback(socket: Socket | SocketIOClient.Socket, message: Message, args: T): Promise; - public static EmitCallback(socket: Socket | SocketIOClient.Socket, message: Message, args: T, fn: (args: any) => any): void; - public static EmitCallback(socket: Socket | SocketIOClient.Socket, message: Message, args: T, fn?: (args: any) => any): void | Promise { + public static emitCallback(socket: Socket | SocketIOClient.Socket, message: Message, args: T): Promise; + public static emitCallback(socket: Socket | SocketIOClient.Socket, message: Message, args: T, fn: (args: any) => any): void; + public static emitCallback(socket: Socket | SocketIOClient.Socket, message: Message, args: T, fn?: (args: any) => any): void | Promise { this.log("Emit", message.Name, args, false); if (fn) { socket.emit(message.Message, args, this.loggingCallback('Receiving', fn, message.Name)); @@ -89,7 +89,7 @@ export class Utils { } } - public static AddServerHandler(socket: Socket | SocketIOClient.Socket, message: Message, handler: (args: T) => any) { + public static addServerHandler(socket: Socket | SocketIOClient.Socket, message: Message, handler: (args: T) => any) { socket.on(message.Message, this.loggingCallback('Incoming', handler, message.Name)); } diff --git a/src/client/DocServer.ts b/src/client/DocServer.ts index d759b4757..ad7c706b6 100644 --- a/src/client/DocServer.ts +++ b/src/client/DocServer.ts @@ -1,35 +1,116 @@ import * as OpenSocket from 'socket.io-client'; -import { MessageStore } from "./../server/Message"; +import { MessageStore, Diff } from "./../server/Message"; import { Opt } from '../new_fields/Doc'; import { Utils, emptyFunction } from '../Utils'; import { SerializationHelper } from './util/SerializationHelper'; import { RefField } from '../new_fields/RefField'; import { Id, HandleUpdate } from '../new_fields/FieldSymbols'; +/** + * This class encapsulates the transfer and cross-client synchronization of + * data stored only in documents (RefFields). In the process, it also + * creates and maintains a cache of documents so that they can be accessed + * more efficiently. 
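// The lower-cased socket helpers introduced in the Utils.ts hunk above keep their previous
// behavior: emit sends a one-way message, emitCallback with a callback installs a logged reply
// handler, and emitCallback without one returns a Promise resolving to the server's response.
// Illustrative call from inside an async function; the socket, message, and id placeholders
// come from the surrounding DocServer module:
const serializedField = await Utils.emitCallback(_socket, MessageStore.GetRefField, id);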
Currently, there is no cache eviction scheme in place. + * + * NOTE: while this class is technically abstracted to work with any [RefField], because + * [Doc] instances are the only [RefField] we need / have implemented at the moment, the documentation + * will treat all data used here as [Doc]s + * + * Any time we want to write a new field to the database (via the server) + * or update ourselves based on the server's update message, that occurs here + */ export namespace DocServer { + // a document cache for efficient document retrieval const _cache: { [id: string]: RefField | Promise> } = {}; + // the handle / client side endpoint of the web socket (https://bit.ly/2TeALea for more info) connection established with the server const _socket = OpenSocket(`${window.location.protocol}//${window.location.hostname}:4321`); + // this client's distinct GUID created at initialization const GUID: string = Utils.GenerateGuid(); + // indicates whether or not a document is currently being udpated, and, if so, its id + let updatingId: string | undefined; - export function makeReadOnly() { - _CreateField = emptyFunction; - _UpdateField = emptyFunction; - _respondToUpdate = emptyFunction; - } + export namespace Util { - export function prepend(extension: string): string { - return window.location.origin + extension; - } + /** + * Whenever the server sends us its handshake message on our + * websocket, we use the above function to return the handshake. + */ + Utils.addServerHandler(_socket, MessageStore.Foo, onConnection); + + /** + * This function emits a message (with this client's + * unique GUID) to the server + * indicating that this client has connected + */ + function onConnection() { + _socket.emit(MessageStore.Bar.Message, GUID); + } + + /** + * A convenience method. Prepends the full path (i.e. http://localhost:1050) to the + * requested extension + * @param extension the specified sub-path to append to the window origin + */ + export function prepend(extension: string): string { + return window.location.origin + extension; + } + + /** + * Emits a message to the server that wipes + * all documents in the database. + */ + export function deleteDatabase() { + Utils.emit(_socket, MessageStore.DeleteAll, {}); + } + + /** + * This disables this client's ability to write new fields, + * update existing fields, and update and reflect the changes if + * other clients update shared fields. Thus, the client can only read + * a static snapshot of their workspaces + * + * Currently this is conditionally called in MainView.tsx when analyzing + * the document's url. + */ + export function makeReadOnly() { + // replaces default functionality with no-ops + _createField = emptyFunction; + _emitFieldUpdate = emptyFunction; + _respondToUpdate = emptyFunction; + } - export function DeleteDatabase() { - Utils.Emit(_socket, MessageStore.DeleteAll, {}); } - export async function GetRefField(id: string): Promise> { + // RETRIEVE DOCS FROM SERVER + + /** + * Given a single Doc GUID, this utility function will asynchronously attempt to fetch the id's associated + * field, first looking in the RefField cache and then communicating with + * the server if the document has not been cached. 
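// Usage sketches for the Util helpers defined above; the path and the read-only check mirror
// how the search code and MainView.tsx call them elsewhere in this series, and the concrete
// origin is only an example.
const searchEndpoint = DocServer.Util.prepend("/search");   // window.location.origin + "/search", e.g. http://localhost:1050/search
if (window.location.search.includes("readonly")) {
    DocServer.Util.makeReadOnly();   // writes and updates become no-ops: a static, read-only snapshot
}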
+ * @param id the id of the requested document + */ + export async function getRefField(id: string): Promise> { + // an initial pass through the cache to determine whether the document needs to be fetched, + // is already in the process of being fetched or already exists in the + // cache let cached = _cache[id]; if (cached === undefined) { - const prom = Utils.EmitCallback(_socket, MessageStore.GetRefField, id).then(async fieldJson => { + // NOT CACHED => we'll have to send a request to the server + + // synchronously, we emit a single callback to the server requesting the serialized (i.e. represented by a string) + // field for the given ids. This returns a promise, which, when resolved, indicates the the JSON serialized version of + // the field has been returned from the server + const getSerializedField = Utils.emitCallback(_socket, MessageStore.GetRefField, id); + + // when the serialized RefField has been received, go head and begin deserializing it into an object. + // Here, once deserialized, we also invoke .proto to 'load' the document's prototype, which ensures that all + // future .proto calls on the Doc won't have to go farther than the cache to get their actual value. + const deserializeField = getSerializedField.then(async fieldJson => { + // deserialize const field = SerializationHelper.Deserialize(fieldJson); + // either way, overwrite or delete any promises cached at this id (that we inserted as flags + // to indicate that the field was in the process of being fetched). Now everything + // should be an actual value within or entirely absent from the cache. if (field !== undefined) { await field.proto; _cache[id] = field; @@ -38,41 +119,45 @@ export namespace DocServer { } return field; }); - _cache[id] = prom; - return prom; + // here, indicate that the document associated with this id is currently + // being retrieved and cached + _cache[id] = deserializeField; + return deserializeField; } else if (cached instanceof Promise) { + // BEING RETRIEVED AND CACHED => some other caller previously (likely recently) called GetRefField(s), + // and requested the document I'm looking for. Shouldn't fetch again, just + // return this promise which will resolve to the field itself (see 7) return cached; } else { + // CACHED => great, let's just return the cached field we have return cached; } } /** - * Given a list of Doc GUIDs, this utility function will asynchronously attempt to fetch each document - * associated with a given input id, first looking in the RefField cache and then communicating with - * the server if the document was not found there. - * + * Given a list of Doc GUIDs, this utility function will asynchronously attempt to each id's associated + * field, first looking in the RefField cache and then communicating with + * the server if the document has not been cached. 
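// The three cache branches above mean callers never inspect cache state themselves; awaiting
// the returned promise always yields the field (or undefined). Illustrative lookup with a
// made-up id, from inside an async function:
const field = await DocServer.getRefField("some-doc-guid");
if (field instanceof Doc) {
    // the Doc and its prototype chain are now resolved and sitting in the cache
}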
* @param ids the ids that map to the reqested documents */ - export async function GetRefFields(ids: string[]): Promise<{ [id: string]: Opt }> { + export async function getRefFields(ids: string[]): Promise<{ [id: string]: Opt }> { const requestedIds: string[] = []; const waitingIds: string[] = []; const promises: Promise>[] = []; const map: { [id: string]: Opt } = {}; - // 1) An initial pass through the cache to determine which documents need to be fetched, + // 1) an initial pass through the cache to determine which documents need to be fetched, // which are already in the process of being fetched and which already exist in the // cache for (const id of ids) { const cached = _cache[id]; - if (cached === undefined) { // NOT CACHED => we'll have to send a request to the server requestedIds.push(id); } else if (cached instanceof Promise) { - // BEING CACHED => someone else previously (likely recently) called GetRefFields, + // BEING RETRIEVED AND CACHED => some other caller previously (likely recently) called GetRefField(s), // and requested one of the documents I'm looking for. Shouldn't fetch again, just - // wait until this promise is resolved (see the second to last line of the function) + // wait until this promise is resolved (see 7) promises.push(cached); waitingIds.push(id); } else { @@ -81,46 +166,49 @@ export namespace DocServer { } } - // 2) Synchronously, we emit a single callback to the server requesting the documents for the given ids. - // This returns a promise, which, when resolved, indicates that all the JSON serialized versions of + // 2) synchronously, we emit a single callback to the server requesting the serialized (i.e. represented by a string) + // fields for the given ids. This returns a promise, which, when resolved, indicates that all the JSON serialized versions of // the fields have been returned from the server - const fieldsReceived: Promise = Utils.EmitCallback(_socket, MessageStore.GetRefFields, requestedIds); + const getSerializedFields: Promise = Utils.emitCallback(_socket, MessageStore.GetRefFields, requestedIds); - // 3) When the serialized RefFields have been received, go head and begin deserializing them into objects. + // 3) when the serialized RefFields have been received, go head and begin deserializing them into objects. // Here, once deserialized, we also invoke .proto to 'load' the documents' prototypes, which ensures that all - // future .proto calls won't have to go farther than the cache to get their actual value. - const fieldsDeserialized = fieldsReceived.then(async fields => { + // future .proto calls on the Doc won't have to go farther than the cache to get their actual value. + const deserializeFields = getSerializedFields.then(async fields => { const fieldMap: { [id: string]: RefField } = {}; - const deserializedFields: any = []; + const protosToLoad: any = []; for (const field of fields) { if (field !== undefined) { // deserialize let deserialized: any = SerializationHelper.Deserialize(field); fieldMap[field.id] = deserialized; - deserializedFields.push(deserialized.proto); + // adds to a list of promises that will be awaited asynchronously + protosToLoad.push(deserialized.proto); } } - // this actually handles the loeading of prototypes - await Promise.all(deserializedFields); + // this actually handles the loading of prototypes + await Promise.all(protosToLoad); return fieldMap; }); - // 4) Here, for each of the documents we've requested *ourselves* (i.e. 
weren't promises or found in the cache) + // 4) here, for each of the documents we've requested *ourselves* (i.e. weren't promises or found in the cache) // we set the value at the field's id to a promise that will resolve to the field. // When we find that promises exist at keys in the cache, THIS is where they were set, just by some other caller (method). - requestedIds.forEach(id => _cache[id] = fieldsDeserialized.then(fields => fields[id])); + // The mapping in the .then call ensures that when other callers await these promises, they'll + // get the resolved field + requestedIds.forEach(id => _cache[id] = deserializeFields.then(fields => fields[id])); - // 5) At this point, all fields have a) been returned from the server and b) been deserialized into actual Field objects whose + // 5) at this point, all fields have a) been returned from the server and b) been deserialized into actual Field objects whose // prototype documents, if any, have also been fetched and cached. - const fields = await fieldsDeserialized; + const fields = await deserializeFields; - // 6) With this confidence, we can now go through and update the cache at the ids of the fields that + // 6) with this confidence, we can now go through and update the cache at the ids of the fields that // we explicitly had to fetch. To finish it off, we add whatever value we've come up with for a given // id to the soon to be returned field mapping. requestedIds.forEach(id => { const field = fields[id]; - // either way, overwrite or delete any promises that we inserted as flags - // to indicate that the field was in the process of being fetched. Now everything + // either way, overwrite or delete any promises (that we inserted as flags + // to indicate that the field was in the process of being fetched). Now everything // should be an actual value within or entirely absent from the cache. if (field !== undefined) { _cache[id] = field; @@ -130,78 +218,141 @@ export namespace DocServer { map[id] = field; }); - // 7) Those promises we encountered in the else if of 1), which represent + // 7) those promises we encountered in the else if of 1), which represent // other callers having already submitted a request to the server for (a) document(s) // in which we're interested, must still be awaited so that we can return the proper // values for those as well. // - // Fortunately, those other callers will also hit their own version of 6) and clean up + // fortunately, those other callers will also hit their own version of 6) and clean up // the shared cache when these promises resolve, so all we have to do is... const otherCallersFetching = await Promise.all(promises); // ...extract the RefFields returned from the resolution of those promises and add them to our // own map. waitingIds.forEach((id, index) => map[id] = otherCallersFetching[index]); - // Now, we return our completed mapping from all of the ids that were passed into the method + // now, we return our completed mapping from all of the ids that were passed into the method // to their actual RefField | undefined values. This return value either becomes the input // argument to the caller's promise (i.e. GetRefFields.then(map => //do something with map...)) // or it is the direct return result if the promise is awaited. return map; } - let _UpdateField = (id: string, diff: any) => { + // WRITE A NEW DOCUMENT TO THE SERVER + + /** + * A wrapper around the function local variable _createField. 
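// An illustrative batch lookup built on the seven steps traced above (the ids are made up);
// the resolved map has an entry for every requested id, whether it was cached, in flight from
// another caller, or freshly fetched here:
const docMap = await DocServer.getRefFields(["guid-a", "guid-b", "guid-c"]);
const docA = docMap["guid-a"];   // a Doc, or undefined if the server has no such field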
+ * This allows us to swap in different executions while comfortably + * calling the same function throughout the code base (such as in Util.makeReadonly()) + * @param field the [RefField] to be serialized and sent to the server to be stored in the database + */ + export function createField(field: RefField) { + _createField(field); + } + + /** + * The default behavior for field creation. This inserts the [Doc] instance + * in the cache at its id, serializes the [Doc]'s initial state + * and finally sends that seruialized data to the server. + * @param field the [RefField] to be serialized and sent to the server to be stored in the database + */ + let _createField = (field: RefField) => { + _cache[field[Id]] = field; + const initialState = SerializationHelper.serialize(field); + Utils.emit(_socket, MessageStore.CreateField, initialState); + }; + + // NOTIFY THE SERVER OF AN UPDATE TO A DOC'S STATE + + /** + * A wrapper around the function local variable _emitFieldUpdate. + * This allows us to swap in different executions while comfortably + * calling the same function throughout the code base (such as in Util.makeReadonly()) + * @param id the id of the [Doc] whose state has been updated in our client + * @param updatedState the new value of the document. At some point, this + * should actually be a proper diff, to improve efficiency + */ + export function emitFieldUpdate(id: string, updatedState: any) { + _emitFieldUpdate(id, updatedState); + } + + /** + * The default behavior for indicating to the server that we've locally updated + * a document. + * @param id the id of the [Doc] whose state has been updated in our client + * @param updatedState the new value of the document. At some point, this + * should actually be a proper diff, to improve efficiency + */ + let _emitFieldUpdate = (id: string, updatedState: any) => { + // don't emit a duplicate message if the server is already + // (asynchronously) still updating this document's state. if (id === updatingId) { return; } - Utils.Emit(_socket, MessageStore.UpdateField, { id, diff }); + // creates the diff object to send to the server + let diff: Diff = { id, diff: updatedState }; + // emit this diff to notify server + Utils.emit(_socket, MessageStore.UpdateField, diff); }; - export function UpdateField(id: string, diff: any) { - _UpdateField(id, diff); - } + // RESPOND TO THE SERVER'S INDICATION THAT A DOC'S STATE HAS BEEN UPDATED - let _CreateField = (field: RefField) => { - _cache[field[Id]] = field; - const initialState = SerializationHelper.Serialize(field); - Utils.Emit(_socket, MessageStore.CreateField, initialState); - }; + /** + * Whenever the client receives an update, execute the + * current behavior. + */ + Utils.addServerHandler(_socket, MessageStore.UpdateField, respondToUpdate); - export function CreateField(field: RefField) { - _CreateField(field); + /** + * A wrapper around the function local variable _respondToUpdate. 
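// Taken together, the two write paths above are the client side of the protocol: creation caches
// the Doc and ships its serialized initial state, while later edits ship an { id, diff } pair
// unless that document is itself mid-update from the server. Illustrative trace; in practice the
// second call is what a Doc's [Update] hook performs when one of its fields is set:
DocServer.createField(doc);
DocServer.emitFieldUpdate(doc[Id], { "$set": { "fields.title": "renamed" } });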
+ * This allows us to swap in different executions while comfortably + * calling the same function throughout the code base (such as in Util.makeReadonly()) + * @param diff kept as [any], but actually the [Diff] object sent from the server containing + * the [Doc]'s id and its new state + */ + function respondToUpdate(diff: any) { + _respondToUpdate(diff); } - let updatingId: string | undefined; + /** + * The default behavior for responding to another client's indication + * that it has updated the state of a [Doc] that is also in use by + * this client + * @param diff kept as [any], but actually the [Diff] object sent from the server containing + * the [Doc]'s id and its new state + */ let _respondToUpdate = (diff: any) => { const id = diff.id; + // to be valid, the Diff object must reference + // a document's id if (id === undefined) { return; } - const field = _cache[id]; const update = (f: Opt) => { + // if the RefField is absent from the cache or + // its promise in the cache resolves to undefined, there + // can't be anything to update if (f === undefined) { return; } + // extract this Doc's update handler const handler = f[HandleUpdate]; if (handler) { + // set the 'I'm currently updating this Doc' flag updatingId = id; handler.call(f, diff.diff); + // reset to indicate no ongoing updates updatingId = undefined; } }; + // check the cache for the field + const field = _cache[id]; if (field instanceof Promise) { + // if the field is still being retrieved, update when the promise is resolved field.then(update); } else { + // otherwise, just execute the update update(field); } }; - function respondToUpdate(diff: any) { - _respondToUpdate(diff); - } - - function connected() { - _socket.emit(MessageStore.Bar.Message, GUID); - } - Utils.AddServerHandler(_socket, MessageStore.Foo, connected); - Utils.AddServerHandler(_socket, MessageStore.UpdateField, respondToUpdate); } \ No newline at end of file diff --git a/src/client/util/SerializationHelper.ts b/src/client/util/SerializationHelper.ts index 7ded85e43..ea8af3834 100644 --- a/src/client/util/SerializationHelper.ts +++ b/src/client/util/SerializationHelper.ts @@ -7,7 +7,7 @@ export namespace SerializationHelper { return serializing > 0; } - export function Serialize(obj: Field): any { + export function serialize(obj: Field): any { if (obj === undefined || obj === null) { return undefined; } @@ -124,7 +124,7 @@ export namespace Deserializable { export function autoObject(): PropSchema { return custom( - (s) => SerializationHelper.Serialize(s), + (s) => SerializationHelper.serialize(s), (s) => SerializationHelper.Deserialize(s) ); } \ No newline at end of file diff --git a/src/new_fields/Doc.ts b/src/new_fields/Doc.ts index af65f5482..9da8912fe 100644 --- a/src/new_fields/Doc.ts +++ b/src/new_fields/Doc.ts @@ -80,7 +80,7 @@ export class Doc extends RefField { }); this[SelfProxy] = doc; if (!id || forceSave) { - DocServer.CreateField(doc); + DocServer.createField(doc); } return doc; } @@ -108,7 +108,7 @@ export class Doc extends RefField { private ___fields: any = {}; private [Update] = (diff: any) => { - DocServer.UpdateField(this[Id], diff); + DocServer.emitFieldUpdate(this[Id], diff); } private [Self] = this; diff --git a/src/new_fields/util.ts b/src/new_fields/util.ts index 8caceb063..7709d6c24 100644 --- a/src/new_fields/util.ts +++ b/src/new_fields/util.ts @@ -43,7 +43,7 @@ export const setter = action(function (target: any, prop: string | symbol | numb } else { target.__fields[prop] = value; } - target[Update]({ '$set': { ["fields." 
+ prop]: value instanceof ObjectField ? SerializationHelper.Serialize(value) : (value === undefined ? null : value) } }); + target[Update]({ '$set': { ["fields." + prop]: value instanceof ObjectField ? SerializationHelper.serialize(value) : (value === undefined ? null : value) } }); UndoManager.AddEvent({ redo: () => receiver[prop] = value, undo: () => receiver[prop] = curValue @@ -103,7 +103,7 @@ export function updateFunction(target: any, prop: any, value: any, receiver: any let current = ObjectField.MakeCopy(value); return (diff?: any) => { if (true || !diff) { - diff = { '$set': { ["fields." + prop]: SerializationHelper.Serialize(value) } }; + diff = { '$set': { ["fields." + prop]: SerializationHelper.serialize(value) } }; const oldValue = current; const newValue = ObjectField.MakeCopy(value); current = newValue; diff --git a/src/server/index.ts b/src/server/index.ts index fd66c90b4..d9fe9d93d 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -301,16 +301,16 @@ let clients: Map = {}; server.on("connection", function (socket: Socket) { console.log("a user has connected"); - Utils.Emit(socket, MessageStore.Foo, "handshooken"); + Utils.emit(socket, MessageStore.Foo, "handshooken"); - Utils.AddServerHandler(socket, MessageStore.Bar, barReceived); - Utils.AddServerHandler(socket, MessageStore.SetField, (args) => setField(socket, args)); + Utils.addServerHandler(socket, MessageStore.Bar, barReceived); + Utils.addServerHandler(socket, MessageStore.SetField, (args) => setField(socket, args)); Utils.AddServerHandlerCallback(socket, MessageStore.GetField, getField); Utils.AddServerHandlerCallback(socket, MessageStore.GetFields, getFields); - Utils.AddServerHandler(socket, MessageStore.DeleteAll, deleteFields); + Utils.addServerHandler(socket, MessageStore.DeleteAll, deleteFields); - Utils.AddServerHandler(socket, MessageStore.CreateField, CreateField); - Utils.AddServerHandler(socket, MessageStore.UpdateField, diff => UpdateField(socket, diff)); + Utils.addServerHandler(socket, MessageStore.CreateField, CreateField); + Utils.addServerHandler(socket, MessageStore.UpdateField, diff => UpdateField(socket, diff)); Utils.AddServerHandlerCallback(socket, MessageStore.GetRefField, GetRefField); Utils.AddServerHandlerCallback(socket, MessageStore.GetRefFields, GetRefFields); }); -- cgit v1.2.3-70-g09d2 From de0304b2966ebdede9d9db8c510e19020046115c Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Mon, 17 Jun 2019 13:38:15 -0400 Subject: peripheral renaming fixes --- src/client/documents/Documents.ts | 4 ++-- src/client/util/History.ts | 6 +++--- src/client/util/SearchUtil.ts | 4 ++-- src/client/util/TooltipTextMenu.tsx | 8 ++++---- src/client/views/MainView.tsx | 10 +++++----- src/client/views/SearchBox.tsx | 6 +++--- src/client/views/collections/CollectionDockingView.tsx | 10 +++++----- src/client/views/collections/CollectionSubView.tsx | 8 ++++---- src/client/views/collections/CollectionVideoView.tsx | 2 +- src/client/views/collections/DockedFrameRenderer.tsx | 2 +- .../collections/collectionFreeForm/CollectionFreeFormView.tsx | 4 ++-- .../views/collections/collectionFreeForm/MarqueeView.tsx | 2 +- src/client/views/nodes/DocumentView.tsx | 2 +- src/client/views/nodes/FormattedTextBox.tsx | 6 +++--- src/client/views/nodes/PDFBox.tsx | 2 +- src/client/views/nodes/VideoBox.tsx | 2 +- src/debug/Viewer.tsx | 2 +- src/mobile/ImageUpload.tsx | 4 ++-- src/new_fields/Proxy.ts | 2 +- src/server/authentication/models/current_user_utils.ts | 6 +++--- 20 files changed, 46 insertions(+), 46 deletions(-) 
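The renames in this commit are mechanical, but they change how call sites read. A representative sketch of the post-rename retrieval flow, modeled on the SearchUtil and SearchBox hunks below (the query string is illustrative, and the code assumes an async context with rp, DocServer, and Doc imported):

    const response = await rp.get(DocServer.Util.prepend("/search"), { qs: { query: "keyboard" } });
    const ids: string[] = JSON.parse(response);
    const docMap = await DocServer.getRefFields(ids);
    const docs = ids.map(id => docMap[id]).filter((doc: any) => doc instanceof Doc);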
(limited to 'src') diff --git a/src/client/documents/Documents.ts b/src/client/documents/Documents.ts index b10954636..758291b9b 100644 --- a/src/client/documents/Documents.ts +++ b/src/client/documents/Documents.ts @@ -98,7 +98,7 @@ export namespace Docs { // non-guid string ids for each document prototype let protoIds = [textProtoId, histoProtoId, collProtoId, imageProtoId, webProtoId, kvpProtoId, videoProtoId, audioProtoId, pdfProtoId, iconProtoId] // fetch the actual prototype documents from the server - let actualProtos = await DocServer.GetRefFields(protoIds); + let actualProtos = await DocServer.getRefFields(protoIds); // initialize prototype documents textProto = actualProtos[textProtoId] as Doc || CreateTextProto(); @@ -363,7 +363,7 @@ export namespace Docs { CurrentUserUtils.AddNorthstarSchema(schema, schemaDoc); const docs = schemaDocuments; CurrentUserUtils.GetAllNorthstarColumnAttributes(schema).map(attr => { - DocServer.GetRefField(attr.displayName! + ".alias").then(action((field: Opt) => { + DocServer.getRefField(attr.displayName! + ".alias").then(action((field: Opt) => { if (field instanceof Doc) { docs.push(field); } else { diff --git a/src/client/util/History.ts b/src/client/util/History.ts index 545ea8629..94bfcbe09 100644 --- a/src/client/util/History.ts +++ b/src/client/util/History.ts @@ -88,7 +88,7 @@ export namespace HistoryUtil { } export function createUrl(params: ParsedUrl): string { - let baseUrl = DocServer.prepend(`/${params.type}`); + let baseUrl = DocServer.Util.prepend(`/${params.type}`); switch (params.type) { case "doc": const initializers = encodeURIComponent(JSON.stringify(params.initializers)); @@ -103,7 +103,7 @@ export namespace HistoryUtil { } export async function initDoc(id: string, initializer: DocInitializerList) { - const doc = await DocServer.GetRefField(id); + const doc = await DocServer.getRefField(id); if (!(doc instanceof Doc)) { return; } @@ -111,7 +111,7 @@ export namespace HistoryUtil { } async function onDocUrl(url: DocUrl) { - const field = await DocServer.GetRefField(url.docId); + const field = await DocServer.getRefField(url.docId); await Promise.all(Object.keys(url.initializers).map(id => initDoc(id, url.initializers[id]))); if (field instanceof Doc) { MainView.Instance.openWorkspace(field, true); diff --git a/src/client/util/SearchUtil.ts b/src/client/util/SearchUtil.ts index 28ec8ca14..9dd9acbb7 100644 --- a/src/client/util/SearchUtil.ts +++ b/src/client/util/SearchUtil.ts @@ -7,13 +7,13 @@ export namespace SearchUtil { export function Search(query: string, returnDocs: true): Promise; export function Search(query: string, returnDocs: false): Promise; export async function Search(query: string, returnDocs: boolean) { - const ids = JSON.parse(await rp.get(DocServer.prepend("/search"), { + const ids = JSON.parse(await rp.get(DocServer.Util.prepend("/search"), { qs: { query } })); if (!returnDocs) { return ids; } - const docMap = await DocServer.GetRefFields(ids); + const docMap = await DocServer.getRefFields(ids); return ids.map((id: string) => docMap[id]).filter((doc: any) => doc instanceof Doc); } diff --git a/src/client/util/TooltipTextMenu.tsx b/src/client/util/TooltipTextMenu.tsx index fa2483db5..36219a99e 100644 --- a/src/client/util/TooltipTextMenu.tsx +++ b/src/client/util/TooltipTextMenu.tsx @@ -187,9 +187,9 @@ export class TooltipTextMenu { let link = node && node.marks.find(m => m.type.name === "link"); if (link) { let href: string = link.attrs.href; - if (href.indexOf(DocServer.prepend("/doc/")) === 0) { - let docid = 
href.replace(DocServer.prepend("/doc/"), ""); - DocServer.GetRefField(docid).then(action((f: Opt) => { + if (href.indexOf(DocServer.Util.prepend("/doc/")) === 0) { + let docid = href.replace(DocServer.Util.prepend("/doc/"), ""); + DocServer.getRefField(docid).then(action((f: Opt) => { if (f instanceof Doc) { if (DocumentManager.Instance.getDocumentView(f)) { DocumentManager.Instance.getDocumentView(f)!.props.focus(f); @@ -218,7 +218,7 @@ export class TooltipTextMenu { handlers: { dragComplete: action(() => { let m = dragData.droppedDocuments; - this.makeLink(DocServer.prepend("/doc/" + m[0][Id])); + this.makeLink(DocServer.Util.prepend("/doc/" + m[0][Id])); }), }, hideSource: false diff --git a/src/client/views/MainView.tsx b/src/client/views/MainView.tsx index 984db0426..734961b56 100644 --- a/src/client/views/MainView.tsx +++ b/src/client/views/MainView.tsx @@ -76,11 +76,11 @@ export class MainView extends React.Component { // causes errors to be generated when modifying an observable outside of an action configure({ enforceActions: "observed" }); if (window.location.search.includes("readonly")) { - DocServer.makeReadOnly(); + DocServer.Util.makeReadOnly(); } if (window.location.search.includes("safe")) { if (!window.location.search.includes("nro")) { - DocServer.makeReadOnly(); + DocServer.Util.makeReadOnly(); } CollectionBaseView.SetSafeMode(true); } @@ -141,7 +141,7 @@ export class MainView extends React.Component { this.createNewWorkspace(); } } else { - DocServer.GetRefField(CurrentUserUtils.MainDocId).then(field => + DocServer.getRefField(CurrentUserUtils.MainDocId).then(field => field instanceof Doc ? this.openWorkspace(field) : this.createNewWorkspace(CurrentUserUtils.MainDocId)); } @@ -294,7 +294,7 @@ export class MainView extends React.Component { let logoutRef = React.createRef(); return [ - , + ,
+
]; } diff --git a/src/client/views/SearchBox.tsx b/src/client/views/SearchBox.tsx index 7164d98a4..973715876 100644 --- a/src/client/views/SearchBox.tsx +++ b/src/client/views/SearchBox.tsx @@ -56,13 +56,13 @@ export class SearchBox extends React.Component { @action getResults = async (query: string) => { - let response = await rp.get(DocServer.prepend('/search'), { + let response = await rp.get(DocServer.Util.prepend('/search'), { qs: { query } }); let res: string[] = JSON.parse(response); - const fields = await DocServer.GetRefFields(res); + const fields = await DocServer.getRefFields(res); const docs: Doc[] = []; for (const id of res) { const field = fields[id]; @@ -74,7 +74,7 @@ export class SearchBox extends React.Component { } public static async convertDataUri(imageUri: string, returnedFilename: string) { try { - let posting = DocServer.prepend(RouteStore.dataUriToImage); + let posting = DocServer.Util.prepend(RouteStore.dataUriToImage); const returnedUri = await rp.post(posting, { body: { uri: imageUri, diff --git a/src/client/views/collections/CollectionDockingView.tsx b/src/client/views/collections/CollectionDockingView.tsx index e2bcb10ec..4f5837590 100644 --- a/src/client/views/collections/CollectionDockingView.tsx +++ b/src/client/views/collections/CollectionDockingView.tsx @@ -306,7 +306,7 @@ export class CollectionDockingView extends React.Component) => + DocServer.getRefField(docid).then(action(async (sourceDoc: Opt) => (sourceDoc instanceof Doc) && DragLinksAsDocuments(tab, x, y, sourceDoc))); } else if ((className === "lm_title" || className === "lm_tab lm_active") && !e.shiftKey) { @@ -320,7 +320,7 @@ export class CollectionDockingView extends React.Component) => { + DocServer.getRefField(docid).then(action((f: Opt) => { if (f instanceof Doc) { DragManager.StartDocumentDrag([tab], new DragManager.DocumentDragData([f]), x, y, { @@ -372,7 +372,7 @@ export class CollectionDockingView extends React.Component { + DocServer.getRefField(tab.contentItem.config.props.documentId).then(async doc => { if (doc instanceof Doc) { let counter: any = this.htmlToElement(`0
`); tab.element.append(counter); @@ -409,7 +409,7 @@ export class CollectionDockingView extends React.Component { - let doc = await DocServer.GetRefField(contentItem.config.props.documentId); + let doc = await DocServer.getRefField(contentItem.config.props.documentId); if (doc instanceof Doc) { let theDoc = doc; CollectionDockingView.TopLevel._removedDocs.push(theDoc); diff --git a/src/client/views/collections/CollectionSubView.tsx b/src/client/views/collections/CollectionSubView.tsx index 440a2410b..36e276d13 100644 --- a/src/client/views/collections/CollectionSubView.tsx +++ b/src/client/views/collections/CollectionSubView.tsx @@ -133,7 +133,7 @@ export function CollectionSubView(schemaCtor: (doc: Doc) => T) { if (path.includes(window.location.hostname)) { let s = path.split('/'); let id = s[s.length - 1]; - DocServer.GetRefField(id).then(field => { + DocServer.getRefField(id).then(field => { if (field instanceof Doc) { let alias = Doc.MakeAlias(field); alias.x = options.x || 0; @@ -170,8 +170,8 @@ export function CollectionSubView(schemaCtor: (doc: Doc) => T) { if (html && html.indexOf(document.location.origin)) { // prosemirror text containing link to dash document let start = html.indexOf(window.location.origin); let path = html.substr(start, html.length - start); - let docid = path.substr(0, path.indexOf("\">")).replace(DocServer.prepend("/doc/"), "").split("?")[0]; - DocServer.GetRefField(docid).then(f => (f instanceof Doc) && this.props.addDocument(f, false)); + let docid = path.substr(0, path.indexOf("\">")).replace(DocServer.Util.prepend("/doc/"), "").split("?")[0]; + DocServer.getRefField(docid).then(f => (f instanceof Doc) && this.props.addDocument(f, false)); return; } if (html && html.indexOf("(schemaCtor: (doc: Doc) => T) { if (item.kind === "string" && item.type.indexOf("uri") !== -1) { let str: string; let prom = new Promise(resolve => e.dataTransfer.items[i].getAsString(resolve)) - .then(action((s: string) => rp.head(DocServer.prepend(RouteStore.corsProxy + "/" + (str = s))))) + .then(action((s: string) => rp.head(DocServer.Util.prepend(RouteStore.corsProxy + "/" + (str = s))))) .then(result => { let type = result["content-type"]; if (type) { diff --git a/src/client/views/collections/CollectionVideoView.tsx b/src/client/views/collections/CollectionVideoView.tsx index bd5cd5450..ccbac9915 100644 --- a/src/client/views/collections/CollectionVideoView.tsx +++ b/src/client/views/collections/CollectionVideoView.tsx @@ -97,7 +97,7 @@ export class CollectionVideoView extends React.Component { let filename = encodeURIComponent("snapshot" + this.props.Document.title + "_" + this.props.Document.curPage).replace(/\./g, ""); SearchBox.convertDataUri(dataUrl, filename).then((returnedFilename) => { if (returnedFilename) { - let url = DocServer.prepend(returnedFilename); + let url = DocServer.Util.prepend(returnedFilename); let imageSummary = Docs.Create.ImageDocument(url, { x: NumCast(this.props.Document.x) + width, y: NumCast(this.props.Document.y), width: 150, height: height / width * 150, title: "--snapshot" + NumCast(this.props.Document.curPage) + " image-" diff --git a/src/client/views/collections/DockedFrameRenderer.tsx b/src/client/views/collections/DockedFrameRenderer.tsx index 25d4b2a49..1e7c5661b 100644 --- a/src/client/views/collections/DockedFrameRenderer.tsx +++ b/src/client/views/collections/DockedFrameRenderer.tsx @@ -42,7 +42,7 @@ export class DockedFrameRenderer extends React.Component { } constructor(props: any) { super(props); - 
DocServer.GetRefField(this.props.documentId).then(action((f: Opt) => this._document = f as Doc)); + DocServer.getRefField(this.props.documentId).then(action((f: Opt) => this._document = f as Doc)); } nativeWidth = () => NumCast(this._document!.nativeWidth, this._panelWidth); diff --git a/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx b/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx index 9d19df540..cd613e6ab 100644 --- a/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx +++ b/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx @@ -235,8 +235,8 @@ export class CollectionFreeFormView extends CollectionSubView(PanZoomDocument) { e.preventDefault(); let start = html.indexOf(window.location.origin); let path = html.substr(start, html.length - start); - let docid = path.substr(0, path.indexOf("\">")).replace(DocServer.prepend("/doc/"), "").split("?")[0]; - DocServer.GetRefField(docid).then(f => { + let docid = path.substr(0, path.indexOf("\">")).replace(DocServer.Util.prepend("/doc/"), "").split("?")[0]; + DocServer.getRefField(docid).then(f => { if (f instanceof Doc) { f.x = pt[0]; f.y = pt[1]; diff --git a/src/client/views/collections/collectionFreeForm/MarqueeView.tsx b/src/client/views/collections/collectionFreeForm/MarqueeView.tsx index cd386abfa..07a58ed64 100644 --- a/src/client/views/collections/collectionFreeForm/MarqueeView.tsx +++ b/src/client/views/collections/collectionFreeForm/MarqueeView.tsx @@ -306,7 +306,7 @@ export class MarqueeView extends React.Component let summary = Docs.Create.TextDocument({ x: bounds.left, y: bounds.top, width: 300, height: 100, backgroundColor: "#e2ad32" /* yellow */, title: "-summary-" }); SearchBox.convertDataUri(dataUrl, "icon" + summary[Id] + "_image").then((returnedFilename) => { if (returnedFilename) { - let url = DocServer.prepend(returnedFilename); + let url = DocServer.Util.prepend(returnedFilename); let imageSummary = Docs.Create.ImageDocument(url, { x: bounds.left, y: bounds.top + 100 / zoomBasis, width: 150, height: bounds.height / bounds.width * 150, title: "-summary image-" diff --git a/src/client/views/nodes/DocumentView.tsx b/src/client/views/nodes/DocumentView.tsx index 16e40000d..fdcb20e9a 100644 --- a/src/client/views/nodes/DocumentView.tsx +++ b/src/client/views/nodes/DocumentView.tsx @@ -422,7 +422,7 @@ export class DocumentView extends DocComponent(Docu }, icon: "search" }); cm.addItem({ description: "Center View", event: () => this.props.focus(this.props.Document), icon: "crosshairs" }); - cm.addItem({ description: "Copy URL", event: () => Utils.CopyText(DocServer.prepend("/doc/" + this.props.Document[Id])), icon: "link" }); + cm.addItem({ description: "Copy URL", event: () => Utils.CopyText(DocServer.Util.prepend("/doc/" + this.props.Document[Id])), icon: "link" }); cm.addItem({ description: "Copy ID", event: () => Utils.CopyText(this.props.Document[Id]), icon: "fingerprint" }); cm.addItem({ description: "Delete", event: this.deleteClicked, icon: "trash" }); if (!this.topMost) { diff --git a/src/client/views/nodes/FormattedTextBox.tsx b/src/client/views/nodes/FormattedTextBox.tsx index d00a4b928..6a14a04f7 100644 --- a/src/client/views/nodes/FormattedTextBox.tsx +++ b/src/client/views/nodes/FormattedTextBox.tsx @@ -237,9 +237,9 @@ export class FormattedTextBox extends DocComponent<(FieldViewProps & FormattedTe href = parent.childNodes[0].href; } if (href) { - if (href.indexOf(DocServer.prepend("/doc/")) === 0) { - let docid = 
href.replace(DocServer.prepend("/doc/"), "").split("?")[0]; - DocServer.GetRefField(docid).then(f => { + if (href.indexOf(DocServer.Util.prepend("/doc/")) === 0) { + let docid = href.replace(DocServer.Util.prepend("/doc/"), "").split("?")[0]; + DocServer.getRefField(docid).then(f => { (f instanceof Doc) && DocumentManager.Instance.jumpToDocument(f, ctrlKey, document => this.props.addDocTab(document, "inTab")) }); } diff --git a/src/client/views/nodes/PDFBox.tsx b/src/client/views/nodes/PDFBox.tsx index aa29a7170..df9e49b64 100644 --- a/src/client/views/nodes/PDFBox.tsx +++ b/src/client/views/nodes/PDFBox.tsx @@ -257,7 +257,7 @@ export class PDFBox extends DocComponent(PdfDocumen .then(action((dataUrl: string) => { SearchBox.convertDataUri(dataUrl, "icon" + this.Document[Id] + "_" + this.curPage).then((returnedFilename) => { if (returnedFilename) { - let url = DocServer.prepend(returnedFilename); + let url = DocServer.Util.prepend(returnedFilename); this.props.Document.thumbnail = new ImageField(new URL(url)); } runInAction(() => this._renderAsSvg = true); diff --git a/src/client/views/nodes/VideoBox.tsx b/src/client/views/nodes/VideoBox.tsx index 35ecf12f6..9ab607e91 100644 --- a/src/client/views/nodes/VideoBox.tsx +++ b/src/client/views/nodes/VideoBox.tsx @@ -97,7 +97,7 @@ export class VideoBox extends DocComponent(VideoD }; try { let responseSchema: any = {}; - const videoInfoResponse = await rp.get(DocServer.prepend(RouteStore.corsProxy + "/" + `https://www.youtube.com/watch?v=${videoId}`), videoInfoRequestConfig); + const videoInfoResponse = await rp.get(DocServer.Util.prepend(RouteStore.corsProxy + "/" + `https://www.youtube.com/watch?v=${videoId}`), videoInfoRequestConfig); const dataHtml = videoInfoResponse; const start = dataHtml.indexOf('ytplayer.config = ') + 18; const end = dataHtml.indexOf(';ytplayer.load'); diff --git a/src/debug/Viewer.tsx b/src/debug/Viewer.tsx index b22300d0b..753149756 100644 --- a/src/debug/Viewer.tsx +++ b/src/debug/Viewer.tsx @@ -146,7 +146,7 @@ class Viewer extends React.Component { @action onKeyPress = (e: React.KeyboardEvent) => { if (e.key === "Enter") { - DocServer.GetRefField(this.idToAdd).then(action((field: any) => { + DocServer.getRefField(this.idToAdd).then(action((field: any) => { if (field !== undefined) { this.fields.push(field); } diff --git a/src/mobile/ImageUpload.tsx b/src/mobile/ImageUpload.tsx index a8f94b746..df597e0a9 100644 --- a/src/mobile/ImageUpload.tsx +++ b/src/mobile/ImageUpload.tsx @@ -57,11 +57,11 @@ class Uploader extends React.Component { this.status = "getting user document"; - const res = await rp.get(DocServer.prepend(RouteStore.getUserDocumentId)); + const res = await rp.get(DocServer.Util.prepend(RouteStore.getUserDocumentId)); if (!res) { throw new Error("No user id returned"); } - const field = await DocServer.GetRefField(res); + const field = await DocServer.getRefField(res); let pending: Opt; if (field instanceof Doc) { pending = await Cast(field.optionalRightCollection, Doc); diff --git a/src/new_fields/Proxy.ts b/src/new_fields/Proxy.ts index 130ec066e..230e4ab8b 100644 --- a/src/new_fields/Proxy.ts +++ b/src/new_fields/Proxy.ts @@ -57,7 +57,7 @@ export class ProxyField extends ObjectField { return undefined; } if (!this.promise) { - this.promise = DocServer.GetRefField(this.fieldId).then(action((field: any) => { + this.promise = DocServer.getRefField(this.fieldId).then(action((field: any) => { this.promise = undefined; this.cache = field; if (field === undefined) this.failed = true; diff --git 
a/src/server/authentication/models/current_user_utils.ts b/src/server/authentication/models/current_user_utils.ts index 169be3b99..95c20d2db 100644 --- a/src/server/authentication/models/current_user_utils.ts +++ b/src/server/authentication/models/current_user_utils.ts @@ -40,7 +40,7 @@ export class CurrentUserUtils { } public static async loadCurrentUser(): Promise { - let userPromise = rp.get(DocServer.prepend(RouteStore.getCurrUser)).then(response => { + let userPromise = rp.get(DocServer.Util.prepend(RouteStore.getCurrUser)).then(response => { if (response) { let obj = JSON.parse(response); CurrentUserUtils.curr_id = obj.id as string; @@ -49,9 +49,9 @@ export class CurrentUserUtils { throw new Error("There should be a user! Why does Dash think there isn't one?"); } }); - let userDocPromise = await rp.get(DocServer.prepend(RouteStore.getUserDocumentId)).then(id => { + let userDocPromise = await rp.get(DocServer.Util.prepend(RouteStore.getUserDocumentId)).then(id => { if (id) { - return DocServer.GetRefField(id).then(field => + return DocServer.getRefField(id).then(field => runInAction(() => this.user_document = field instanceof Doc ? field : this.createUserDocument(id))); } else { throw new Error("There should be a user id! Why does Dash think there isn't one?"); -- cgit v1.2.3-70-g09d2 From 4bec1d89eff45d6dcbb4041bc211db88d9da1c8f Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Tue, 18 Jun 2019 22:02:58 -0400 Subject: fixed serialization typo and added draft of python word doc scraper to git directory for safety --- src/buxton/scraper.py | 128 +++++++++++++++++++++++++++++++++ src/client/DocServer.ts | 2 +- src/client/util/SerializationHelper.ts | 4 +- src/new_fields/util.ts | 4 +- 4 files changed, 133 insertions(+), 5 deletions(-) create mode 100644 src/buxton/scraper.py (limited to 'src') diff --git a/src/buxton/scraper.py b/src/buxton/scraper.py new file mode 100644 index 000000000..0abebb485 --- /dev/null +++ b/src/buxton/scraper.py @@ -0,0 +1,128 @@ +import os +import docx2txt +from docx import Document +from docx.opc.constants import RELATIONSHIP_TYPE as RT +import re +from pymongo import MongoClient +import shutil +import uuid + +source = "./source" +dist = "./Dash-Web/src/server/public/files" + +collection_handle = MongoClient("localhost", 27017)["Dash"]["buxton"] + +def extract_links(fileName): + links = [] + doc = Document(fileName) + rels = doc.part.rels + for rel in rels: + item = rels[rel] + if item.reltype == RT.HYPERLINK and ".aspx" not in item._target: + links.append(item._target) + return links + +def extract_value(kv_string): + return kv_string.split(":")[1].strip() + +def mkdir_if_absent(path): + try: + if not os.path.exists(path): + os.mkdir(path) + except OSError: + print("Failed to create the appropriate directory structures for %s" % file_name) + +def parse_document(file_name: str): + result = {} + pure_name = file_name.split(".")[0] + + dir_path = dist + "/" + pure_name + mkdir_if_absent(dir_path) + + raw = str(docx2txt.process(source + "/" + file_name, dir_path)) + + sanitize = lambda line: re.sub("[\n\t]+", "", line).strip().replace(u"\u00A0", " ").replace(u"\u2013", "-").replace(u"\u201c", '''"''').replace(u"\u201d", '''"''') + remove_empty = lambda line: len(line) > 1 + + lines = list(map(sanitize, raw.split("\n"))) + lines = list(filter(remove_empty, lines)) + + result["file_name"] = file_name + result["title"] = lines[2] + result["short_description"] = lines[3].replace("Short Description: ", "") + + cur = 5 + notes = "" + while lines[cur] != "Device 
Details": + notes += lines[cur] + " " + cur += 1 + result["buxton_notes"] = notes.strip() + + cur += 1 + clean = list(map(lambda data: data.strip().split(":"), lines[cur].split("|"))) + result["company"] = clean[0][1].strip() + result["year"] = clean[1][1].strip() + result["original_price"] = clean[2][1].strip() + + cur += 1 + result["degrees_of_freedom"] = extract_value(lines[cur]) + cur += 1 + result["dimensions"] = extract_value(lines[cur]) + + cur += 2 + result["primary_key"] = extract_value(lines[cur]) + cur += 1 + result["secondary_key"] = extract_value(lines[cur]) + + result["hyperlinks"] = extract_links(source + "/" + file_name) + + cur += 2 + link_descriptions = [] + while lines[cur] != "Image": + link_descriptions.append(lines[cur]) + cur += 1 + result["link_descriptions"] = link_descriptions + + images = [] + captions = [] + cur += 3 + while cur + 1 < len(lines) and lines[cur] != "NOTES:": + images.append(lines[cur]) + captions.append(lines[cur + 1]) + cur += 2 + result["images"] = images + result["captions"] = captions + + notes = [] + if (cur < len(lines) and lines[cur] == "NOTES:"): + cur += 1 + while cur < len(lines): + notes.append(lines[cur]) + cur += 1 + result["notes"] = notes + + return result + +def upload(document): + wrapper = {} + wrapper["_id"] = str(uuid.uuid4()) + wrapper["fields"] = document + wrapper["__type"] = "Doc" + collection_handle.insert_one(wrapper) + +if os.path.exists(dist): + shutil.rmtree(dist) +while (os.path.exists(dist)): + pass +os.mkdir(dist) + +for file_name in os.listdir(source): + if file_name.endswith('.docx'): + upload(parse_document(file_name)) + +lines = ['*', '!.gitignore'] +with open(dist + "/.gitignore", 'w') as f: + f.write('\n'.join(lines)) + + + diff --git a/src/client/DocServer.ts b/src/client/DocServer.ts index ad7c706b6..3b33657eb 100644 --- a/src/client/DocServer.ts +++ b/src/client/DocServer.ts @@ -257,7 +257,7 @@ export namespace DocServer { */ let _createField = (field: RefField) => { _cache[field[Id]] = field; - const initialState = SerializationHelper.serialize(field); + const initialState = SerializationHelper.Serialize(field); Utils.emit(_socket, MessageStore.CreateField, initialState); }; diff --git a/src/client/util/SerializationHelper.ts b/src/client/util/SerializationHelper.ts index ea8af3834..7ded85e43 100644 --- a/src/client/util/SerializationHelper.ts +++ b/src/client/util/SerializationHelper.ts @@ -7,7 +7,7 @@ export namespace SerializationHelper { return serializing > 0; } - export function serialize(obj: Field): any { + export function Serialize(obj: Field): any { if (obj === undefined || obj === null) { return undefined; } @@ -124,7 +124,7 @@ export namespace Deserializable { export function autoObject(): PropSchema { return custom( - (s) => SerializationHelper.serialize(s), + (s) => SerializationHelper.Serialize(s), (s) => SerializationHelper.Deserialize(s) ); } \ No newline at end of file diff --git a/src/new_fields/util.ts b/src/new_fields/util.ts index 7709d6c24..8caceb063 100644 --- a/src/new_fields/util.ts +++ b/src/new_fields/util.ts @@ -43,7 +43,7 @@ export const setter = action(function (target: any, prop: string | symbol | numb } else { target.__fields[prop] = value; } - target[Update]({ '$set': { ["fields." + prop]: value instanceof ObjectField ? SerializationHelper.serialize(value) : (value === undefined ? null : value) } }); + target[Update]({ '$set': { ["fields." + prop]: value instanceof ObjectField ? SerializationHelper.Serialize(value) : (value === undefined ? 
null : value) } }); UndoManager.AddEvent({ redo: () => receiver[prop] = value, undo: () => receiver[prop] = curValue @@ -103,7 +103,7 @@ export function updateFunction(target: any, prop: any, value: any, receiver: any let current = ObjectField.MakeCopy(value); return (diff?: any) => { if (true || !diff) { - diff = { '$set': { ["fields." + prop]: SerializationHelper.serialize(value) } }; + diff = { '$set': { ["fields." + prop]: SerializationHelper.Serialize(value) } }; const oldValue = current; const newValue = ObjectField.MakeCopy(value); current = newValue; -- cgit v1.2.3-70-g09d2 From 5147528ef76ed069d7c5f1fc1feb7404c92227bc Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Wed, 19 Jun 2019 02:19:03 -0400 Subject: first pass at all documents, improved scraping for handling variation --- src/.DS_Store | Bin 6148 -> 6148 bytes src/buxton/scraper.py | 69 ++++++++++++++------- src/buxton/source/Bill_Notes_Bill_Notes_CyKey.docx | Bin 0 -> 1675500 bytes src/buxton/source/Bill_Notes_Braun_T3.docx | Bin 0 -> 1671968 bytes src/buxton/source/Bill_Notes_CasioC801.docx | Bin 0 -> 574664 bytes src/buxton/source/Bill_Notes_Casio_Mini.docx | Bin 0 -> 581069 bytes .../source/Bill_Notes_FingerWorks_Prototype.docx | Bin 0 -> 585090 bytes .../source/Bill_Notes_Fingerworks_TouchStream.docx | Bin 0 -> 1722555 bytes src/buxton/source/Bill_Notes_FrogPad.docx | Bin 0 -> 840173 bytes src/buxton/source/Bill_Notes_Gavilan_SC.docx | Bin 0 -> 1695290 bytes .../source/Bill_Notes_Grandjean_Stenotype.docx | Bin 0 -> 2094142 bytes src/buxton/source/Bill_Notes_Matias.docx | Bin 0 -> 590407 bytes src/buxton/source/Bill_Notes_MousePen.docx | Bin 0 -> 505322 bytes src/buxton/source/Bill_Notes_NewO.docx | Bin 0 -> 2264571 bytes src/buxton/source/Bill_Notes_OLPC.docx | Bin 0 -> 6883659 bytes src/buxton/source/Bill_Notes_PARCkbd.docx | Bin 0 -> 631959 bytes .../source/Bill_Notes_Philco_Mystery_Control.docx | Bin 0 -> 1994439 bytes src/buxton/source/Bill_Notes_TASA_Kbd.docx | Bin 0 -> 461199 bytes src/buxton/source/Bill_Notes_The_Tap.docx | Bin 0 -> 711321 bytes 19 files changed, 46 insertions(+), 23 deletions(-) create mode 100644 src/buxton/source/Bill_Notes_Bill_Notes_CyKey.docx create mode 100644 src/buxton/source/Bill_Notes_Braun_T3.docx create mode 100644 src/buxton/source/Bill_Notes_CasioC801.docx create mode 100644 src/buxton/source/Bill_Notes_Casio_Mini.docx create mode 100644 src/buxton/source/Bill_Notes_FingerWorks_Prototype.docx create mode 100644 src/buxton/source/Bill_Notes_Fingerworks_TouchStream.docx create mode 100644 src/buxton/source/Bill_Notes_FrogPad.docx create mode 100644 src/buxton/source/Bill_Notes_Gavilan_SC.docx create mode 100644 src/buxton/source/Bill_Notes_Grandjean_Stenotype.docx create mode 100644 src/buxton/source/Bill_Notes_Matias.docx create mode 100644 src/buxton/source/Bill_Notes_MousePen.docx create mode 100644 src/buxton/source/Bill_Notes_NewO.docx create mode 100644 src/buxton/source/Bill_Notes_OLPC.docx create mode 100644 src/buxton/source/Bill_Notes_PARCkbd.docx create mode 100644 src/buxton/source/Bill_Notes_Philco_Mystery_Control.docx create mode 100644 src/buxton/source/Bill_Notes_TASA_Kbd.docx create mode 100644 src/buxton/source/Bill_Notes_The_Tap.docx (limited to 'src') diff --git a/src/.DS_Store b/src/.DS_Store index d70e95c0a..071dafa1e 100644 Binary files a/src/.DS_Store and b/src/.DS_Store differ diff --git a/src/buxton/scraper.py b/src/buxton/scraper.py index 0abebb485..854c99379 100644 --- a/src/buxton/scraper.py +++ b/src/buxton/scraper.py @@ -8,9 +8,12 @@ import shutil 
import uuid source = "./source" -dist = "./Dash-Web/src/server/public/files" +dist = "../server/public/files" + +db = MongoClient("localhost", 27017)["Dash"] +db.buxton.drop() +collection_handle = db.buxton -collection_handle = MongoClient("localhost", 27017)["Dash"]["buxton"] def extract_links(fileName): links = [] @@ -22,8 +25,11 @@ def extract_links(fileName): links.append(item._target) return links + def extract_value(kv_string): - return kv_string.split(":")[1].strip() + pieces = kv_string.split(":") + return (pieces[1] if len(pieces) > 1 else kv_string).strip() + def mkdir_if_absent(path): try: @@ -32,6 +38,7 @@ def mkdir_if_absent(path): except OSError: print("Failed to create the appropriate directory structures for %s" % file_name) + def parse_document(file_name: str): result = {} pure_name = file_name.split(".")[0] @@ -41,15 +48,18 @@ def parse_document(file_name: str): raw = str(docx2txt.process(source + "/" + file_name, dir_path)) - sanitize = lambda line: re.sub("[\n\t]+", "", line).strip().replace(u"\u00A0", " ").replace(u"\u2013", "-").replace(u"\u201c", '''"''').replace(u"\u201d", '''"''') - remove_empty = lambda line: len(line) > 1 + def sanitize(line): return re.sub("[\n\t]+", "", line).replace(u"\u00A0", " ").replace( + u"\u2013", "-").replace(u"\u201c", '''"''').replace(u"\u201d", '''"''').strip() + + def remove_empty(line): return len(line) > 1 lines = list(map(sanitize, raw.split("\n"))) lines = list(filter(remove_empty, lines)) result["file_name"] = file_name - result["title"] = lines[2] - result["short_description"] = lines[3].replace("Short Description: ", "") + result["title"] = lines[2].strip() + result["short_description"] = lines[3].strip().replace( + "Short Description: ", "") cur = 5 notes = "" @@ -57,32 +67,44 @@ def parse_document(file_name: str): notes += lines[cur] + " " cur += 1 result["buxton_notes"] = notes.strip() - + cur += 1 - clean = list(map(lambda data: data.strip().split(":"), lines[cur].split("|"))) - result["company"] = clean[0][1].strip() - result["year"] = clean[1][1].strip() - result["original_price"] = clean[2][1].strip() + clean = list( + map(lambda data: data.strip().split(":"), lines[cur].split("|"))) + result["company"] = clean[0][len(clean[0]) - 1].strip() + result["year"] = clean[1][len(clean[1]) - 1].strip() + result["original_price"] = clean[2][len(clean[2]) - 1].strip() cur += 1 result["degrees_of_freedom"] = extract_value(lines[cur]) cur += 1 - result["dimensions"] = extract_value(lines[cur]) - cur += 2 + dimensions = lines[cur].lower() + if dimensions.startswith("dimensions"): + result["dimensions"] = dimensions[11:].strip() + cur += 1 + while lines[cur] != "Key Words": + result["dimensions"] += (" " + lines[cur].strip()) + cur += 1 + + cur += 1 result["primary_key"] = extract_value(lines[cur]) cur += 1 result["secondary_key"] = extract_value(lines[cur]) - result["hyperlinks"] = extract_links(source + "/" + file_name) + while lines[cur] != "Links": + result["secondary_key"] += (" " + extract_value(lines[cur]).strip()) + cur += 1 - cur += 2 + cur += 1 link_descriptions = [] while lines[cur] != "Image": - link_descriptions.append(lines[cur]) + link_descriptions.append(lines[cur].strip()) cur += 1 result["link_descriptions"] = link_descriptions + result["hyperlinks"] = extract_links(source + "/" + file_name) + images = [] captions = [] cur += 3 @@ -99,9 +121,11 @@ def parse_document(file_name: str): while cur < len(lines): notes.append(lines[cur]) cur += 1 - result["notes"] = notes + if len(notes) > 0: + result["notes"] = notes + + 
return result - return result def upload(document): wrapper = {} @@ -110,11 +134,13 @@ def upload(document): wrapper["__type"] = "Doc" collection_handle.insert_one(wrapper) + if os.path.exists(dist): shutil.rmtree(dist) -while (os.path.exists(dist)): +while os.path.exists(dist): pass os.mkdir(dist) +mkdir_if_absent(source) for file_name in os.listdir(source): if file_name.endswith('.docx'): @@ -123,6 +149,3 @@ for file_name in os.listdir(source): lines = ['*', '!.gitignore'] with open(dist + "/.gitignore", 'w') as f: f.write('\n'.join(lines)) - - - diff --git a/src/buxton/source/Bill_Notes_Bill_Notes_CyKey.docx b/src/buxton/source/Bill_Notes_Bill_Notes_CyKey.docx new file mode 100644 index 000000000..06094b4d3 Binary files /dev/null and b/src/buxton/source/Bill_Notes_Bill_Notes_CyKey.docx differ diff --git a/src/buxton/source/Bill_Notes_Braun_T3.docx b/src/buxton/source/Bill_Notes_Braun_T3.docx new file mode 100644 index 000000000..356697092 Binary files /dev/null and b/src/buxton/source/Bill_Notes_Braun_T3.docx differ diff --git a/src/buxton/source/Bill_Notes_CasioC801.docx b/src/buxton/source/Bill_Notes_CasioC801.docx new file mode 100644 index 000000000..cd89fb97b Binary files /dev/null and b/src/buxton/source/Bill_Notes_CasioC801.docx differ diff --git a/src/buxton/source/Bill_Notes_Casio_Mini.docx b/src/buxton/source/Bill_Notes_Casio_Mini.docx new file mode 100644 index 000000000..a503cddfc Binary files /dev/null and b/src/buxton/source/Bill_Notes_Casio_Mini.docx differ diff --git a/src/buxton/source/Bill_Notes_FingerWorks_Prototype.docx b/src/buxton/source/Bill_Notes_FingerWorks_Prototype.docx new file mode 100644 index 000000000..4d13a8cf5 Binary files /dev/null and b/src/buxton/source/Bill_Notes_FingerWorks_Prototype.docx differ diff --git a/src/buxton/source/Bill_Notes_Fingerworks_TouchStream.docx b/src/buxton/source/Bill_Notes_Fingerworks_TouchStream.docx new file mode 100644 index 000000000..578a1be08 Binary files /dev/null and b/src/buxton/source/Bill_Notes_Fingerworks_TouchStream.docx differ diff --git a/src/buxton/source/Bill_Notes_FrogPad.docx b/src/buxton/source/Bill_Notes_FrogPad.docx new file mode 100644 index 000000000..d01e1bf5c Binary files /dev/null and b/src/buxton/source/Bill_Notes_FrogPad.docx differ diff --git a/src/buxton/source/Bill_Notes_Gavilan_SC.docx b/src/buxton/source/Bill_Notes_Gavilan_SC.docx new file mode 100644 index 000000000..7bd28b376 Binary files /dev/null and b/src/buxton/source/Bill_Notes_Gavilan_SC.docx differ diff --git a/src/buxton/source/Bill_Notes_Grandjean_Stenotype.docx b/src/buxton/source/Bill_Notes_Grandjean_Stenotype.docx new file mode 100644 index 000000000..0615c4953 Binary files /dev/null and b/src/buxton/source/Bill_Notes_Grandjean_Stenotype.docx differ diff --git a/src/buxton/source/Bill_Notes_Matias.docx b/src/buxton/source/Bill_Notes_Matias.docx new file mode 100644 index 000000000..547603256 Binary files /dev/null and b/src/buxton/source/Bill_Notes_Matias.docx differ diff --git a/src/buxton/source/Bill_Notes_MousePen.docx b/src/buxton/source/Bill_Notes_MousePen.docx new file mode 100644 index 000000000..4e1056636 Binary files /dev/null and b/src/buxton/source/Bill_Notes_MousePen.docx differ diff --git a/src/buxton/source/Bill_Notes_NewO.docx b/src/buxton/source/Bill_Notes_NewO.docx new file mode 100644 index 000000000..a514926d2 Binary files /dev/null and b/src/buxton/source/Bill_Notes_NewO.docx differ diff --git a/src/buxton/source/Bill_Notes_OLPC.docx b/src/buxton/source/Bill_Notes_OLPC.docx new file mode 100644 index 
000000000..bfca0a9bb Binary files /dev/null and b/src/buxton/source/Bill_Notes_OLPC.docx differ diff --git a/src/buxton/source/Bill_Notes_PARCkbd.docx b/src/buxton/source/Bill_Notes_PARCkbd.docx new file mode 100644 index 000000000..c0cf6ba9a Binary files /dev/null and b/src/buxton/source/Bill_Notes_PARCkbd.docx differ diff --git a/src/buxton/source/Bill_Notes_Philco_Mystery_Control.docx b/src/buxton/source/Bill_Notes_Philco_Mystery_Control.docx new file mode 100644 index 000000000..ad06903f3 Binary files /dev/null and b/src/buxton/source/Bill_Notes_Philco_Mystery_Control.docx differ diff --git a/src/buxton/source/Bill_Notes_TASA_Kbd.docx b/src/buxton/source/Bill_Notes_TASA_Kbd.docx new file mode 100644 index 000000000..e4c659de9 Binary files /dev/null and b/src/buxton/source/Bill_Notes_TASA_Kbd.docx differ diff --git a/src/buxton/source/Bill_Notes_The_Tap.docx b/src/buxton/source/Bill_Notes_The_Tap.docx new file mode 100644 index 000000000..8ceebc71e Binary files /dev/null and b/src/buxton/source/Bill_Notes_The_Tap.docx differ -- cgit v1.2.3-70-g09d2 From 48d2ece574f613c084a083ae05148498dd0030e3 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Wed, 19 Jun 2019 14:13:46 -0400 Subject: implemented simulation of view and data docs --- src/buxton/scraper.py | 122 +++++++++++++++++++++++++++++++++++++++++++++----- 1 file changed, 110 insertions(+), 12 deletions(-) (limited to 'src') diff --git a/src/buxton/scraper.py b/src/buxton/scraper.py index 854c99379..a3bbc75ac 100644 --- a/src/buxton/scraper.py +++ b/src/buxton/scraper.py @@ -6,13 +6,14 @@ import re from pymongo import MongoClient import shutil import uuid +import datetime +from PIL import Image source = "./source" dist = "../server/public/files" db = MongoClient("localhost", 27017)["Dash"] -db.buxton.drop() -collection_handle = db.buxton +view_doc_guids = [] def extract_links(fileName): @@ -39,7 +40,71 @@ def mkdir_if_absent(path): print("Failed to create the appropriate directory structures for %s" % file_name) +def guid(): + return str(uuid.uuid4()) + + +def write_image(folder, name): + path = f"http://localhost:1050/files/{folder}/{name}" + + data_doc_guid = guid() + view_doc_guid = guid() + + view_doc = { + "_id": view_doc_guid, + "fields": { + "proto": { + "fieldId": data_doc_guid, + "__type": "proxy" + }, + "x": 10, + "y": 10, + "width": 300, + "zIndex": 2, + "libraryBrush": False + }, + "__type": "Doc" + } + + image = Image.open(f"{dist}/{folder}/{name}") + native_width, native_height = image.size + + data_doc = { + "_id": data_doc_guid, + "fields": { + "proto": { + "_id": "imageProto", + "__type": "proxy" + }, + "data": { + "url": path, + "type": "image" + }, + "title": name, + "nativeWidth": native_width, + "author": "Bill Buxton", + "creationDate": { + "date": datetime.datetime.utcnow().microsecond, + "__type": "date" + }, + "isPrototype": True, + "page": -1, + "nativeHeight": native_height, + "height": native_height + }, + "__type": "Doc" + } + + db.newDocuments.insert_one(view_doc) + db.newDocuments.insert_one(data_doc) + + print(path) + + return view_doc_guid + + def parse_document(file_name: str): + print(f"Parsing {file_name}...") result = {} pure_name = file_name.split(".")[0] @@ -48,6 +113,11 @@ def parse_document(file_name: str): raw = str(docx2txt.process(source + "/" + file_name, dir_path)) + print("Extracting images...\n") + for image in os.listdir(dir_path): + view_doc_guids.append(write_image(pure_name, image)) + print() + def sanitize(line): return re.sub("[\n\t]+", "", line).replace(u"\u00A0", " ").replace( 
u"\u2013", "-").replace(u"\u201c", '''"''').replace(u"\u201d", '''"''').strip() @@ -76,16 +146,20 @@ def parse_document(file_name: str): result["original_price"] = clean[2][len(clean[2]) - 1].strip() cur += 1 - result["degrees_of_freedom"] = extract_value(lines[cur]) + result["degrees_of_freedom"] = extract_value( + lines[cur]).replace("NA", "N/A") cur += 1 dimensions = lines[cur].lower() if dimensions.startswith("dimensions"): - result["dimensions"] = dimensions[11:].strip() + dim_concat = dimensions[11:].strip() cur += 1 while lines[cur] != "Key Words": - result["dimensions"] += (" " + lines[cur].strip()) + dim_concat += (" " + lines[cur].strip()) cur += 1 + result["dimensions"] = dim_concat + else: + result["dimensions"] = "N/A" cur += 1 result["primary_key"] = extract_value(lines[cur]) @@ -124,15 +198,22 @@ def parse_document(file_name: str): if len(notes) > 0: result["notes"] = notes + print("...contents dictionary constructed.") + return result -def upload(document): - wrapper = {} - wrapper["_id"] = str(uuid.uuid4()) - wrapper["fields"] = document - wrapper["__type"] = "Doc" - collection_handle.insert_one(wrapper) +def wrap(document): + return { + "_id": guid(), + "fields": document, + "__type": "Doc" + } + + +def upload(collection, mongofied): + for doc in mongofied: + collection.insert_one(doc) if os.path.exists(dist): @@ -142,9 +223,26 @@ while os.path.exists(dist): os.mkdir(dist) mkdir_if_absent(source) +candidates = 0 +mongofied = [] for file_name in os.listdir(source): if file_name.endswith('.docx'): - upload(parse_document(file_name)) + candidates += 1 + mongofied.append(wrap(parse_document(file_name))) + +for doc in mongofied: + db.newDocuments.insert_one(doc) + +proxified = list( + map(lambda guid: {"fieldId": guid, "type": "proxy"}, view_doc_guids)) +db.newDocuments.update_one( + {"fields.title": "WS collection 1"}, + {"$push": {"fields.data.fields": {"$each": proxified}}} +) + +print("...dictionaries written to Dash Document.\n") + +print(f"{candidates} candidates processed.") lines = ['*', '!.gitignore'] with open(dist + "/.gitignore", 'w') as f: -- cgit v1.2.3-70-g09d2 From cbb2f4191e31d72c8c727976b5616983af15af45 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Wed, 19 Jun 2019 14:38:13 -0400 Subject: Image import working! 
--- src/buxton/scraper.py | 9 ++++++--- src/buxton/source/Bill_Notes_Braun_T3.docx | Bin 1671968 -> 0 bytes src/buxton/source/Bill_Notes_CasioC801.docx | Bin 574664 -> 0 bytes src/buxton/source/Bill_Notes_Casio_Mini.docx | Bin 581069 -> 0 bytes .../source/Bill_Notes_FingerWorks_Prototype.docx | Bin 585090 -> 0 bytes .../source/Bill_Notes_Fingerworks_TouchStream.docx | Bin 1722555 -> 0 bytes src/buxton/source/Bill_Notes_FrogPad.docx | Bin 840173 -> 0 bytes src/buxton/source/Bill_Notes_Gavilan_SC.docx | Bin 1695290 -> 0 bytes src/buxton/source/Bill_Notes_Grandjean_Stenotype.docx | Bin 2094142 -> 0 bytes src/buxton/source/Bill_Notes_Matias.docx | Bin 590407 -> 0 bytes src/buxton/source/Bill_Notes_MousePen.docx | Bin 505322 -> 0 bytes src/buxton/source/Bill_Notes_NewO.docx | Bin 2264571 -> 0 bytes src/buxton/source/Bill_Notes_OLPC.docx | Bin 6883659 -> 0 bytes src/buxton/source/Bill_Notes_PARCkbd.docx | Bin 631959 -> 0 bytes .../source/Bill_Notes_Philco_Mystery_Control.docx | Bin 1994439 -> 0 bytes src/buxton/source/Bill_Notes_TASA_Kbd.docx | Bin 461199 -> 0 bytes src/buxton/source/Bill_Notes_The_Tap.docx | Bin 711321 -> 0 bytes src/buxton/source/Extra/Bill_Notes_Braun_T3.docx | Bin 0 -> 1671968 bytes src/buxton/source/Extra/Bill_Notes_CasioC801.docx | Bin 0 -> 574664 bytes src/buxton/source/Extra/Bill_Notes_Casio_Mini.docx | Bin 0 -> 581069 bytes .../Extra/Bill_Notes_FingerWorks_Prototype.docx | Bin 0 -> 585090 bytes .../Extra/Bill_Notes_Fingerworks_TouchStream.docx | Bin 0 -> 1722555 bytes src/buxton/source/Extra/Bill_Notes_FrogPad.docx | Bin 0 -> 840173 bytes src/buxton/source/Extra/Bill_Notes_Gavilan_SC.docx | Bin 0 -> 1695290 bytes .../source/Extra/Bill_Notes_Grandjean_Stenotype.docx | Bin 0 -> 2094142 bytes src/buxton/source/Extra/Bill_Notes_Matias.docx | Bin 0 -> 590407 bytes src/buxton/source/Extra/Bill_Notes_MousePen.docx | Bin 0 -> 505322 bytes src/buxton/source/Extra/Bill_Notes_NewO.docx | Bin 0 -> 2264571 bytes src/buxton/source/Extra/Bill_Notes_OLPC.docx | Bin 0 -> 6883659 bytes src/buxton/source/Extra/Bill_Notes_PARCkbd.docx | Bin 0 -> 631959 bytes .../Extra/Bill_Notes_Philco_Mystery_Control.docx | Bin 0 -> 1994439 bytes src/buxton/source/Extra/Bill_Notes_TASA_Kbd.docx | Bin 0 -> 461199 bytes src/buxton/source/Extra/Bill_Notes_The_Tap.docx | Bin 0 -> 711321 bytes 33 files changed, 6 insertions(+), 3 deletions(-) delete mode 100644 src/buxton/source/Bill_Notes_Braun_T3.docx delete mode 100644 src/buxton/source/Bill_Notes_CasioC801.docx delete mode 100644 src/buxton/source/Bill_Notes_Casio_Mini.docx delete mode 100644 src/buxton/source/Bill_Notes_FingerWorks_Prototype.docx delete mode 100644 src/buxton/source/Bill_Notes_Fingerworks_TouchStream.docx delete mode 100644 src/buxton/source/Bill_Notes_FrogPad.docx delete mode 100644 src/buxton/source/Bill_Notes_Gavilan_SC.docx delete mode 100644 src/buxton/source/Bill_Notes_Grandjean_Stenotype.docx delete mode 100644 src/buxton/source/Bill_Notes_Matias.docx delete mode 100644 src/buxton/source/Bill_Notes_MousePen.docx delete mode 100644 src/buxton/source/Bill_Notes_NewO.docx delete mode 100644 src/buxton/source/Bill_Notes_OLPC.docx delete mode 100644 src/buxton/source/Bill_Notes_PARCkbd.docx delete mode 100644 src/buxton/source/Bill_Notes_Philco_Mystery_Control.docx delete mode 100644 src/buxton/source/Bill_Notes_TASA_Kbd.docx delete mode 100644 src/buxton/source/Bill_Notes_The_Tap.docx create mode 100644 src/buxton/source/Extra/Bill_Notes_Braun_T3.docx create mode 100644 src/buxton/source/Extra/Bill_Notes_CasioC801.docx create mode 
100644 src/buxton/source/Extra/Bill_Notes_Casio_Mini.docx create mode 100644 src/buxton/source/Extra/Bill_Notes_FingerWorks_Prototype.docx create mode 100644 src/buxton/source/Extra/Bill_Notes_Fingerworks_TouchStream.docx create mode 100644 src/buxton/source/Extra/Bill_Notes_FrogPad.docx create mode 100644 src/buxton/source/Extra/Bill_Notes_Gavilan_SC.docx create mode 100644 src/buxton/source/Extra/Bill_Notes_Grandjean_Stenotype.docx create mode 100644 src/buxton/source/Extra/Bill_Notes_Matias.docx create mode 100644 src/buxton/source/Extra/Bill_Notes_MousePen.docx create mode 100644 src/buxton/source/Extra/Bill_Notes_NewO.docx create mode 100644 src/buxton/source/Extra/Bill_Notes_OLPC.docx create mode 100644 src/buxton/source/Extra/Bill_Notes_PARCkbd.docx create mode 100644 src/buxton/source/Extra/Bill_Notes_Philco_Mystery_Control.docx create mode 100644 src/buxton/source/Extra/Bill_Notes_TASA_Kbd.docx create mode 100644 src/buxton/source/Extra/Bill_Notes_The_Tap.docx (limited to 'src') diff --git a/src/buxton/scraper.py b/src/buxton/scraper.py index a3bbc75ac..c7efd8f09 100644 --- a/src/buxton/scraper.py +++ b/src/buxton/scraper.py @@ -78,7 +78,7 @@ def write_image(folder, name): }, "data": { "url": path, - "type": "image" + "__type": "image" }, "title": name, "nativeWidth": native_width, @@ -105,9 +105,10 @@ def write_image(folder, name): def parse_document(file_name: str): print(f"Parsing {file_name}...") - result = {} pure_name = file_name.split(".")[0] + result = {} + dir_path = dist + "/" + pure_name mkdir_if_absent(dir_path) @@ -116,6 +117,8 @@ def parse_document(file_name: str): print("Extracting images...\n") for image in os.listdir(dir_path): view_doc_guids.append(write_image(pure_name, image)) + os.rename(dir_path + "/" + image, dir_path + + "/" + image.replace(".", "_m.", 1)) print() def sanitize(line): return re.sub("[\n\t]+", "", line).replace(u"\u00A0", " ").replace( @@ -234,7 +237,7 @@ for doc in mongofied: db.newDocuments.insert_one(doc) proxified = list( - map(lambda guid: {"fieldId": guid, "type": "proxy"}, view_doc_guids)) + map(lambda guid: {"fieldId": guid, "__type": "proxy"}, view_doc_guids)) db.newDocuments.update_one( {"fields.title": "WS collection 1"}, {"$push": {"fields.data.fields": {"$each": proxified}}} diff --git a/src/buxton/source/Bill_Notes_Braun_T3.docx b/src/buxton/source/Bill_Notes_Braun_T3.docx deleted file mode 100644 index 356697092..000000000 Binary files a/src/buxton/source/Bill_Notes_Braun_T3.docx and /dev/null differ diff --git a/src/buxton/source/Bill_Notes_CasioC801.docx b/src/buxton/source/Bill_Notes_CasioC801.docx deleted file mode 100644 index cd89fb97b..000000000 Binary files a/src/buxton/source/Bill_Notes_CasioC801.docx and /dev/null differ diff --git a/src/buxton/source/Bill_Notes_Casio_Mini.docx b/src/buxton/source/Bill_Notes_Casio_Mini.docx deleted file mode 100644 index a503cddfc..000000000 Binary files a/src/buxton/source/Bill_Notes_Casio_Mini.docx and /dev/null differ diff --git a/src/buxton/source/Bill_Notes_FingerWorks_Prototype.docx b/src/buxton/source/Bill_Notes_FingerWorks_Prototype.docx deleted file mode 100644 index 4d13a8cf5..000000000 Binary files a/src/buxton/source/Bill_Notes_FingerWorks_Prototype.docx and /dev/null differ diff --git a/src/buxton/source/Bill_Notes_Fingerworks_TouchStream.docx b/src/buxton/source/Bill_Notes_Fingerworks_TouchStream.docx deleted file mode 100644 index 578a1be08..000000000 Binary files a/src/buxton/source/Bill_Notes_Fingerworks_TouchStream.docx and /dev/null differ diff --git 
a/src/buxton/source/Bill_Notes_FrogPad.docx b/src/buxton/source/Bill_Notes_FrogPad.docx deleted file mode 100644 index d01e1bf5c..000000000 Binary files a/src/buxton/source/Bill_Notes_FrogPad.docx and /dev/null differ diff --git a/src/buxton/source/Bill_Notes_Gavilan_SC.docx b/src/buxton/source/Bill_Notes_Gavilan_SC.docx deleted file mode 100644 index 7bd28b376..000000000 Binary files a/src/buxton/source/Bill_Notes_Gavilan_SC.docx and /dev/null differ diff --git a/src/buxton/source/Bill_Notes_Grandjean_Stenotype.docx b/src/buxton/source/Bill_Notes_Grandjean_Stenotype.docx deleted file mode 100644 index 0615c4953..000000000 Binary files a/src/buxton/source/Bill_Notes_Grandjean_Stenotype.docx and /dev/null differ diff --git a/src/buxton/source/Bill_Notes_Matias.docx b/src/buxton/source/Bill_Notes_Matias.docx deleted file mode 100644 index 547603256..000000000 Binary files a/src/buxton/source/Bill_Notes_Matias.docx and /dev/null differ diff --git a/src/buxton/source/Bill_Notes_MousePen.docx b/src/buxton/source/Bill_Notes_MousePen.docx deleted file mode 100644 index 4e1056636..000000000 Binary files a/src/buxton/source/Bill_Notes_MousePen.docx and /dev/null differ diff --git a/src/buxton/source/Bill_Notes_NewO.docx b/src/buxton/source/Bill_Notes_NewO.docx deleted file mode 100644 index a514926d2..000000000 Binary files a/src/buxton/source/Bill_Notes_NewO.docx and /dev/null differ diff --git a/src/buxton/source/Bill_Notes_OLPC.docx b/src/buxton/source/Bill_Notes_OLPC.docx deleted file mode 100644 index bfca0a9bb..000000000 Binary files a/src/buxton/source/Bill_Notes_OLPC.docx and /dev/null differ diff --git a/src/buxton/source/Bill_Notes_PARCkbd.docx b/src/buxton/source/Bill_Notes_PARCkbd.docx deleted file mode 100644 index c0cf6ba9a..000000000 Binary files a/src/buxton/source/Bill_Notes_PARCkbd.docx and /dev/null differ diff --git a/src/buxton/source/Bill_Notes_Philco_Mystery_Control.docx b/src/buxton/source/Bill_Notes_Philco_Mystery_Control.docx deleted file mode 100644 index ad06903f3..000000000 Binary files a/src/buxton/source/Bill_Notes_Philco_Mystery_Control.docx and /dev/null differ diff --git a/src/buxton/source/Bill_Notes_TASA_Kbd.docx b/src/buxton/source/Bill_Notes_TASA_Kbd.docx deleted file mode 100644 index e4c659de9..000000000 Binary files a/src/buxton/source/Bill_Notes_TASA_Kbd.docx and /dev/null differ diff --git a/src/buxton/source/Bill_Notes_The_Tap.docx b/src/buxton/source/Bill_Notes_The_Tap.docx deleted file mode 100644 index 8ceebc71e..000000000 Binary files a/src/buxton/source/Bill_Notes_The_Tap.docx and /dev/null differ diff --git a/src/buxton/source/Extra/Bill_Notes_Braun_T3.docx b/src/buxton/source/Extra/Bill_Notes_Braun_T3.docx new file mode 100644 index 000000000..356697092 Binary files /dev/null and b/src/buxton/source/Extra/Bill_Notes_Braun_T3.docx differ diff --git a/src/buxton/source/Extra/Bill_Notes_CasioC801.docx b/src/buxton/source/Extra/Bill_Notes_CasioC801.docx new file mode 100644 index 000000000..cd89fb97b Binary files /dev/null and b/src/buxton/source/Extra/Bill_Notes_CasioC801.docx differ diff --git a/src/buxton/source/Extra/Bill_Notes_Casio_Mini.docx b/src/buxton/source/Extra/Bill_Notes_Casio_Mini.docx new file mode 100644 index 000000000..a503cddfc Binary files /dev/null and b/src/buxton/source/Extra/Bill_Notes_Casio_Mini.docx differ diff --git a/src/buxton/source/Extra/Bill_Notes_FingerWorks_Prototype.docx b/src/buxton/source/Extra/Bill_Notes_FingerWorks_Prototype.docx new file mode 100644 index 000000000..4d13a8cf5 Binary files /dev/null and 
b/src/buxton/source/Extra/Bill_Notes_FingerWorks_Prototype.docx differ diff --git a/src/buxton/source/Extra/Bill_Notes_Fingerworks_TouchStream.docx b/src/buxton/source/Extra/Bill_Notes_Fingerworks_TouchStream.docx new file mode 100644 index 000000000..578a1be08 Binary files /dev/null and b/src/buxton/source/Extra/Bill_Notes_Fingerworks_TouchStream.docx differ diff --git a/src/buxton/source/Extra/Bill_Notes_FrogPad.docx b/src/buxton/source/Extra/Bill_Notes_FrogPad.docx new file mode 100644 index 000000000..d01e1bf5c Binary files /dev/null and b/src/buxton/source/Extra/Bill_Notes_FrogPad.docx differ diff --git a/src/buxton/source/Extra/Bill_Notes_Gavilan_SC.docx b/src/buxton/source/Extra/Bill_Notes_Gavilan_SC.docx new file mode 100644 index 000000000..7bd28b376 Binary files /dev/null and b/src/buxton/source/Extra/Bill_Notes_Gavilan_SC.docx differ diff --git a/src/buxton/source/Extra/Bill_Notes_Grandjean_Stenotype.docx b/src/buxton/source/Extra/Bill_Notes_Grandjean_Stenotype.docx new file mode 100644 index 000000000..0615c4953 Binary files /dev/null and b/src/buxton/source/Extra/Bill_Notes_Grandjean_Stenotype.docx differ diff --git a/src/buxton/source/Extra/Bill_Notes_Matias.docx b/src/buxton/source/Extra/Bill_Notes_Matias.docx new file mode 100644 index 000000000..547603256 Binary files /dev/null and b/src/buxton/source/Extra/Bill_Notes_Matias.docx differ diff --git a/src/buxton/source/Extra/Bill_Notes_MousePen.docx b/src/buxton/source/Extra/Bill_Notes_MousePen.docx new file mode 100644 index 000000000..4e1056636 Binary files /dev/null and b/src/buxton/source/Extra/Bill_Notes_MousePen.docx differ diff --git a/src/buxton/source/Extra/Bill_Notes_NewO.docx b/src/buxton/source/Extra/Bill_Notes_NewO.docx new file mode 100644 index 000000000..a514926d2 Binary files /dev/null and b/src/buxton/source/Extra/Bill_Notes_NewO.docx differ diff --git a/src/buxton/source/Extra/Bill_Notes_OLPC.docx b/src/buxton/source/Extra/Bill_Notes_OLPC.docx new file mode 100644 index 000000000..bfca0a9bb Binary files /dev/null and b/src/buxton/source/Extra/Bill_Notes_OLPC.docx differ diff --git a/src/buxton/source/Extra/Bill_Notes_PARCkbd.docx b/src/buxton/source/Extra/Bill_Notes_PARCkbd.docx new file mode 100644 index 000000000..c0cf6ba9a Binary files /dev/null and b/src/buxton/source/Extra/Bill_Notes_PARCkbd.docx differ diff --git a/src/buxton/source/Extra/Bill_Notes_Philco_Mystery_Control.docx b/src/buxton/source/Extra/Bill_Notes_Philco_Mystery_Control.docx new file mode 100644 index 000000000..ad06903f3 Binary files /dev/null and b/src/buxton/source/Extra/Bill_Notes_Philco_Mystery_Control.docx differ diff --git a/src/buxton/source/Extra/Bill_Notes_TASA_Kbd.docx b/src/buxton/source/Extra/Bill_Notes_TASA_Kbd.docx new file mode 100644 index 000000000..e4c659de9 Binary files /dev/null and b/src/buxton/source/Extra/Bill_Notes_TASA_Kbd.docx differ diff --git a/src/buxton/source/Extra/Bill_Notes_The_Tap.docx b/src/buxton/source/Extra/Bill_Notes_The_Tap.docx new file mode 100644 index 000000000..8ceebc71e Binary files /dev/null and b/src/buxton/source/Extra/Bill_Notes_The_Tap.docx differ -- cgit v1.2.3-70-g09d2 From 5202cb26929a4bfe7b0881473ebcdebb06e91248 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Wed, 19 Jun 2019 17:44:56 -0400 Subject: proof of concept, imports metadata-embedded schemas --- src/buxton/scraper.py | 113 +++++++++++++++------ src/buxton/source/Bill_Notes_Braun_T3.docx | Bin 0 -> 1671968 bytes src/buxton/source/Bill_Notes_CasioC801.docx | Bin 0 -> 574664 bytes 
src/buxton/source/Bill_Notes_Casio_Mini.docx | Bin 0 -> 581069 bytes .../source/Bill_Notes_FingerWorks_Prototype.docx | Bin 0 -> 585090 bytes .../source/Bill_Notes_Fingerworks_TouchStream.docx | Bin 0 -> 1722555 bytes src/buxton/source/Bill_Notes_FrogPad.docx | Bin 0 -> 840173 bytes src/buxton/source/Bill_Notes_Gavilan_SC.docx | Bin 0 -> 1695290 bytes .../source/Bill_Notes_Grandjean_Stenotype.docx | Bin 0 -> 2094142 bytes src/buxton/source/Bill_Notes_Matias.docx | Bin 0 -> 590407 bytes src/buxton/source/Bill_Notes_MousePen.docx | Bin 0 -> 505322 bytes src/buxton/source/Bill_Notes_NewO.docx | Bin 0 -> 2264571 bytes src/buxton/source/Bill_Notes_OLPC.docx | Bin 0 -> 6883659 bytes src/buxton/source/Bill_Notes_PARCkbd.docx | Bin 0 -> 631959 bytes .../source/Bill_Notes_Philco_Mystery_Control.docx | Bin 0 -> 1994439 bytes src/buxton/source/Bill_Notes_TASA_Kbd.docx | Bin 0 -> 461199 bytes src/buxton/source/Bill_Notes_The_Tap.docx | Bin 0 -> 711321 bytes src/buxton/source/Extra/Bill_Notes_Braun_T3.docx | Bin 1671968 -> 0 bytes src/buxton/source/Extra/Bill_Notes_CasioC801.docx | Bin 574664 -> 0 bytes src/buxton/source/Extra/Bill_Notes_Casio_Mini.docx | Bin 581069 -> 0 bytes .../Extra/Bill_Notes_FingerWorks_Prototype.docx | Bin 585090 -> 0 bytes .../Extra/Bill_Notes_Fingerworks_TouchStream.docx | Bin 1722555 -> 0 bytes src/buxton/source/Extra/Bill_Notes_FrogPad.docx | Bin 840173 -> 0 bytes src/buxton/source/Extra/Bill_Notes_Gavilan_SC.docx | Bin 1695290 -> 0 bytes .../Extra/Bill_Notes_Grandjean_Stenotype.docx | Bin 2094142 -> 0 bytes src/buxton/source/Extra/Bill_Notes_Matias.docx | Bin 590407 -> 0 bytes src/buxton/source/Extra/Bill_Notes_MousePen.docx | Bin 505322 -> 0 bytes src/buxton/source/Extra/Bill_Notes_NewO.docx | Bin 2264571 -> 0 bytes src/buxton/source/Extra/Bill_Notes_OLPC.docx | Bin 6883659 -> 0 bytes src/buxton/source/Extra/Bill_Notes_PARCkbd.docx | Bin 631959 -> 0 bytes .../Extra/Bill_Notes_Philco_Mystery_Control.docx | Bin 1994439 -> 0 bytes src/buxton/source/Extra/Bill_Notes_TASA_Kbd.docx | Bin 461199 -> 0 bytes src/buxton/source/Extra/Bill_Notes_The_Tap.docx | Bin 711321 -> 0 bytes 33 files changed, 80 insertions(+), 33 deletions(-) create mode 100644 src/buxton/source/Bill_Notes_Braun_T3.docx create mode 100644 src/buxton/source/Bill_Notes_CasioC801.docx create mode 100644 src/buxton/source/Bill_Notes_Casio_Mini.docx create mode 100644 src/buxton/source/Bill_Notes_FingerWorks_Prototype.docx create mode 100644 src/buxton/source/Bill_Notes_Fingerworks_TouchStream.docx create mode 100644 src/buxton/source/Bill_Notes_FrogPad.docx create mode 100644 src/buxton/source/Bill_Notes_Gavilan_SC.docx create mode 100644 src/buxton/source/Bill_Notes_Grandjean_Stenotype.docx create mode 100644 src/buxton/source/Bill_Notes_Matias.docx create mode 100644 src/buxton/source/Bill_Notes_MousePen.docx create mode 100644 src/buxton/source/Bill_Notes_NewO.docx create mode 100644 src/buxton/source/Bill_Notes_OLPC.docx create mode 100644 src/buxton/source/Bill_Notes_PARCkbd.docx create mode 100644 src/buxton/source/Bill_Notes_Philco_Mystery_Control.docx create mode 100644 src/buxton/source/Bill_Notes_TASA_Kbd.docx create mode 100644 src/buxton/source/Bill_Notes_The_Tap.docx delete mode 100644 src/buxton/source/Extra/Bill_Notes_Braun_T3.docx delete mode 100644 src/buxton/source/Extra/Bill_Notes_CasioC801.docx delete mode 100644 src/buxton/source/Extra/Bill_Notes_Casio_Mini.docx delete mode 100644 src/buxton/source/Extra/Bill_Notes_FingerWorks_Prototype.docx delete mode 100644 
src/buxton/source/Extra/Bill_Notes_Fingerworks_TouchStream.docx delete mode 100644 src/buxton/source/Extra/Bill_Notes_FrogPad.docx delete mode 100644 src/buxton/source/Extra/Bill_Notes_Gavilan_SC.docx delete mode 100644 src/buxton/source/Extra/Bill_Notes_Grandjean_Stenotype.docx delete mode 100644 src/buxton/source/Extra/Bill_Notes_Matias.docx delete mode 100644 src/buxton/source/Extra/Bill_Notes_MousePen.docx delete mode 100644 src/buxton/source/Extra/Bill_Notes_NewO.docx delete mode 100644 src/buxton/source/Extra/Bill_Notes_OLPC.docx delete mode 100644 src/buxton/source/Extra/Bill_Notes_PARCkbd.docx delete mode 100644 src/buxton/source/Extra/Bill_Notes_Philco_Mystery_Control.docx delete mode 100644 src/buxton/source/Extra/Bill_Notes_TASA_Kbd.docx delete mode 100644 src/buxton/source/Extra/Bill_Notes_The_Tap.docx (limited to 'src') diff --git a/src/buxton/scraper.py b/src/buxton/scraper.py index c7efd8f09..c89961f54 100644 --- a/src/buxton/scraper.py +++ b/src/buxton/scraper.py @@ -13,7 +13,7 @@ source = "./source" dist = "../server/public/files" db = MongoClient("localhost", 27017)["Dash"] -view_doc_guids = [] +schema_guids = [] def extract_links(fileName): @@ -24,7 +24,7 @@ def extract_links(fileName): item = rels[rel] if item.reltype == RT.HYPERLINK and ".aspx" not in item._target: links.append(item._target) - return links + return listify(links) def extract_value(kv_string): @@ -44,6 +44,66 @@ def guid(): return str(uuid.uuid4()) +def listify(list): + return { + "fields": list, + "__type": "list" + } + + +def protofy(fieldId): + return { + "fieldId": fieldId, + "__type": "proxy" + } + + +def write_schema(parse_results): + view_guids = parse_results["view_guids"] + + data_doc = parse_results["schema"] + fields = data_doc["fields"] + + view_doc_guid = guid() + + view_doc = { + "_id": view_doc_guid, + "fields": { + "proto": protofy(data_doc["_id"]), + "x": 10, + "y": 10, + "width": 900, + "height": 600, + "panX": 0, + "panY": 0, + "zoomBasis": 0.5, + "zIndex": 2, + "libraryBrush": False, + "viewType": 2 + }, + "__type": "Doc" + } + + fields["proto"] = protofy("collectionProto") + fields["data"] = listify(proxify_guids(view_guids)) + fields["schemaColumns"] = listify(["title", "data"]) + fields["backgroundColor"] = "white" + fields["scale"] = 0.5 + fields["viewType"] = 2 + fields["author"] = "Bill Buxton" + fields["creationDate"] = { + "date": datetime.datetime.utcnow().microsecond, + "__type": "date" + } + fields["isPrototype"] = True + fields["page"] = -1 + + db.newDocuments.insert_one(data_doc) + db.newDocuments.insert_one(view_doc) + + return view_doc_guid + + def write_image(folder, name): path = f"http://localhost:1050/files/{folder}/{name}" @@ -53,10 +113,7 @@ def write_image(folder, name): view_doc = { "_id": view_doc_guid, "fields": { - "proto": { - "fieldId": data_doc_guid, - "__type": "proxy" - }, + "proto": protofy(data_doc_guid), "x": 10, "y": 10, "width": 300, @@ -72,10 +129,7 @@ def write_image(folder, name): data_doc = { "_id": data_doc_guid, "fields": { - "proto": { - "_id": "imageProto", - "__type": "proxy" - }, + "proto": protofy("imageProto"), "data": { "url": path, "__type": "image" @@ -115,8 +169,9 @@ def parse_document(file_name: str): raw = str(docx2txt.process(source + "/" + file_name, dir_path)) print("Extracting images...\n") + view_guids = [] for image in os.listdir(dir_path): - view_doc_guids.append(write_image(pure_name, image)) + view_guids.append(write_image(pure_name, image)) os.rename(dir_path + "/" + image, dir_path + "/" + image.replace(".", "_m.", 1)) 
print() @@ -178,7 +233,7 @@ def parse_document(file_name: str): while lines[cur] != "Image": link_descriptions.append(lines[cur].strip()) cur += 1 - result["link_descriptions"] = link_descriptions + result["link_descriptions"] = listify(link_descriptions) result["hyperlinks"] = extract_links(source + "/" + file_name) @@ -189,8 +244,8 @@ def parse_document(file_name: str): images.append(lines[cur]) captions.append(lines[cur + 1]) cur += 2 - result["images"] = images - result["captions"] = captions + result["images"] = listify(images) + result["captions"] = listify(captions) notes = [] if (cur < len(lines) and lines[cur] == "NOTES:"): @@ -199,24 +254,22 @@ def parse_document(file_name: str): notes.append(lines[cur]) cur += 1 if len(notes) > 0: - result["notes"] = notes + result["notes"] = listify(notes) print("...contents dictionary constructed.") - return result - - -def wrap(document): return { - "_id": guid(), - "fields": document, - "__type": "Doc" + "schema": { + "_id": guid(), + "fields": result, + "__type": "Doc" + }, + "view_guids": view_guids } -def upload(collection, mongofied): - for doc in mongofied: - collection.insert_one(doc) +def proxify_guids(guids): + return list(map(lambda guid: {"fieldId": guid, "__type": "proxy"}, guids)) if os.path.exists(dist): @@ -227,20 +280,14 @@ os.mkdir(dist) mkdir_if_absent(source) candidates = 0 -mongofied = [] for file_name in os.listdir(source): if file_name.endswith('.docx'): candidates += 1 - mongofied.append(wrap(parse_document(file_name))) - -for doc in mongofied: - db.newDocuments.insert_one(doc) + schema_guids.append(write_schema(parse_document(file_name))) -proxified = list( - map(lambda guid: {"fieldId": guid, "__type": "proxy"}, view_doc_guids)) db.newDocuments.update_one( {"fields.title": "WS collection 1"}, - {"$push": {"fields.data.fields": {"$each": proxified}}} + {"$push": {"fields.data.fields": {"$each": proxify_guids(schema_guids)}}} ) print("...dictionaries written to Dash Document.\n") diff --git a/src/buxton/source/Bill_Notes_Braun_T3.docx b/src/buxton/source/Bill_Notes_Braun_T3.docx new file mode 100644 index 000000000..356697092 Binary files /dev/null and b/src/buxton/source/Bill_Notes_Braun_T3.docx differ diff --git a/src/buxton/source/Bill_Notes_CasioC801.docx b/src/buxton/source/Bill_Notes_CasioC801.docx new file mode 100644 index 000000000..cd89fb97b Binary files /dev/null and b/src/buxton/source/Bill_Notes_CasioC801.docx differ diff --git a/src/buxton/source/Bill_Notes_Casio_Mini.docx b/src/buxton/source/Bill_Notes_Casio_Mini.docx new file mode 100644 index 000000000..a503cddfc Binary files /dev/null and b/src/buxton/source/Bill_Notes_Casio_Mini.docx differ diff --git a/src/buxton/source/Bill_Notes_FingerWorks_Prototype.docx b/src/buxton/source/Bill_Notes_FingerWorks_Prototype.docx new file mode 100644 index 000000000..4d13a8cf5 Binary files /dev/null and b/src/buxton/source/Bill_Notes_FingerWorks_Prototype.docx differ diff --git a/src/buxton/source/Bill_Notes_Fingerworks_TouchStream.docx b/src/buxton/source/Bill_Notes_Fingerworks_TouchStream.docx new file mode 100644 index 000000000..578a1be08 Binary files /dev/null and b/src/buxton/source/Bill_Notes_Fingerworks_TouchStream.docx differ diff --git a/src/buxton/source/Bill_Notes_FrogPad.docx b/src/buxton/source/Bill_Notes_FrogPad.docx new file mode 100644 index 000000000..d01e1bf5c Binary files /dev/null and b/src/buxton/source/Bill_Notes_FrogPad.docx differ diff --git a/src/buxton/source/Bill_Notes_Gavilan_SC.docx b/src/buxton/source/Bill_Notes_Gavilan_SC.docx new 
file mode 100644 index 000000000..7bd28b376 Binary files /dev/null and b/src/buxton/source/Bill_Notes_Gavilan_SC.docx differ diff --git a/src/buxton/source/Bill_Notes_Grandjean_Stenotype.docx b/src/buxton/source/Bill_Notes_Grandjean_Stenotype.docx new file mode 100644 index 000000000..0615c4953 Binary files /dev/null and b/src/buxton/source/Bill_Notes_Grandjean_Stenotype.docx differ diff --git a/src/buxton/source/Bill_Notes_Matias.docx b/src/buxton/source/Bill_Notes_Matias.docx new file mode 100644 index 000000000..547603256 Binary files /dev/null and b/src/buxton/source/Bill_Notes_Matias.docx differ diff --git a/src/buxton/source/Bill_Notes_MousePen.docx b/src/buxton/source/Bill_Notes_MousePen.docx new file mode 100644 index 000000000..4e1056636 Binary files /dev/null and b/src/buxton/source/Bill_Notes_MousePen.docx differ diff --git a/src/buxton/source/Bill_Notes_NewO.docx b/src/buxton/source/Bill_Notes_NewO.docx new file mode 100644 index 000000000..a514926d2 Binary files /dev/null and b/src/buxton/source/Bill_Notes_NewO.docx differ diff --git a/src/buxton/source/Bill_Notes_OLPC.docx b/src/buxton/source/Bill_Notes_OLPC.docx new file mode 100644 index 000000000..bfca0a9bb Binary files /dev/null and b/src/buxton/source/Bill_Notes_OLPC.docx differ diff --git a/src/buxton/source/Bill_Notes_PARCkbd.docx b/src/buxton/source/Bill_Notes_PARCkbd.docx new file mode 100644 index 000000000..c0cf6ba9a Binary files /dev/null and b/src/buxton/source/Bill_Notes_PARCkbd.docx differ diff --git a/src/buxton/source/Bill_Notes_Philco_Mystery_Control.docx b/src/buxton/source/Bill_Notes_Philco_Mystery_Control.docx new file mode 100644 index 000000000..ad06903f3 Binary files /dev/null and b/src/buxton/source/Bill_Notes_Philco_Mystery_Control.docx differ diff --git a/src/buxton/source/Bill_Notes_TASA_Kbd.docx b/src/buxton/source/Bill_Notes_TASA_Kbd.docx new file mode 100644 index 000000000..e4c659de9 Binary files /dev/null and b/src/buxton/source/Bill_Notes_TASA_Kbd.docx differ diff --git a/src/buxton/source/Bill_Notes_The_Tap.docx b/src/buxton/source/Bill_Notes_The_Tap.docx new file mode 100644 index 000000000..8ceebc71e Binary files /dev/null and b/src/buxton/source/Bill_Notes_The_Tap.docx differ diff --git a/src/buxton/source/Extra/Bill_Notes_Braun_T3.docx b/src/buxton/source/Extra/Bill_Notes_Braun_T3.docx deleted file mode 100644 index 356697092..000000000 Binary files a/src/buxton/source/Extra/Bill_Notes_Braun_T3.docx and /dev/null differ diff --git a/src/buxton/source/Extra/Bill_Notes_CasioC801.docx b/src/buxton/source/Extra/Bill_Notes_CasioC801.docx deleted file mode 100644 index cd89fb97b..000000000 Binary files a/src/buxton/source/Extra/Bill_Notes_CasioC801.docx and /dev/null differ diff --git a/src/buxton/source/Extra/Bill_Notes_Casio_Mini.docx b/src/buxton/source/Extra/Bill_Notes_Casio_Mini.docx deleted file mode 100644 index a503cddfc..000000000 Binary files a/src/buxton/source/Extra/Bill_Notes_Casio_Mini.docx and /dev/null differ diff --git a/src/buxton/source/Extra/Bill_Notes_FingerWorks_Prototype.docx b/src/buxton/source/Extra/Bill_Notes_FingerWorks_Prototype.docx deleted file mode 100644 index 4d13a8cf5..000000000 Binary files a/src/buxton/source/Extra/Bill_Notes_FingerWorks_Prototype.docx and /dev/null differ diff --git a/src/buxton/source/Extra/Bill_Notes_Fingerworks_TouchStream.docx b/src/buxton/source/Extra/Bill_Notes_Fingerworks_TouchStream.docx deleted file mode 100644 index 578a1be08..000000000 Binary files a/src/buxton/source/Extra/Bill_Notes_Fingerworks_TouchStream.docx and /dev/null 
differ diff --git a/src/buxton/source/Extra/Bill_Notes_FrogPad.docx b/src/buxton/source/Extra/Bill_Notes_FrogPad.docx deleted file mode 100644 index d01e1bf5c..000000000 Binary files a/src/buxton/source/Extra/Bill_Notes_FrogPad.docx and /dev/null differ diff --git a/src/buxton/source/Extra/Bill_Notes_Gavilan_SC.docx b/src/buxton/source/Extra/Bill_Notes_Gavilan_SC.docx deleted file mode 100644 index 7bd28b376..000000000 Binary files a/src/buxton/source/Extra/Bill_Notes_Gavilan_SC.docx and /dev/null differ diff --git a/src/buxton/source/Extra/Bill_Notes_Grandjean_Stenotype.docx b/src/buxton/source/Extra/Bill_Notes_Grandjean_Stenotype.docx deleted file mode 100644 index 0615c4953..000000000 Binary files a/src/buxton/source/Extra/Bill_Notes_Grandjean_Stenotype.docx and /dev/null differ diff --git a/src/buxton/source/Extra/Bill_Notes_Matias.docx b/src/buxton/source/Extra/Bill_Notes_Matias.docx deleted file mode 100644 index 547603256..000000000 Binary files a/src/buxton/source/Extra/Bill_Notes_Matias.docx and /dev/null differ diff --git a/src/buxton/source/Extra/Bill_Notes_MousePen.docx b/src/buxton/source/Extra/Bill_Notes_MousePen.docx deleted file mode 100644 index 4e1056636..000000000 Binary files a/src/buxton/source/Extra/Bill_Notes_MousePen.docx and /dev/null differ diff --git a/src/buxton/source/Extra/Bill_Notes_NewO.docx b/src/buxton/source/Extra/Bill_Notes_NewO.docx deleted file mode 100644 index a514926d2..000000000 Binary files a/src/buxton/source/Extra/Bill_Notes_NewO.docx and /dev/null differ diff --git a/src/buxton/source/Extra/Bill_Notes_OLPC.docx b/src/buxton/source/Extra/Bill_Notes_OLPC.docx deleted file mode 100644 index bfca0a9bb..000000000 Binary files a/src/buxton/source/Extra/Bill_Notes_OLPC.docx and /dev/null differ diff --git a/src/buxton/source/Extra/Bill_Notes_PARCkbd.docx b/src/buxton/source/Extra/Bill_Notes_PARCkbd.docx deleted file mode 100644 index c0cf6ba9a..000000000 Binary files a/src/buxton/source/Extra/Bill_Notes_PARCkbd.docx and /dev/null differ diff --git a/src/buxton/source/Extra/Bill_Notes_Philco_Mystery_Control.docx b/src/buxton/source/Extra/Bill_Notes_Philco_Mystery_Control.docx deleted file mode 100644 index ad06903f3..000000000 Binary files a/src/buxton/source/Extra/Bill_Notes_Philco_Mystery_Control.docx and /dev/null differ diff --git a/src/buxton/source/Extra/Bill_Notes_TASA_Kbd.docx b/src/buxton/source/Extra/Bill_Notes_TASA_Kbd.docx deleted file mode 100644 index e4c659de9..000000000 Binary files a/src/buxton/source/Extra/Bill_Notes_TASA_Kbd.docx and /dev/null differ diff --git a/src/buxton/source/Extra/Bill_Notes_The_Tap.docx b/src/buxton/source/Extra/Bill_Notes_The_Tap.docx deleted file mode 100644 index 8ceebc71e..000000000 Binary files a/src/buxton/source/Extra/Bill_Notes_The_Tap.docx and /dev/null differ -- cgit v1.2.3-70-g09d2 From 7a1358929bb0ad8d0ce9ec4688d37d83e7a37e2c Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Wed, 19 Jun 2019 19:43:04 -0400 Subject: finished importing! 
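
What "finished" means here, concretely: each parsed Word document already becomes its own schema collection (a view/data pair whose children are the device's image documents); the patch below adds a final write_schema pass that wraps all of those per-device schemas in a single parent collection and appends that parent to the "WS collection 1" workspace, plus a first sanitize_price helper that strips "$" and coerces the price to a float (e.g. "$595" becomes 595.0, falling back to NaN when parsing fails). A rough sketch of the resulting nesting, reusing the script's own helpers (guid, parse_document, write_schema and the module-level db/source) — the display-field lists are illustrative:

    # sketch of the driver logic at the bottom of scraper.py after this patch
    schema_guids = []
    for file_name in os.listdir(source):
        if file_name.endswith(".docx"):
            # one child schema per device; its columns show the title and image data
            schema_guids.append(write_schema(parse_document(file_name), ["title", "data"]))

    # an empty synthetic data doc whose children are the per-device schema views
    parent_guid = write_schema({
        "schema": {"_id": guid(), "fields": {}, "__type": "Doc"},
        "child_guids": schema_guids,
    }, ["title", "short_description"])

    # surface the parent collection inside the main workspace via a proxy reference
    db.newDocuments.update_one(
        {"fields.title": "WS collection 1"},
        {"$push": {"fields.data.fields": {"fieldId": parent_guid, "__type": "proxy"}}},
    )
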
--- src/buxton/scraper.py | 35 ++++++++++++++++++++++++++++------- 1 file changed, 28 insertions(+), 7 deletions(-) (limited to 'src') diff --git a/src/buxton/scraper.py b/src/buxton/scraper.py index c89961f54..1795f33d1 100644 --- a/src/buxton/scraper.py +++ b/src/buxton/scraper.py @@ -8,6 +8,7 @@ import shutil import uuid import datetime from PIL import Image +import math source = "./source" dist = "../server/public/files" @@ -58,8 +59,8 @@ def protofy(fieldId): } -def write_schema(parse_results): - view_guids = parse_results["view_guids"] +def write_schema(parse_results, display_fields): + view_guids = parse_results["child_guids"] data_doc = parse_results["schema"] fields = data_doc["fields"] @@ -86,7 +87,7 @@ def write_schema(parse_results): fields["proto"] = protofy("collectionProto") fields["data"] = listify(proxify_guids(view_guids)) - fields["schemaColumns"] = listify(["title", "data"]) + fields["schemaColumns"] = listify(display_fields) fields["backgroundColor"] = "white" fields["scale"] = 0.5 fields["viewType"] = 2 @@ -179,6 +180,14 @@ def parse_document(file_name: str): def sanitize(line): return re.sub("[\n\t]+", "", line).replace(u"\u00A0", " ").replace( u"\u2013", "-").replace(u"\u201c", '''"''').replace(u"\u201d", '''"''').strip() + def sanitize_price(raw): + res = math.nan + try: + res = float(raw.replace("$", "")) + except: + res = math.nan + return res + def remove_empty(line): return len(line) > 1 lines = list(map(sanitize, raw.split("\n"))) @@ -201,7 +210,8 @@ def parse_document(file_name: str): map(lambda data: data.strip().split(":"), lines[cur].split("|"))) result["company"] = clean[0][len(clean[0]) - 1].strip() result["year"] = clean[1][len(clean[1]) - 1].strip() - result["original_price"] = clean[2][len(clean[2]) - 1].strip() + result["original_price"] = sanitize_price( + clean[2][len(clean[2]) - 1].strip()) cur += 1 result["degrees_of_freedom"] = extract_value( @@ -264,7 +274,7 @@ def parse_document(file_name: str): "fields": result, "__type": "Doc" }, - "view_guids": view_guids + "child_guids": view_guids } @@ -283,13 +293,24 @@ candidates = 0 for file_name in os.listdir(source): if file_name.endswith('.docx'): candidates += 1 - schema_guids.append(write_schema(parse_document(file_name))) + schema_guids.append(write_schema( + parse_document(file_name), ["title", "data"])) + +parent_guid = write_schema({ + "schema": { + "_id": guid(), + "fields": {}, + "__type": "Doc" + }, + "child_guids": schema_guids +}, ["title", "short_description"]) db.newDocuments.update_one( {"fields.title": "WS collection 1"}, - {"$push": {"fields.data.fields": {"$each": proxify_guids(schema_guids)}}} + {"$push": {"fields.data.fields": {"fieldId": parent_guid, "__type": "proxy"}}} ) + print("...dictionaries written to Dash Document.\n") print(f"{candidates} candidates processed.") -- cgit v1.2.3-70-g09d2 From 462c3c69ef9649a60fc5fa8af947fa1a85c5cfa3 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Thu, 20 Jun 2019 12:19:21 -0400 Subject: Cleaned up script and included price sanitization --- src/buxton/scraper.py | 45 ++++++++++++++++++++++++++------------------- 1 file changed, 26 insertions(+), 19 deletions(-) (limited to 'src') diff --git a/src/buxton/scraper.py b/src/buxton/scraper.py index 1795f33d1..20124e157 100644 --- a/src/buxton/scraper.py +++ b/src/buxton/scraper.py @@ -9,6 +9,7 @@ import uuid import datetime from PIL import Image import math +import sys source = "./source" dist = "../server/public/files" @@ -102,6 +103,10 @@ def write_schema(parse_results, display_fields): 
db.newDocuments.insert_one(data_doc) db.newDocuments.insert_one(view_doc) + data_doc_guid = data_doc["_id"] + print( + f"Uploaded {view_doc_guid} (view) and {data_doc_guid} (data) to http://localhost:27017...\n") + return view_doc_guid @@ -153,8 +158,6 @@ def write_image(folder, name): db.newDocuments.insert_one(view_doc) db.newDocuments.insert_one(data_doc) - print(path) - return view_doc_guid @@ -169,24 +172,29 @@ def parse_document(file_name: str): raw = str(docx2txt.process(source + "/" + file_name, dir_path)) - print("Extracting images...\n") + print("Extracting images...") view_guids = [] for image in os.listdir(dir_path): view_guids.append(write_image(pure_name, image)) os.rename(dir_path + "/" + image, dir_path + "/" + image.replace(".", "_m.", 1)) - print() def sanitize(line): return re.sub("[\n\t]+", "", line).replace(u"\u00A0", " ").replace( u"\u2013", "-").replace(u"\u201c", '''"''').replace(u"\u201d", '''"''').strip() - def sanitize_price(raw): - res = math.nan - try: - res = float(raw.replace("$", "")) - except: - res = math.nan - return res + def sanitize_price(raw: str): + raw = raw.replace(",", "") + start = raw.find("$") + if start > -1: + i = start + 1 + while (i < len(raw) and re.match(r"[0-9\.]", raw[i])): + i += 1 + price = raw[start + 1: i + 1] + return float(price) + elif (raw.lower().find("nfs")): + return -1 + else: + return math.nan def remove_empty(line): return len(line) > 1 @@ -266,8 +274,6 @@ def parse_document(file_name: str): if len(notes) > 0: result["notes"] = listify(notes) - print("...contents dictionary constructed.") - return { "schema": { "_id": guid(), @@ -296,6 +302,7 @@ for file_name in os.listdir(source): schema_guids.append(write_schema( parse_document(file_name), ["title", "data"])) +print("Writing parent schema...") parent_guid = write_schema({ "schema": { "_id": guid(), @@ -303,18 +310,18 @@ parent_guid = write_schema({ "__type": "Doc" }, "child_guids": schema_guids -}, ["title", "short_description"]) +}, ["title", "short_description", "original_price"]) +print("Appending parent schema to main workspace...\n") db.newDocuments.update_one( {"fields.title": "WS collection 1"}, {"$push": {"fields.data.fields": {"fieldId": parent_guid, "__type": "proxy"}}} ) - -print("...dictionaries written to Dash Document.\n") - -print(f"{candidates} candidates processed.") - +print("Rewriting .gitignore...\n") lines = ['*', '!.gitignore'] with open(dist + "/.gitignore", 'w') as f: f.write('\n'.join(lines)) + +suffix = "" if candidates == 1 else "s" +print(f"Done. 
{candidates} candidate{suffix} processed.") -- cgit v1.2.3-70-g09d2 From 4be53f12fffa099f3381813192e60415898183d1 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Thu, 20 Jun 2019 16:51:16 -0400 Subject: updated print statements --- src/buxton/scraper.py | 22 +++++++++++++--------- 1 file changed, 13 insertions(+), 9 deletions(-) (limited to 'src') diff --git a/src/buxton/scraper.py b/src/buxton/scraper.py index 20124e157..043fd3cf4 100644 --- a/src/buxton/scraper.py +++ b/src/buxton/scraper.py @@ -39,7 +39,7 @@ def mkdir_if_absent(path): if not os.path.exists(path): os.mkdir(path) except OSError: - print("Failed to create the appropriate directory structures for %s" % file_name) + print("failed to create the appropriate directory structures for %s" % file_name) def guid(): @@ -104,8 +104,8 @@ def write_schema(parse_results, display_fields): db.newDocuments.insert_one(view_doc) data_doc_guid = data_doc["_id"] - print( - f"Uploaded {view_doc_guid} (view) and {data_doc_guid} (data) to http://localhost:27017...\n") + print(f"inserted view document ({view_doc_guid})") + print(f"inserted data document ({data_doc_guid})\n") return view_doc_guid @@ -162,7 +162,7 @@ def write_image(folder, name): def parse_document(file_name: str): - print(f"Parsing {file_name}...") + print(f"parsing {file_name}...") pure_name = file_name.split(".")[0] result = {} @@ -172,12 +172,14 @@ def parse_document(file_name: str): raw = str(docx2txt.process(source + "/" + file_name, dir_path)) - print("Extracting images...") view_guids = [] + count = 0 for image in os.listdir(dir_path): + count += 1 view_guids.append(write_image(pure_name, image)) os.rename(dir_path + "/" + image, dir_path + "/" + image.replace(".", "_m.", 1)) + print(f"extracted {count} images...") def sanitize(line): return re.sub("[\n\t]+", "", line).replace(u"\u00A0", " ").replace( u"\u2013", "-").replace(u"\u201c", '''"''').replace(u"\u201d", '''"''').strip() @@ -274,6 +276,8 @@ def parse_document(file_name: str): if len(notes) > 0: result["notes"] = listify(notes) + print("writing child schema...") + return { "schema": { "_id": guid(), @@ -302,7 +306,7 @@ for file_name in os.listdir(source): schema_guids.append(write_schema( parse_document(file_name), ["title", "data"])) -print("Writing parent schema...") +print("writing parent schema...") parent_guid = write_schema({ "schema": { "_id": guid(), @@ -312,16 +316,16 @@ parent_guid = write_schema({ "child_guids": schema_guids }, ["title", "short_description", "original_price"]) -print("Appending parent schema to main workspace...\n") +print("appending parent schema to main workspace...\n") db.newDocuments.update_one( {"fields.title": "WS collection 1"}, {"$push": {"fields.data.fields": {"fieldId": parent_guid, "__type": "proxy"}}} ) -print("Rewriting .gitignore...\n") +print("rewriting .gitignore...\n") lines = ['*', '!.gitignore'] with open(dist + "/.gitignore", 'w') as f: f.write('\n'.join(lines)) suffix = "" if candidates == 1 else "s" -print(f"Done. {candidates} candidate{suffix} processed.") +print(f"conversion complete. 
{candidates} candidate{suffix} processed.") -- cgit v1.2.3-70-g09d2 From 06bd486c72342b4d979245c9f4051156e6492541 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Tue, 25 Jun 2019 21:22:29 -0400 Subject: scraping progress --- src/buxton/scraper.py | 331 --------------------- src/buxton/source/Bill_Notes_Bill_Notes_CyKey.docx | Bin 1675500 -> 0 bytes src/buxton/source/Bill_Notes_Braun_T3.docx | Bin 1671968 -> 0 bytes src/buxton/source/Bill_Notes_CasioC801.docx | Bin 574664 -> 0 bytes src/buxton/source/Bill_Notes_Casio_Mini.docx | Bin 581069 -> 0 bytes .../source/Bill_Notes_FingerWorks_Prototype.docx | Bin 585090 -> 0 bytes .../source/Bill_Notes_Fingerworks_TouchStream.docx | Bin 1722555 -> 0 bytes src/buxton/source/Bill_Notes_FrogPad.docx | Bin 840173 -> 0 bytes src/buxton/source/Bill_Notes_Gavilan_SC.docx | Bin 1695290 -> 0 bytes .../source/Bill_Notes_Grandjean_Stenotype.docx | Bin 2094142 -> 0 bytes src/buxton/source/Bill_Notes_Matias.docx | Bin 590407 -> 0 bytes src/buxton/source/Bill_Notes_MousePen.docx | Bin 505322 -> 0 bytes src/buxton/source/Bill_Notes_NewO.docx | Bin 2264571 -> 0 bytes src/buxton/source/Bill_Notes_OLPC.docx | Bin 6883659 -> 0 bytes src/buxton/source/Bill_Notes_PARCkbd.docx | Bin 631959 -> 0 bytes .../source/Bill_Notes_Philco_Mystery_Control.docx | Bin 1994439 -> 0 bytes src/buxton/source/Bill_Notes_TASA_Kbd.docx | Bin 461199 -> 0 bytes src/buxton/source/Bill_Notes_The_Tap.docx | Bin 711321 -> 0 bytes src/client/util/ClientUtils.ts | 4 + src/scraping/acm/chromedriver.exe | Bin 0 -> 7477760 bytes src/scraping/acm/citations.txt | 2 + src/scraping/acm/debug.log | 38 +++ src/scraping/acm/index.js | 88 ++++++ src/scraping/acm/package.json | 17 ++ src/scraping/acm/results.txt | 64 ++++ src/scraping/buxton/scraper.py | 331 +++++++++++++++++++++ .../buxton/source/Bill_Notes_Bill_Notes_CyKey.docx | Bin 0 -> 1675500 bytes .../buxton/source/Bill_Notes_Braun_T3.docx | Bin 0 -> 1671968 bytes .../buxton/source/Bill_Notes_CasioC801.docx | Bin 0 -> 574664 bytes .../buxton/source/Bill_Notes_Casio_Mini.docx | Bin 0 -> 581069 bytes .../source/Bill_Notes_FingerWorks_Prototype.docx | Bin 0 -> 585090 bytes .../source/Bill_Notes_Fingerworks_TouchStream.docx | Bin 0 -> 1722555 bytes src/scraping/buxton/source/Bill_Notes_FrogPad.docx | Bin 0 -> 840173 bytes .../buxton/source/Bill_Notes_Gavilan_SC.docx | Bin 0 -> 1695290 bytes .../source/Bill_Notes_Grandjean_Stenotype.docx | Bin 0 -> 2094142 bytes src/scraping/buxton/source/Bill_Notes_Matias.docx | Bin 0 -> 590407 bytes .../buxton/source/Bill_Notes_MousePen.docx | Bin 0 -> 505322 bytes src/scraping/buxton/source/Bill_Notes_NewO.docx | Bin 0 -> 2264571 bytes src/scraping/buxton/source/Bill_Notes_OLPC.docx | Bin 0 -> 6883659 bytes src/scraping/buxton/source/Bill_Notes_PARCkbd.docx | Bin 0 -> 631959 bytes .../source/Bill_Notes_Philco_Mystery_Control.docx | Bin 0 -> 1994439 bytes .../buxton/source/Bill_Notes_TASA_Kbd.docx | Bin 0 -> 461199 bytes src/scraping/buxton/source/Bill_Notes_The_Tap.docx | Bin 0 -> 711321 bytes 43 files changed, 544 insertions(+), 331 deletions(-) delete mode 100644 src/buxton/scraper.py delete mode 100644 src/buxton/source/Bill_Notes_Bill_Notes_CyKey.docx delete mode 100644 src/buxton/source/Bill_Notes_Braun_T3.docx delete mode 100644 src/buxton/source/Bill_Notes_CasioC801.docx delete mode 100644 src/buxton/source/Bill_Notes_Casio_Mini.docx delete mode 100644 src/buxton/source/Bill_Notes_FingerWorks_Prototype.docx delete mode 100644 src/buxton/source/Bill_Notes_Fingerworks_TouchStream.docx delete mode 100644 
src/buxton/source/Bill_Notes_FrogPad.docx delete mode 100644 src/buxton/source/Bill_Notes_Gavilan_SC.docx delete mode 100644 src/buxton/source/Bill_Notes_Grandjean_Stenotype.docx delete mode 100644 src/buxton/source/Bill_Notes_Matias.docx delete mode 100644 src/buxton/source/Bill_Notes_MousePen.docx delete mode 100644 src/buxton/source/Bill_Notes_NewO.docx delete mode 100644 src/buxton/source/Bill_Notes_OLPC.docx delete mode 100644 src/buxton/source/Bill_Notes_PARCkbd.docx delete mode 100644 src/buxton/source/Bill_Notes_Philco_Mystery_Control.docx delete mode 100644 src/buxton/source/Bill_Notes_TASA_Kbd.docx delete mode 100644 src/buxton/source/Bill_Notes_The_Tap.docx create mode 100644 src/client/util/ClientUtils.ts create mode 100644 src/scraping/acm/chromedriver.exe create mode 100644 src/scraping/acm/citations.txt create mode 100644 src/scraping/acm/debug.log create mode 100644 src/scraping/acm/index.js create mode 100644 src/scraping/acm/package.json create mode 100644 src/scraping/acm/results.txt create mode 100644 src/scraping/buxton/scraper.py create mode 100644 src/scraping/buxton/source/Bill_Notes_Bill_Notes_CyKey.docx create mode 100644 src/scraping/buxton/source/Bill_Notes_Braun_T3.docx create mode 100644 src/scraping/buxton/source/Bill_Notes_CasioC801.docx create mode 100644 src/scraping/buxton/source/Bill_Notes_Casio_Mini.docx create mode 100644 src/scraping/buxton/source/Bill_Notes_FingerWorks_Prototype.docx create mode 100644 src/scraping/buxton/source/Bill_Notes_Fingerworks_TouchStream.docx create mode 100644 src/scraping/buxton/source/Bill_Notes_FrogPad.docx create mode 100644 src/scraping/buxton/source/Bill_Notes_Gavilan_SC.docx create mode 100644 src/scraping/buxton/source/Bill_Notes_Grandjean_Stenotype.docx create mode 100644 src/scraping/buxton/source/Bill_Notes_Matias.docx create mode 100644 src/scraping/buxton/source/Bill_Notes_MousePen.docx create mode 100644 src/scraping/buxton/source/Bill_Notes_NewO.docx create mode 100644 src/scraping/buxton/source/Bill_Notes_OLPC.docx create mode 100644 src/scraping/buxton/source/Bill_Notes_PARCkbd.docx create mode 100644 src/scraping/buxton/source/Bill_Notes_Philco_Mystery_Control.docx create mode 100644 src/scraping/buxton/source/Bill_Notes_TASA_Kbd.docx create mode 100644 src/scraping/buxton/source/Bill_Notes_The_Tap.docx (limited to 'src') diff --git a/src/buxton/scraper.py b/src/buxton/scraper.py deleted file mode 100644 index 043fd3cf4..000000000 --- a/src/buxton/scraper.py +++ /dev/null @@ -1,331 +0,0 @@ -import os -import docx2txt -from docx import Document -from docx.opc.constants import RELATIONSHIP_TYPE as RT -import re -from pymongo import MongoClient -import shutil -import uuid -import datetime -from PIL import Image -import math -import sys - -source = "./source" -dist = "../server/public/files" - -db = MongoClient("localhost", 27017)["Dash"] -schema_guids = [] - - -def extract_links(fileName): - links = [] - doc = Document(fileName) - rels = doc.part.rels - for rel in rels: - item = rels[rel] - if item.reltype == RT.HYPERLINK and ".aspx" not in item._target: - links.append(item._target) - return listify(links) - - -def extract_value(kv_string): - pieces = kv_string.split(":") - return (pieces[1] if len(pieces) > 1 else kv_string).strip() - - -def mkdir_if_absent(path): - try: - if not os.path.exists(path): - os.mkdir(path) - except OSError: - print("failed to create the appropriate directory structures for %s" % file_name) - - -def guid(): - return str(uuid.uuid4()) - - -def listify(list): - return { - "fields": 
list, - "__type": "list" - } - - -def protofy(fieldId): - return { - "fieldId": fieldId, - "__type": "proxy" - } - - -def write_schema(parse_results, display_fields): - view_guids = parse_results["child_guids"] - - data_doc = parse_results["schema"] - fields = data_doc["fields"] - - view_doc_guid = guid() - - view_doc = { - "_id": view_doc_guid, - "fields": { - "proto": protofy(data_doc["_id"]), - "x": 10, - "y": 10, - "width": 900, - "height": 600, - "panX": 0, - "panY": 0, - "zoomBasis": 0.5, - "zIndex": 2, - "libraryBrush": False, - "viewType": 2 - }, - "__type": "Doc" - } - - fields["proto"] = protofy("collectionProto") - fields["data"] = listify(proxify_guids(view_guids)) - fields["schemaColumns"] = listify(display_fields) - fields["backgroundColor"] = "white" - fields["scale"] = 0.5 - fields["viewType"] = 2 - fields["author"] = "Bill Buxton" - fields["creationDate"] = { - "date": datetime.datetime.utcnow().microsecond, - "__type": "date" - } - fields["isPrototype"] = True - fields["page"] = -1 - - db.newDocuments.insert_one(data_doc) - db.newDocuments.insert_one(view_doc) - - data_doc_guid = data_doc["_id"] - print(f"inserted view document ({view_doc_guid})") - print(f"inserted data document ({data_doc_guid})\n") - - return view_doc_guid - - -def write_image(folder, name): - path = f"http://localhost:1050/files/{folder}/{name}" - - data_doc_guid = guid() - view_doc_guid = guid() - - view_doc = { - "_id": view_doc_guid, - "fields": { - "proto": protofy(data_doc_guid), - "x": 10, - "y": 10, - "width": 300, - "zIndex": 2, - "libraryBrush": False - }, - "__type": "Doc" - } - - image = Image.open(f"{dist}/{folder}/{name}") - native_width, native_height = image.size - - data_doc = { - "_id": data_doc_guid, - "fields": { - "proto": protofy("imageProto"), - "data": { - "url": path, - "__type": "image" - }, - "title": name, - "nativeWidth": native_width, - "author": "Bill Buxton", - "creationDate": { - "date": datetime.datetime.utcnow().microsecond, - "__type": "date" - }, - "isPrototype": True, - "page": -1, - "nativeHeight": native_height, - "height": native_height - }, - "__type": "Doc" - } - - db.newDocuments.insert_one(view_doc) - db.newDocuments.insert_one(data_doc) - - return view_doc_guid - - -def parse_document(file_name: str): - print(f"parsing {file_name}...") - pure_name = file_name.split(".")[0] - - result = {} - - dir_path = dist + "/" + pure_name - mkdir_if_absent(dir_path) - - raw = str(docx2txt.process(source + "/" + file_name, dir_path)) - - view_guids = [] - count = 0 - for image in os.listdir(dir_path): - count += 1 - view_guids.append(write_image(pure_name, image)) - os.rename(dir_path + "/" + image, dir_path + - "/" + image.replace(".", "_m.", 1)) - print(f"extracted {count} images...") - - def sanitize(line): return re.sub("[\n\t]+", "", line).replace(u"\u00A0", " ").replace( - u"\u2013", "-").replace(u"\u201c", '''"''').replace(u"\u201d", '''"''').strip() - - def sanitize_price(raw: str): - raw = raw.replace(",", "") - start = raw.find("$") - if start > -1: - i = start + 1 - while (i < len(raw) and re.match(r"[0-9\.]", raw[i])): - i += 1 - price = raw[start + 1: i + 1] - return float(price) - elif (raw.lower().find("nfs")): - return -1 - else: - return math.nan - - def remove_empty(line): return len(line) > 1 - - lines = list(map(sanitize, raw.split("\n"))) - lines = list(filter(remove_empty, lines)) - - result["file_name"] = file_name - result["title"] = lines[2].strip() - result["short_description"] = lines[3].strip().replace( - "Short Description: ", "") - - cur = 5 
- notes = "" - while lines[cur] != "Device Details": - notes += lines[cur] + " " - cur += 1 - result["buxton_notes"] = notes.strip() - - cur += 1 - clean = list( - map(lambda data: data.strip().split(":"), lines[cur].split("|"))) - result["company"] = clean[0][len(clean[0]) - 1].strip() - result["year"] = clean[1][len(clean[1]) - 1].strip() - result["original_price"] = sanitize_price( - clean[2][len(clean[2]) - 1].strip()) - - cur += 1 - result["degrees_of_freedom"] = extract_value( - lines[cur]).replace("NA", "N/A") - cur += 1 - - dimensions = lines[cur].lower() - if dimensions.startswith("dimensions"): - dim_concat = dimensions[11:].strip() - cur += 1 - while lines[cur] != "Key Words": - dim_concat += (" " + lines[cur].strip()) - cur += 1 - result["dimensions"] = dim_concat - else: - result["dimensions"] = "N/A" - - cur += 1 - result["primary_key"] = extract_value(lines[cur]) - cur += 1 - result["secondary_key"] = extract_value(lines[cur]) - - while lines[cur] != "Links": - result["secondary_key"] += (" " + extract_value(lines[cur]).strip()) - cur += 1 - - cur += 1 - link_descriptions = [] - while lines[cur] != "Image": - link_descriptions.append(lines[cur].strip()) - cur += 1 - result["link_descriptions"] = listify(link_descriptions) - - result["hyperlinks"] = extract_links(source + "/" + file_name) - - images = [] - captions = [] - cur += 3 - while cur + 1 < len(lines) and lines[cur] != "NOTES:": - images.append(lines[cur]) - captions.append(lines[cur + 1]) - cur += 2 - result["images"] = listify(images) - result["captions"] = listify(captions) - - notes = [] - if (cur < len(lines) and lines[cur] == "NOTES:"): - cur += 1 - while cur < len(lines): - notes.append(lines[cur]) - cur += 1 - if len(notes) > 0: - result["notes"] = listify(notes) - - print("writing child schema...") - - return { - "schema": { - "_id": guid(), - "fields": result, - "__type": "Doc" - }, - "child_guids": view_guids - } - - -def proxify_guids(guids): - return list(map(lambda guid: {"fieldId": guid, "__type": "proxy"}, guids)) - - -if os.path.exists(dist): - shutil.rmtree(dist) -while os.path.exists(dist): - pass -os.mkdir(dist) -mkdir_if_absent(source) - -candidates = 0 -for file_name in os.listdir(source): - if file_name.endswith('.docx'): - candidates += 1 - schema_guids.append(write_schema( - parse_document(file_name), ["title", "data"])) - -print("writing parent schema...") -parent_guid = write_schema({ - "schema": { - "_id": guid(), - "fields": {}, - "__type": "Doc" - }, - "child_guids": schema_guids -}, ["title", "short_description", "original_price"]) - -print("appending parent schema to main workspace...\n") -db.newDocuments.update_one( - {"fields.title": "WS collection 1"}, - {"$push": {"fields.data.fields": {"fieldId": parent_guid, "__type": "proxy"}}} -) - -print("rewriting .gitignore...\n") -lines = ['*', '!.gitignore'] -with open(dist + "/.gitignore", 'w') as f: - f.write('\n'.join(lines)) - -suffix = "" if candidates == 1 else "s" -print(f"conversion complete. 
{candidates} candidate{suffix} processed.") diff --git a/src/buxton/source/Bill_Notes_Bill_Notes_CyKey.docx b/src/buxton/source/Bill_Notes_Bill_Notes_CyKey.docx deleted file mode 100644 index 06094b4d3..000000000 Binary files a/src/buxton/source/Bill_Notes_Bill_Notes_CyKey.docx and /dev/null differ diff --git a/src/buxton/source/Bill_Notes_Braun_T3.docx b/src/buxton/source/Bill_Notes_Braun_T3.docx deleted file mode 100644 index 356697092..000000000 Binary files a/src/buxton/source/Bill_Notes_Braun_T3.docx and /dev/null differ diff --git a/src/buxton/source/Bill_Notes_CasioC801.docx b/src/buxton/source/Bill_Notes_CasioC801.docx deleted file mode 100644 index cd89fb97b..000000000 Binary files a/src/buxton/source/Bill_Notes_CasioC801.docx and /dev/null differ diff --git a/src/buxton/source/Bill_Notes_Casio_Mini.docx b/src/buxton/source/Bill_Notes_Casio_Mini.docx deleted file mode 100644 index a503cddfc..000000000 Binary files a/src/buxton/source/Bill_Notes_Casio_Mini.docx and /dev/null differ diff --git a/src/buxton/source/Bill_Notes_FingerWorks_Prototype.docx b/src/buxton/source/Bill_Notes_FingerWorks_Prototype.docx deleted file mode 100644 index 4d13a8cf5..000000000 Binary files a/src/buxton/source/Bill_Notes_FingerWorks_Prototype.docx and /dev/null differ diff --git a/src/buxton/source/Bill_Notes_Fingerworks_TouchStream.docx b/src/buxton/source/Bill_Notes_Fingerworks_TouchStream.docx deleted file mode 100644 index 578a1be08..000000000 Binary files a/src/buxton/source/Bill_Notes_Fingerworks_TouchStream.docx and /dev/null differ diff --git a/src/buxton/source/Bill_Notes_FrogPad.docx b/src/buxton/source/Bill_Notes_FrogPad.docx deleted file mode 100644 index d01e1bf5c..000000000 Binary files a/src/buxton/source/Bill_Notes_FrogPad.docx and /dev/null differ diff --git a/src/buxton/source/Bill_Notes_Gavilan_SC.docx b/src/buxton/source/Bill_Notes_Gavilan_SC.docx deleted file mode 100644 index 7bd28b376..000000000 Binary files a/src/buxton/source/Bill_Notes_Gavilan_SC.docx and /dev/null differ diff --git a/src/buxton/source/Bill_Notes_Grandjean_Stenotype.docx b/src/buxton/source/Bill_Notes_Grandjean_Stenotype.docx deleted file mode 100644 index 0615c4953..000000000 Binary files a/src/buxton/source/Bill_Notes_Grandjean_Stenotype.docx and /dev/null differ diff --git a/src/buxton/source/Bill_Notes_Matias.docx b/src/buxton/source/Bill_Notes_Matias.docx deleted file mode 100644 index 547603256..000000000 Binary files a/src/buxton/source/Bill_Notes_Matias.docx and /dev/null differ diff --git a/src/buxton/source/Bill_Notes_MousePen.docx b/src/buxton/source/Bill_Notes_MousePen.docx deleted file mode 100644 index 4e1056636..000000000 Binary files a/src/buxton/source/Bill_Notes_MousePen.docx and /dev/null differ diff --git a/src/buxton/source/Bill_Notes_NewO.docx b/src/buxton/source/Bill_Notes_NewO.docx deleted file mode 100644 index a514926d2..000000000 Binary files a/src/buxton/source/Bill_Notes_NewO.docx and /dev/null differ diff --git a/src/buxton/source/Bill_Notes_OLPC.docx b/src/buxton/source/Bill_Notes_OLPC.docx deleted file mode 100644 index bfca0a9bb..000000000 Binary files a/src/buxton/source/Bill_Notes_OLPC.docx and /dev/null differ diff --git a/src/buxton/source/Bill_Notes_PARCkbd.docx b/src/buxton/source/Bill_Notes_PARCkbd.docx deleted file mode 100644 index c0cf6ba9a..000000000 Binary files a/src/buxton/source/Bill_Notes_PARCkbd.docx and /dev/null differ diff --git a/src/buxton/source/Bill_Notes_Philco_Mystery_Control.docx b/src/buxton/source/Bill_Notes_Philco_Mystery_Control.docx deleted file 
mode 100644 index ad06903f3..000000000 Binary files a/src/buxton/source/Bill_Notes_Philco_Mystery_Control.docx and /dev/null differ diff --git a/src/buxton/source/Bill_Notes_TASA_Kbd.docx b/src/buxton/source/Bill_Notes_TASA_Kbd.docx deleted file mode 100644 index e4c659de9..000000000 Binary files a/src/buxton/source/Bill_Notes_TASA_Kbd.docx and /dev/null differ diff --git a/src/buxton/source/Bill_Notes_The_Tap.docx b/src/buxton/source/Bill_Notes_The_Tap.docx deleted file mode 100644 index 8ceebc71e..000000000 Binary files a/src/buxton/source/Bill_Notes_The_Tap.docx and /dev/null differ diff --git a/src/client/util/ClientUtils.ts b/src/client/util/ClientUtils.ts new file mode 100644 index 000000000..425bde14a --- /dev/null +++ b/src/client/util/ClientUtils.ts @@ -0,0 +1,4 @@ +//AUTO-GENERATED FILE: DO NOT EDIT +export namespace ClientUtils { + export const RELEASE = false; +} \ No newline at end of file diff --git a/src/scraping/acm/chromedriver.exe b/src/scraping/acm/chromedriver.exe new file mode 100644 index 000000000..6a362fd43 Binary files /dev/null and b/src/scraping/acm/chromedriver.exe differ diff --git a/src/scraping/acm/citations.txt b/src/scraping/acm/citations.txt new file mode 100644 index 000000000..e5018ddef --- /dev/null +++ b/src/scraping/acm/citations.txt @@ -0,0 +1,2 @@ +321046 +2412979 \ No newline at end of file diff --git a/src/scraping/acm/debug.log b/src/scraping/acm/debug.log new file mode 100644 index 000000000..8c0a148f4 --- /dev/null +++ b/src/scraping/acm/debug.log @@ -0,0 +1,38 @@ +[0625/170004.768:ERROR:process_reader_win.cc(123)] NtOpenThread: {Access Denied} A process has requested access to an object, but has not been granted those access rights. (0xc0000022) +[0625/170004.769:ERROR:exception_snapshot_win.cc(98)] thread ID 17604 not found in process +[0625/171124.644:ERROR:process_reader_win.cc(123)] NtOpenThread: {Access Denied} A process has requested access to an object, but has not been granted those access rights. (0xc0000022) +[0625/171124.645:ERROR:exception_snapshot_win.cc(98)] thread ID 14348 not found in process +[0625/171853.989:ERROR:process_reader_win.cc(123)] NtOpenThread: {Access Denied} A process has requested access to an object, but has not been granted those access rights. (0xc0000022) +[0625/171853.990:ERROR:exception_snapshot_win.cc(98)] thread ID 12080 not found in process +[0625/171947.744:ERROR:process_reader_win.cc(123)] NtOpenThread: {Access Denied} A process has requested access to an object, but has not been granted those access rights. (0xc0000022) +[0625/171947.745:ERROR:exception_snapshot_win.cc(98)] thread ID 16160 not found in process +[0625/172007.424:ERROR:process_reader_win.cc(123)] NtOpenThread: {Access Denied} A process has requested access to an object, but has not been granted those access rights. (0xc0000022) +[0625/172007.425:ERROR:exception_snapshot_win.cc(98)] thread ID 13472 not found in process +[0625/172059.353:ERROR:process_reader_win.cc(123)] NtOpenThread: {Access Denied} A process has requested access to an object, but has not been granted those access rights. (0xc0000022) +[0625/172059.354:ERROR:exception_snapshot_win.cc(98)] thread ID 6396 not found in process +[0625/172402.795:ERROR:process_reader_win.cc(123)] NtOpenThread: {Access Denied} A process has requested access to an object, but has not been granted those access rights. 
(0xc0000022) +[0625/172402.796:ERROR:exception_snapshot_win.cc(98)] thread ID 10720 not found in process +[0625/172618.850:ERROR:process_reader_win.cc(123)] NtOpenThread: {Access Denied} A process has requested access to an object, but has not been granted those access rights. (0xc0000022) +[0625/172618.850:ERROR:exception_snapshot_win.cc(98)] thread ID 21136 not found in process +[0625/172819.875:ERROR:process_reader_win.cc(123)] NtOpenThread: {Access Denied} A process has requested access to an object, but has not been granted those access rights. (0xc0000022) +[0625/172819.876:ERROR:exception_snapshot_win.cc(98)] thread ID 17624 not found in process +[0625/172953.674:ERROR:process_reader_win.cc(123)] NtOpenThread: {Access Denied} A process has requested access to an object, but has not been granted those access rights. (0xc0000022) +[0625/172953.675:ERROR:exception_snapshot_win.cc(98)] thread ID 15180 not found in process +[0625/173412.182:ERROR:process_reader_win.cc(123)] NtOpenThread: {Access Denied} A process has requested access to an object, but has not been granted those access rights. (0xc0000022) +[0625/173412.182:ERROR:exception_snapshot_win.cc(98)] thread ID 13952 not found in process +[0625/173447.806:ERROR:process_reader_win.cc(123)] NtOpenThread: {Access Denied} A process has requested access to an object, but has not been granted those access rights. (0xc0000022) +[0625/173447.807:ERROR:exception_snapshot_win.cc(98)] thread ID 1572 not found in process +[0625/173516.188:ERROR:process_reader_win.cc(123)] NtOpenThread: {Access Denied} A process has requested access to an object, but has not been granted those access rights. (0xc0000022) +[0625/173516.189:ERROR:exception_snapshot_win.cc(98)] thread ID 5472 not found in process +[0625/173528.446:ERROR:process_reader_win.cc(123)] NtOpenThread: {Access Denied} A process has requested access to an object, but has not been granted those access rights. (0xc0000022) +[0625/173528.447:ERROR:exception_snapshot_win.cc(98)] thread ID 20420 not found in process +[0625/173539.436:ERROR:process_reader_win.cc(123)] NtOpenThread: {Access Denied} A process has requested access to an object, but has not been granted those access rights. (0xc0000022) +[0625/173539.437:ERROR:exception_snapshot_win.cc(98)] thread ID 16192 not found in process +[0625/173643.139:ERROR:process_reader_win.cc(123)] NtOpenThread: {Access Denied} A process has requested access to an object, but has not been granted those access rights. (0xc0000022) +[0625/173643.140:ERROR:exception_snapshot_win.cc(98)] thread ID 15716 not found in process +[0625/173659.376:ERROR:process_reader_win.cc(123)] NtOpenThread: {Access Denied} A process has requested access to an object, but has not been granted those access rights. (0xc0000022) +[0625/173659.377:ERROR:exception_snapshot_win.cc(98)] thread ID 11828 not found in process +[0625/201137.209:ERROR:process_reader_win.cc(123)] NtOpenThread: {Access Denied} A process has requested access to an object, but has not been granted those access rights. (0xc0000022) +[0625/201137.210:ERROR:exception_snapshot_win.cc(98)] thread ID 7688 not found in process +[0625/210240.476:ERROR:process_reader_win.cc(123)] NtOpenThread: {Access Denied} A process has requested access to an object, but has not been granted those access rights. 
(0xc0000022) +[0625/210240.477:ERROR:exception_snapshot_win.cc(98)] thread ID 20828 not found in process diff --git a/src/scraping/acm/index.js b/src/scraping/acm/index.js new file mode 100644 index 000000000..81f775617 --- /dev/null +++ b/src/scraping/acm/index.js @@ -0,0 +1,88 @@ +const { Builder, By, Key, until } = require('selenium-webdriver'); +const fs = require("fs"); + +let driver; +fs.readFile("./citations.txt", { encoding: "utf8" }, scrapeTargets); +results = [] + +async function scrapeTargets(error, data) { + if (error) { + console.log("\nUnable to collect target citations from a citations.txt file stored in this directory.\nPlease make sure one is provided.") + return; + } + + driver = await new Builder().forBrowser('chrome').build(); + + let references = data.split("\n").map(entry => entry.replace("\r", "")).filter(line => line.match(/\d+/g)); + + let results = [] + let pdfs = [] + for (let id of references) { + let result = {} + let lines = [] + try { + let url = `https://dl.acm.org/citation.cfm?id=${id}`; + await driver.get(url); + await driver.sleep(500) + let candidates = await driver.findElements(By.xpath('.//a[contains(@href, "ft_gateway.cfm?id=")]')); + if (candidates.length > 0) { + pdfs.push(candidates[0]) + } + let webElements = await driver.findElements(By.id("abstract-body")) + for (let el of webElements) { + let text = await el.getText() + lines.push(text) + } + result.url = url + result.abstract = lines.join(" "); + await driver.findElement(By.xpath(`//*[@id="tab-1014-btnInnerEl"]/span`)).click() + await driver.sleep(500) + let authors = await driver.findElement(By.xpath('//*[@id="tabpanel-1009-body"]')).getText() + let sanitize = line => line.length > 0 && !(line.startsWith("No contact information") || line.startsWith("View colleagues of") || line.startsWith("Bibliometrics:")) + authorLines = authors.split("\n").map(line => line.trim()).filter(sanitize) + + let i = 0; + let allAuthors = [] + while (i < authorLines.length) { + let individual = []; + while (!authorLines[i].startsWith("Average citations")) { + individual.push(authorLines[i]) + i++ + } + individual.push(authorLines[i]) + allAuthors.push(individual); + i++ + } + result.authors = allAuthors.map(metadata => { + let publicationYears = metadata[1].substring(18).split("-"); + author = { + name: metadata[0], + publication_start: parseInt(publicationYears[0]), + publication_end: parseInt(publicationYears[1]) + }; + for (let count = 2; count < metadata.length; count++) { + let attr = metadata[count] + let char = attr.length - 1; + while (attr[char] != " ") { + char-- + } + let key = attr.substring(0, char).toLowerCase().replace(/ /g, "_").replace(/[\(\)]/g, ""); + let value = parseFloat(attr.substring(char + 1).replace(/,/g, "")); + author[key] = value + } + return author + }) + } catch (e) { + console.log(e) + await driver.quit(); + } + results.push(result) + } + + let output = ""; + results.forEach(res => output += (JSON.stringify(res, null, 4) + "\n")); + + fs.writeFile("./results.txt", output, function errorHandler(exception) { console.log(exception || "results successfully written") }) + + await driver.quit(); +} \ No newline at end of file diff --git a/src/scraping/acm/package.json b/src/scraping/acm/package.json new file mode 100644 index 000000000..10f4d2156 --- /dev/null +++ b/src/scraping/acm/package.json @@ -0,0 +1,17 @@ +{ + "name": "scraper", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "keywords": 
[], + "author": "", + "license": "ISC", + "dependencies": { + "axios": "^0.19.0", + "cheerio": "^1.0.0-rc.3", + "selenium-webdriver": "^4.0.0-alpha.4" + } +} diff --git a/src/scraping/acm/results.txt b/src/scraping/acm/results.txt new file mode 100644 index 000000000..05bb2be8b --- /dev/null +++ b/src/scraping/acm/results.txt @@ -0,0 +1,64 @@ +{ + "url": "https://dl.acm.org/citation.cfm?id=321046", + "abstract": "It has been observed by many people that a striking number of quite diverse mathematical problems can be formulated as problems in integer programming, that is, linear programming problems in which some or all of the variables are required to assume integral values. This fact is rendered quite interesting by recent research on such problems, notably by R. E. Gomory [2, 3], which gives promise of yielding efficient computational techniques for their solution. The present paper provides yet another example of the versatility of integer programming as a mathematical modeling device by representing a generalization of the well-known “Travelling Salesman Problem” in integer programming terms. The authors have developed several such models, of which the one presented here is the most efficient in terms of generality, number of variables, and number of constraints. This model is due to the second author [4] and was presented briefly at the Symposium on Combinatorial Problems held at Princeton University, April 1960, sponsored by SIAM and IBM. The problem treated is: (1) A salesman is required to visit each of n cities, indexed by 1, … , n. He leaves from a “base city” indexed by 0, visits each of the n other cities exactly once, and returns to city 0. During his travels he must return to 0 exactly t times, including his final return (here t may be allowed to vary), and he must visit no more than p cities in one tour. (By a tour we mean a succession of visits to cities without stopping at city 0.) It is required to find such an itinerary which minimizes the total distance traveled by the salesman. Note that if t is fixed, then for the problem to have a solution we must have tp ≧ n. For t = 1, p ≧ n, we have the standard traveling salesman problem. Let dij (i ≠ j = 0, 1, … , n) be the distance covered in traveling from city i to city j. The following integer programming problem will be shown to be equivalent to (1): (2) Minimize the linear form ∑0≦i≠j≦n∑ dijxij over the set determined by the relations ∑ni=0i≠j xij = 1 (j = 1, … , n) ∑nj=0j≠i xij = 1 (i = 1, … , n) ui - uj + pxij ≦ p - 1 (1 ≦ i ≠ j ≦ n) where the xij are non-negative integers and the ui (i = 1, …, n) are arbitrary real numbers. (We shall see that it is permissible to restrict the ui to be non-negative integers as well.) If t is fixed it is necessary to add the additional relation: ∑nu=1 xi0 = t Note that the constraints require that xij = 0 or 1, so that a natural correspondence between these two problems exists if the xij are interpreted as follows: The salesman proceeds from city i to city j if and only if xij = 1. Under this correspondence the form to be minimized in (2) is the total distance to be traveled by the salesman in (1), so the burden of proof is to show that the two feasible sets correspond; i.e., a feasible solution to (2) has xij which do define a legitimate itinerary in (1), and, conversely a legitimate itinerary in (1) defines xij, which, together with appropriate ui, satisfy the constraints of (2). Consider a feasible solution to (2). The number of returns to city 0 is given by ∑ni=1 xi0. 
The constraints of the form ∑ xij = 1, all xij non-negative integers, represent the conditions that each city (other than zero) is visited exactly once. The ui play a role similar to node potentials in a network and the inequalities involving them serve to eliminate tours that do not begin and end at city 0 and tours that visit more than p cities. Consider any xr0r1 = 1 (r1 ≠ 0). There exists a unique r2 such that xr1r2 = 1. Unless r2 = 0, there is a unique r3 with xr2r3 = 1. We proceed in this fashion until some rj = 0. This must happen since the alternative is that at some point we reach an rk = rj, j + 1 < k. Since none of the r's are zero we have uri - uri + 1 + pxriri + 1 ≦ p - 1 or uri - uri + 1 ≦ - 1. Summing from i = j to k - 1, we have urj - urk = 0 ≦ j + 1 - k, which is a contradiction. Thus all tours include city 0. It remains to observe that no tours is of length greater than p. Suppose such a tour exists, x0r1 , xr1r2 , … , xrprp+1 = 1 with all ri ≠ 0. Then, as before, ur1 - urp+1 ≦ - p or urp+1 - ur1 ≧ p. But we have urp+1 - ur1 + pxrp+1r1 ≦ p - 1 or urp+1 - ur1 ≦ p (1 - xrp+1r1) - 1 ≦ p - 1, which is a contradiction. Conversely, if the xij correspond to a legitimate itinerary, it is clear that the ui can be adjusted so that ui = j if city i is the jth city visited in the tour which includes city i, for we then have ui - uj = - 1 if xij = 1, and always ui - uj ≦ p - 1. The above integer program involves n2 + n constraints (if t is not fixed) in n2 + 2n variables. Since the inequality form of constraint is fundamental for integer programming calculations, one may eliminate 2n variables, say the xi0 and x0j, by means of the equation constraints and produce an equivalent problem with n2 + n inequalities and n2 variables. The currently known integer programming procedures are sufficiently regular in their behavior to cast doubt on the heuristic value of machine experiments with our model. However, it seems appropriate to report the results of the five machine experiments we have conducted so far. The solution procedure used was the all-integer algorithm of R. E. Gomory [3] without the ranking procedure he describes. The first three experiments were simple model verification tests on a four-city standard traveling salesman problem with distance matrix [ 20 23 4 30 7 27 25 5 25 3 21 26 ] The first experiment was with a model, now obsolete, using roughly twice as many constraints and variables as the current model (for this problem, 28 constraints in 21 variables). The machine was halted after 4000 pivot steps had failed to produce a solution. The second experiment used the earlier model with the xi0 and x0j eliminated, resulting in a 28-constraint, 15-variable problem. Here the machine produced the optimal solution in 41 pivot steps. The third experiment used the current formulation with the xi0 and x0j eliminated, yielding 13 constraints and 9 variables. The optimal solution was reached in 7 pivot steps. The fourth and fifth experiments were used on a standard ten-city problem, due to Barachet, solved by Dantzig, Johnson and Fulkerson [1]. The current formulation was used, yielding 91 constraints in 81 variables. The fifth problem differed from the fourth only in that the ordering of the rows was altered to attempt to introduce more favorable pivot choices. In each case the machine was stopped after over 250 pivot steps had failed to produce the solution. In each case the last 100 pivot steps had failed to change the value of the objective function. 
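[Editor's note: the integer program quoted in this scraped abstract lost its subscripts and superscripts during extraction. Read in standard notation, the formulation of Miller, Tucker and Zemlin described above is approximately the following; this reconstruction is an editorial reading aid only and is not part of the committed results.txt.]

    minimize     \sum_{0 \le i \ne j \le n} d_{ij} x_{ij}
    subject to   \sum_{i=0,\, i \ne j}^{n} x_{ij} = 1          (j = 1, \dots, n)
                 \sum_{j=0,\, j \ne i}^{n} x_{ij} = 1          (i = 1, \dots, n)
                 u_i - u_j + p\, x_{ij} \le p - 1              (1 \le i \ne j \le n)
    with x_{ij} \ge 0 integer (the constraints force x_{ij} \in \{0, 1\}) and u_i real,
    and, when the number of tours t is fixed, \sum_{i=1}^{n} x_{i0} = t.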
It seems hopeful that more efficient integer programming procedures now under development will yield a satisfactory algorithmic solution to the traveling salesman problem, when applied to this model. In any case, the model serves to illustrate how problems of this sort may be succinctly formulated in integer programming terms.", + "authors": [ + { + "name": "C. E. Miller", + "publication_start": 1960, + "publication_end": 1960, + "publication_count": 1, + "citation_count": 179, + "available_for_download": 1, + "downloads_6_weeks": 132, + "downloads_12_months": 993, + "downloads_cumulative": 9781, + "average_downloads_per_article": 9781, + "average_citations_per_article": 179 + }, + { + "name": "A. W. Tucker", + "publication_start": 1960, + "publication_end": 1993, + "publication_count": 5, + "citation_count": 196, + "available_for_download": 1, + "downloads_6_weeks": 132, + "downloads_12_months": 993, + "downloads_cumulative": 9781, + "average_downloads_per_article": 9781, + "average_citations_per_article": 39.2 + }, + { + "name": "R. A. Zemlin", + "publication_start": 1960, + "publication_end": 1964, + "publication_count": 2, + "citation_count": 188, + "available_for_download": 2, + "downloads_6_weeks": 132, + "downloads_12_months": 998, + "downloads_cumulative": 10012, + "average_downloads_per_article": 5006, + "average_citations_per_article": 94 + } + ] +} +{ + "url": "https://dl.acm.org/citation.cfm?id=2412979", + "abstract": "The STRUCT system utilizes the flexibility of a powerful graphics display system to provide a set of tools for program analysis. These tools allow the analysis of the static prograin structure and the dynamic execution behavior. of programs within the entire operating system/user program environment of the Brown University Graphics System (BUGS). Information is collected and presented in a manner which fully exploits two aspects of this environment. First, the operating system has been developed in a well-structured hierarcal manner following principles laid down by other researchers (2), (3). Second the programs under analysis have been written in a structured programming language following coding conventions which make available, at the source code level, valuable program control information. A new set of pictorial constructs is introduced for presenting a. program structure (static or dynamic) for inspection. These constructs combine the best features of an indented structured source code listing and the box odented nature of traditional flow charts. The graphical tools available are USed to provide for swift changes in. the desired level of detail displayed within a program structure, for traveling linearly through a program structure, for traveling through a complex program structure (following subroutine or system calls), for concurrently viewing multiple related program structures, and for presenting dynamic program behavior data using three-dimensional projections, The volume of a three-dimensional box representing a program block is proportional to the block's resource utilization. The scope of this paper is limited to a description of the STRUCT system. This system is currently being used to predict and analyze the performance advantages available through the migration of function (program modules) between levels of software and between software and firmware within BUGS. 
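[Editor's aside: the results.txt entries shown above share one record shape produced by the ACM scraper. As a reading aid, that shape can be summarized by the TypeScript declarations below; the key names are taken directly from the JSON above, but no such interface is declared anywhere in this patch series.]

    // Per-author bibliometrics as they appear in the scraped author tab.
    interface AcmAuthorStats {
        name: string;
        publication_start: number;
        publication_end: number;
        publication_count: number;
        citation_count: number;
        available_for_download: number;
        downloads_6_weeks: number;
        downloads_12_months: number;
        downloads_cumulative: number;
        average_downloads_per_article: number;
        average_citations_per_article: number;
    }

    // One entry of results.txt: the citation page URL, its abstract text, and its authors.
    interface AcmCitationRecord {
        url: string;
        abstract: string;
        authors: AcmAuthorStats[];
    }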
The results of this research on migration will be included in a doctoral dissertation currently being written.", + "authors": [ + { + "name": "Andries Van Dam", + "publication_start": 1975, + "publication_end": 1975, + "publication_count": 1, + "citation_count": 0, + "available_for_download": 0, + "downloads_6_weeks": 8, + "downloads_12_months": 97, + "downloads_cumulative": 97, + "average_downloads_per_article": 0, + "average_citations_per_article": 0 + } + ] +} diff --git a/src/scraping/buxton/scraper.py b/src/scraping/buxton/scraper.py new file mode 100644 index 000000000..97af10519 --- /dev/null +++ b/src/scraping/buxton/scraper.py @@ -0,0 +1,331 @@ +import os +import docx2txt +from docx import Document +from docx.opc.constants import RELATIONSHIP_TYPE as RT +import re +from pymongo import MongoClient +import shutil +import uuid +import datetime +from PIL import Image +import math +import sys + +source = "./source" +dist = "../../server/public/files" + +db = MongoClient("localhost", 27017)["Dash"] +schema_guids = [] + + +def extract_links(fileName): + links = [] + doc = Document(fileName) + rels = doc.part.rels + for rel in rels: + item = rels[rel] + if item.reltype == RT.HYPERLINK and ".aspx" not in item._target: + links.append(item._target) + return listify(links) + + +def extract_value(kv_string): + pieces = kv_string.split(":") + return (pieces[1] if len(pieces) > 1 else kv_string).strip() + + +def mkdir_if_absent(path): + try: + if not os.path.exists(path): + os.mkdir(path) + except OSError: + print("failed to create the appropriate directory structures for %s" % file_name) + + +def guid(): + return str(uuid.uuid4()) + + +def listify(list): + return { + "fields": list, + "__type": "list" + } + + +def protofy(fieldId): + return { + "fieldId": fieldId, + "__type": "proxy" + } + + +def write_schema(parse_results, display_fields): + view_guids = parse_results["child_guids"] + + data_doc = parse_results["schema"] + fields = data_doc["fields"] + + view_doc_guid = guid() + + view_doc = { + "_id": view_doc_guid, + "fields": { + "proto": protofy(data_doc["_id"]), + "x": 10, + "y": 10, + "width": 900, + "height": 600, + "panX": 0, + "panY": 0, + "zoomBasis": 0.5, + "zIndex": 2, + "libraryBrush": False, + "viewType": 2 + }, + "__type": "Doc" + } + + fields["proto"] = protofy("collectionProto") + fields["data"] = listify(proxify_guids(view_guids)) + fields["schemaColumns"] = listify(display_fields) + fields["backgroundColor"] = "white" + fields["scale"] = 0.5 + fields["viewType"] = 2 + fields["author"] = "Bill Buxton" + fields["creationDate"] = { + "date": datetime.datetime.utcnow().microsecond, + "__type": "date" + } + fields["isPrototype"] = True + fields["page"] = -1 + + db.newDocuments.insert_one(data_doc) + db.newDocuments.insert_one(view_doc) + + data_doc_guid = data_doc["_id"] + print(f"inserted view document ({view_doc_guid})") + print(f"inserted data document ({data_doc_guid})\n") + + return view_doc_guid + + +def write_image(folder, name): + path = f"http://localhost:1050/files/{folder}/{name}" + + data_doc_guid = guid() + view_doc_guid = guid() + + view_doc = { + "_id": view_doc_guid, + "fields": { + "proto": protofy(data_doc_guid), + "x": 10, + "y": 10, + "width": 300, + "zIndex": 2, + "libraryBrush": False + }, + "__type": "Doc" + } + + image = Image.open(f"{dist}/{folder}/{name}") + native_width, native_height = image.size + + data_doc = { + "_id": data_doc_guid, + "fields": { + "proto": protofy("imageProto"), + "data": { + "url": path, + "__type": "image" + }, + "title": name, 
+ "nativeWidth": native_width, + "author": "Bill Buxton", + "creationDate": { + "date": datetime.datetime.utcnow().microsecond, + "__type": "date" + }, + "isPrototype": True, + "page": -1, + "nativeHeight": native_height, + "height": native_height + }, + "__type": "Doc" + } + + db.newDocuments.insert_one(view_doc) + db.newDocuments.insert_one(data_doc) + + return view_doc_guid + + +def parse_document(file_name: str): + print(f"parsing {file_name}...") + pure_name = file_name.split(".")[0] + + result = {} + + dir_path = dist + "/" + pure_name + mkdir_if_absent(dir_path) + + raw = str(docx2txt.process(source + "/" + file_name, dir_path)) + + view_guids = [] + count = 0 + for image in os.listdir(dir_path): + count += 1 + view_guids.append(write_image(pure_name, image)) + os.rename(dir_path + "/" + image, dir_path + + "/" + image.replace(".", "_m.", 1)) + print(f"extracted {count} images...") + + def sanitize(line): return re.sub("[\n\t]+", "", line).replace(u"\u00A0", " ").replace( + u"\u2013", "-").replace(u"\u201c", '''"''').replace(u"\u201d", '''"''').strip() + + def sanitize_price(raw: str): + raw = raw.replace(",", "") + start = raw.find("$") + if start > -1: + i = start + 1 + while (i < len(raw) and re.match(r"[0-9\.]", raw[i])): + i += 1 + price = raw[start + 1: i + 1] + return float(price) + elif (raw.lower().find("nfs")): + return -1 + else: + return math.nan + + def remove_empty(line): return len(line) > 1 + + lines = list(map(sanitize, raw.split("\n"))) + lines = list(filter(remove_empty, lines)) + + result["file_name"] = file_name + result["title"] = lines[2].strip() + result["short_description"] = lines[3].strip().replace( + "Short Description: ", "") + + cur = 5 + notes = "" + while lines[cur] != "Device Details": + notes += lines[cur] + " " + cur += 1 + result["buxton_notes"] = notes.strip() + + cur += 1 + clean = list( + map(lambda data: data.strip().split(":"), lines[cur].split("|"))) + result["company"] = clean[0][len(clean[0]) - 1].strip() + result["year"] = clean[1][len(clean[1]) - 1].strip() + result["original_price"] = sanitize_price( + clean[2][len(clean[2]) - 1].strip()) + + cur += 1 + result["degrees_of_freedom"] = extract_value( + lines[cur]).replace("NA", "N/A") + cur += 1 + + dimensions = lines[cur].lower() + if dimensions.startswith("dimensions"): + dim_concat = dimensions[11:].strip() + cur += 1 + while lines[cur] != "Key Words": + dim_concat += (" " + lines[cur].strip()) + cur += 1 + result["dimensions"] = dim_concat + else: + result["dimensions"] = "N/A" + + cur += 1 + result["primary_key"] = extract_value(lines[cur]) + cur += 1 + result["secondary_key"] = extract_value(lines[cur]) + + while lines[cur] != "Links": + result["secondary_key"] += (" " + extract_value(lines[cur]).strip()) + cur += 1 + + cur += 1 + link_descriptions = [] + while lines[cur] != "Image": + link_descriptions.append(lines[cur].strip()) + cur += 1 + result["link_descriptions"] = listify(link_descriptions) + + result["hyperlinks"] = extract_links(source + "/" + file_name) + + images = [] + captions = [] + cur += 3 + while cur + 1 < len(lines) and lines[cur] != "NOTES:": + images.append(lines[cur]) + captions.append(lines[cur + 1]) + cur += 2 + result["images"] = listify(images) + result["captions"] = listify(captions) + + notes = [] + if (cur < len(lines) and lines[cur] == "NOTES:"): + cur += 1 + while cur < len(lines): + notes.append(lines[cur]) + cur += 1 + if len(notes) > 0: + result["notes"] = listify(notes) + + print("writing child schema...") + + return { + "schema": { + "_id": 
guid(), + "fields": result, + "__type": "Doc" + }, + "child_guids": view_guids + } + + +def proxify_guids(guids): + return list(map(lambda guid: {"fieldId": guid, "__type": "proxy"}, guids)) + + +if os.path.exists(dist): + shutil.rmtree(dist) +while os.path.exists(dist): + pass +os.mkdir(dist) +mkdir_if_absent(source) + +candidates = 0 +for file_name in os.listdir(source): + if file_name.endswith('.docx'): + candidates += 1 + schema_guids.append(write_schema( + parse_document(file_name), ["title", "data"])) + +print("writing parent schema...") +parent_guid = write_schema({ + "schema": { + "_id": guid(), + "fields": {}, + "__type": "Doc" + }, + "child_guids": schema_guids +}, ["title", "short_description", "original_price"]) + +print("appending parent schema to main workspace...\n") +db.newDocuments.update_one( + {"fields.title": "WS collection 1"}, + {"$push": {"fields.data.fields": {"fieldId": parent_guid, "__type": "proxy"}}} +) + +print("rewriting .gitignore...\n") +lines = ['*', '!.gitignore'] +with open(dist + "/.gitignore", 'w') as f: + f.write('\n'.join(lines)) + +suffix = "" if candidates == 1 else "s" +print(f"conversion complete. {candidates} candidate{suffix} processed.") diff --git a/src/scraping/buxton/source/Bill_Notes_Bill_Notes_CyKey.docx b/src/scraping/buxton/source/Bill_Notes_Bill_Notes_CyKey.docx new file mode 100644 index 000000000..06094b4d3 Binary files /dev/null and b/src/scraping/buxton/source/Bill_Notes_Bill_Notes_CyKey.docx differ diff --git a/src/scraping/buxton/source/Bill_Notes_Braun_T3.docx b/src/scraping/buxton/source/Bill_Notes_Braun_T3.docx new file mode 100644 index 000000000..356697092 Binary files /dev/null and b/src/scraping/buxton/source/Bill_Notes_Braun_T3.docx differ diff --git a/src/scraping/buxton/source/Bill_Notes_CasioC801.docx b/src/scraping/buxton/source/Bill_Notes_CasioC801.docx new file mode 100644 index 000000000..cd89fb97b Binary files /dev/null and b/src/scraping/buxton/source/Bill_Notes_CasioC801.docx differ diff --git a/src/scraping/buxton/source/Bill_Notes_Casio_Mini.docx b/src/scraping/buxton/source/Bill_Notes_Casio_Mini.docx new file mode 100644 index 000000000..a503cddfc Binary files /dev/null and b/src/scraping/buxton/source/Bill_Notes_Casio_Mini.docx differ diff --git a/src/scraping/buxton/source/Bill_Notes_FingerWorks_Prototype.docx b/src/scraping/buxton/source/Bill_Notes_FingerWorks_Prototype.docx new file mode 100644 index 000000000..4d13a8cf5 Binary files /dev/null and b/src/scraping/buxton/source/Bill_Notes_FingerWorks_Prototype.docx differ diff --git a/src/scraping/buxton/source/Bill_Notes_Fingerworks_TouchStream.docx b/src/scraping/buxton/source/Bill_Notes_Fingerworks_TouchStream.docx new file mode 100644 index 000000000..578a1be08 Binary files /dev/null and b/src/scraping/buxton/source/Bill_Notes_Fingerworks_TouchStream.docx differ diff --git a/src/scraping/buxton/source/Bill_Notes_FrogPad.docx b/src/scraping/buxton/source/Bill_Notes_FrogPad.docx new file mode 100644 index 000000000..d01e1bf5c Binary files /dev/null and b/src/scraping/buxton/source/Bill_Notes_FrogPad.docx differ diff --git a/src/scraping/buxton/source/Bill_Notes_Gavilan_SC.docx b/src/scraping/buxton/source/Bill_Notes_Gavilan_SC.docx new file mode 100644 index 000000000..7bd28b376 Binary files /dev/null and b/src/scraping/buxton/source/Bill_Notes_Gavilan_SC.docx differ diff --git a/src/scraping/buxton/source/Bill_Notes_Grandjean_Stenotype.docx b/src/scraping/buxton/source/Bill_Notes_Grandjean_Stenotype.docx new file mode 100644 index 000000000..0615c4953 
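[Editor's aside: write_schema and write_image above always insert documents in pairs, a view document whose proto field points at a data document, which in turn points at a shared prototype such as imageProto or collectionProto. As a rough TypeScript illustration of the inserted shape, sketched here for readability only (none of these names are declared in the patch):]

    // The scraper's "proxy": a by-id reference to another document.
    interface ProxyRef { fieldId: string; __type: "proxy"; }
    const proxy = (fieldId: string): ProxyRef => ({ fieldId, __type: "proxy" });

    // Roughly what write_image inserts for a single extracted picture.
    const exampleViewDoc = {
        _id: "view-guid",                                  // uuid4() in the scraper
        fields: { proto: proxy("data-guid"), x: 10, y: 10, width: 300 },
        __type: "Doc"
    };
    const exampleDataDoc = {
        _id: "data-guid",
        fields: { proto: proxy("imageProto"), title: "image name", isPrototype: true },
        __type: "Doc"
    };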
Binary files /dev/null and b/src/scraping/buxton/source/Bill_Notes_Grandjean_Stenotype.docx differ diff --git a/src/scraping/buxton/source/Bill_Notes_Matias.docx b/src/scraping/buxton/source/Bill_Notes_Matias.docx new file mode 100644 index 000000000..547603256 Binary files /dev/null and b/src/scraping/buxton/source/Bill_Notes_Matias.docx differ diff --git a/src/scraping/buxton/source/Bill_Notes_MousePen.docx b/src/scraping/buxton/source/Bill_Notes_MousePen.docx new file mode 100644 index 000000000..4e1056636 Binary files /dev/null and b/src/scraping/buxton/source/Bill_Notes_MousePen.docx differ diff --git a/src/scraping/buxton/source/Bill_Notes_NewO.docx b/src/scraping/buxton/source/Bill_Notes_NewO.docx new file mode 100644 index 000000000..a514926d2 Binary files /dev/null and b/src/scraping/buxton/source/Bill_Notes_NewO.docx differ diff --git a/src/scraping/buxton/source/Bill_Notes_OLPC.docx b/src/scraping/buxton/source/Bill_Notes_OLPC.docx new file mode 100644 index 000000000..bfca0a9bb Binary files /dev/null and b/src/scraping/buxton/source/Bill_Notes_OLPC.docx differ diff --git a/src/scraping/buxton/source/Bill_Notes_PARCkbd.docx b/src/scraping/buxton/source/Bill_Notes_PARCkbd.docx new file mode 100644 index 000000000..c0cf6ba9a Binary files /dev/null and b/src/scraping/buxton/source/Bill_Notes_PARCkbd.docx differ diff --git a/src/scraping/buxton/source/Bill_Notes_Philco_Mystery_Control.docx b/src/scraping/buxton/source/Bill_Notes_Philco_Mystery_Control.docx new file mode 100644 index 000000000..ad06903f3 Binary files /dev/null and b/src/scraping/buxton/source/Bill_Notes_Philco_Mystery_Control.docx differ diff --git a/src/scraping/buxton/source/Bill_Notes_TASA_Kbd.docx b/src/scraping/buxton/source/Bill_Notes_TASA_Kbd.docx new file mode 100644 index 000000000..e4c659de9 Binary files /dev/null and b/src/scraping/buxton/source/Bill_Notes_TASA_Kbd.docx differ diff --git a/src/scraping/buxton/source/Bill_Notes_The_Tap.docx b/src/scraping/buxton/source/Bill_Notes_The_Tap.docx new file mode 100644 index 000000000..8ceebc71e Binary files /dev/null and b/src/scraping/buxton/source/Bill_Notes_The_Tap.docx differ -- cgit v1.2.3-70-g09d2 From 892608273cdfeba4cfb55c5c604bee4361b3be0e Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Wed, 10 Jul 2019 11:53:00 -0400 Subject: documentation tweaks --- src/client/DocServer.ts | 13 +++++++------ src/client/documents/Documents.ts | 6 +++--- src/client/views/collections/CollectionDockingView.tsx | 8 ++++---- 3 files changed, 14 insertions(+), 13 deletions(-) (limited to 'src') diff --git a/src/client/DocServer.ts b/src/client/DocServer.ts index 3b33657eb..067d89c51 100644 --- a/src/client/DocServer.ts +++ b/src/client/DocServer.ts @@ -146,9 +146,10 @@ export namespace DocServer { const promises: Promise>[] = []; const map: { [id: string]: Opt } = {}; - // 1) an initial pass through the cache to determine which documents need to be fetched, - // which are already in the process of being fetched and which already exist in the - // cache + // 1) an initial pass through the cache to determine + // i) which documents need to be fetched + // ii) which are already in the process of being fetched + // iii) which already exist in the cache for (const id of ids) { const cached = _cache[id]; if (cached === undefined) { @@ -204,7 +205,7 @@ export namespace DocServer { // 6) with this confidence, we can now go through and update the cache at the ids of the fields that // we explicitly had to fetch. 
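    // [Editor's sketch, not part of the patch: the numbered comments in this hunk describe a
    // request-deduplication pattern in which the cache briefly holds the in-flight Promise for
    // an id and is later overwritten with the resolved field. Stripped of Dash's types, and with
    // invented names (cache, fetchFromServer), the essential pattern looks like this:]
    const cache: { [id: string]: unknown } = {};
    function getCached(id: string, fetchFromServer: (id: string) => Promise<unknown>) {
        const hit = cache[id];
        if (hit !== undefined) {
            return hit;                        // already resolved, or a Promise already in flight
        }
        const pending = fetchFromServer(id).then(field => cache[id] = field);
        cache[id] = pending;                   // placeholder so concurrent callers share one request
        return pending;
    }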
To finish it off, we add whatever value we've come up with for a given - // id to the soon to be returned field mapping. + // id to the soon-to-be-returned field mapping. requestedIds.forEach(id => { const field = fields[id]; // either way, overwrite or delete any promises (that we inserted as flags @@ -232,8 +233,8 @@ export namespace DocServer { // now, we return our completed mapping from all of the ids that were passed into the method // to their actual RefField | undefined values. This return value either becomes the input - // argument to the caller's promise (i.e. GetRefFields.then(map => //do something with map...)) - // or it is the direct return result if the promise is awaited. + // argument to the caller's promise (i.e. GetRefFields(["_id1_", "_id2_", "_id3_"]).then(map => //do something with map...)) + // or it is the direct return result if the promise is awaited (i.e. let fields = await GetRefFields(["_id1_", "_id2_", "_id3_"])). return map; } diff --git a/src/client/documents/Documents.ts b/src/client/documents/Documents.ts index 758291b9b..782c133ff 100644 --- a/src/client/documents/Documents.ts +++ b/src/client/documents/Documents.ts @@ -96,7 +96,7 @@ export namespace Docs { */ export async function initialize(): Promise { // non-guid string ids for each document prototype - let protoIds = [textProtoId, histoProtoId, collProtoId, imageProtoId, webProtoId, kvpProtoId, videoProtoId, audioProtoId, pdfProtoId, iconProtoId] + let protoIds = [textProtoId, histoProtoId, collProtoId, imageProtoId, webProtoId, kvpProtoId, videoProtoId, audioProtoId, pdfProtoId, iconProtoId]; // fetch the actual prototype documents from the server let actualProtos = await DocServer.getRefFields(protoIds); @@ -238,7 +238,7 @@ export namespace Docs { y: 0, width: 300, height: 150 - } + }; return buildPrototype(audioProtoId, "AUDIO_PROTO", AudioBox.LayoutString(), defaultAttrs); } } @@ -419,7 +419,7 @@ export namespace Docs { export type DocConfig = { doc: Doc, initialWidth?: number - } + }; export function StandardCollectionDockingDocument(configs: Array, options: DocumentOptions, id?: string, type: string = "row") { let layoutConfig = { diff --git a/src/client/views/collections/CollectionDockingView.tsx b/src/client/views/collections/CollectionDockingView.tsx index 4f5837590..5270a4624 100644 --- a/src/client/views/collections/CollectionDockingView.tsx +++ b/src/client/views/collections/CollectionDockingView.tsx @@ -141,11 +141,11 @@ export class CollectionDockingView extends React.Component { - if (!contentArray || contentArray.length == 0) return; + if (!contentArray || contentArray.length === 0) return; contentArray.forEach(member => { let baseCase = Object.keys(member).includes("props"); if (!baseCase) { - this.injectParentProp(member.content, reverse) + this.injectParentProp(member.content, reverse); } else { reverse ? 
delete member.props.parent : member.props.parent = this; } @@ -386,10 +386,10 @@ export class CollectionDockingView extends React.Component { console.log("DROPPPP THE BASS!", e); - } + }; ReactDOM.render( CollectionDockingView.AddTab(stack, doc)} />, upDiv); tab.reactComponents = [upDiv]; tab.element.append(upDiv); -- cgit v1.2.3-70-g09d2 From 84e8095347c234d79fa75dc10703a34114f5edfe Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Thu, 11 Jul 2019 02:11:05 -0400 Subject: completed first pass at proto refactor --- src/client/documents/Documents.ts | 54 +++++++++++++++------------------------ 1 file changed, 20 insertions(+), 34 deletions(-) (limited to 'src') diff --git a/src/client/documents/Documents.ts b/src/client/documents/Documents.ts index 31e7eef2c..26862fa7e 100644 --- a/src/client/documents/Documents.ts +++ b/src/client/documents/Documents.ts @@ -25,7 +25,7 @@ import { OmitKeys } from "../../Utils"; import { ImageField, VideoField, AudioField, PdfField, WebField } from "../../new_fields/URLField"; import { HtmlField } from "../../new_fields/HtmlField"; import { List } from "../../new_fields/List"; -import { Cast, NumCast, StrCast, ToConstructor, InterfaceValue } from "../../new_fields/Types"; +import { Cast, NumCast, StrCast, ToConstructor, InterfaceValue, FieldValue } from "../../new_fields/Types"; import { IconField } from "../../new_fields/IconField"; import { listSpec } from "../../new_fields/Schema"; import { DocServer } from "../DocServer"; @@ -39,6 +39,8 @@ import { LinkManager } from "../util/LinkManager"; import { DocumentManager } from "../util/DocumentManager"; import DirectoryImportBox from "../util/Import & Export/DirectoryImportBox"; import { Scripting } from "../util/Scripting"; +import { FieldView } from "../views/nodes/FieldView"; +import { Id } from "../../new_fields/FieldSymbols"; var requestImageSize = require('../util/request-image-size'); var path = require('path'); @@ -70,16 +72,6 @@ export namespace DocTypeUtils { return includeNone ? 
types : types.filter(key => key !== DocTypes.NONE); } - export function toObject(o: Array): { [K in T]: K } { - return o.reduce((res, key) => { - res[key] = key; - return res; - }, Object.create(null)); - } - - const Types = toObject(values()); - - export type All = keyof typeof Types; } export interface DocumentOptions { @@ -115,10 +107,10 @@ export namespace Docs { export namespace Prototypes { type PrototypeTemplate = { options?: Partial, primary: string, background?: string }; - type TemplateMap = Map; - type PrototypeMap = Map; + type TemplateMap = Map; + type PrototypeMap = Map; - const LayoutMap: TemplateMap = new Map([ + const TemplateMap: TemplateMap = new Map([ [DocTypes.TEXT, { options: { height: 150, backgroundColor: "#f1efeb" }, primary: FormattedTextBox.LayoutString() @@ -184,31 +176,21 @@ export namespace Docs { */ export async function initialize(): Promise { // non-guid string ids for each document prototype - let prototypeIds: string[] = DocTypeUtils.values(false).map(type => type + "Proto"); - - let defaultOptions: DocumentOptions = { - x: 0, - y: 0, - width: 300 - }; - + let suffix = "Proto"; + let prototypeIds: string[] = DocTypeUtils.values(false).map(type => type + suffix); // fetch the actual prototype documents from the server let actualProtos = await DocServer.GetRefFields(prototypeIds); - // initialize prototype documents + + let defaultOptions: DocumentOptions = { x: 0, y: 0, width: 300 }; prototypeIds.map(id => { let existing = actualProtos[id] as Doc; - if (existing) { - PrototypeMap.set(id, existing); - } else { - let template = LayoutMap.get(id.replace("Proto", "")); - if (template) { - PrototypeMap.set(id, buildPrototype(template, id, defaultOptions)); - } - } + let type = id.replace(suffix, "") as DocTypes; + let target = existing || buildPrototype(type, id, defaultOptions); + target && PrototypeMap.set(type, target); }); } - export function get(type: string) { + export function get(type: DocTypes) { return PrototypeMap.get(type)!; } @@ -224,7 +206,11 @@ export namespace Docs { * @param options any value specified in the DocumentOptions object likewise * becomes the default value for that key for all delegates */ - function buildPrototype(template: PrototypeTemplate, prototypeId: string, defaultOptions: DocumentOptions): Doc { + function buildPrototype(type: DocTypes, prototypeId: string, defaultOptions: DocumentOptions): Opt { + let template = TemplateMap.get(type); + if (!template) { + return undefined; + } let primary = template.primary; let background = template.background; let options = { ...defaultOptions, ...(template.options || {}), title: prototypeId.toUpperCase().replace("PROTO", "_PROTO") }; @@ -401,7 +387,7 @@ export namespace Docs { } export function DirectoryImportDocument(options: DocumentOptions = {}) { - return InstanceFromProto(Prototypes.get(DocTypes.COL), new List(), options); + return InstanceFromProto(Prototypes.get(DocTypes.IMPORT), new List(), options); } export type DocConfig = { -- cgit v1.2.3-70-g09d2 From 4b3c7ea33d564711566232acf2e8450aee1219fc Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Thu, 11 Jul 2019 02:14:14 -0400 Subject: clean up --- src/client/documents/Documents.ts | 3 --- 1 file changed, 3 deletions(-) (limited to 'src') diff --git a/src/client/documents/Documents.ts b/src/client/documents/Documents.ts index 26862fa7e..b2f69d7af 100644 --- a/src/client/documents/Documents.ts +++ b/src/client/documents/Documents.ts @@ -29,7 +29,6 @@ import { Cast, NumCast, StrCast, ToConstructor, InterfaceValue, FieldValue } fro 
import { IconField } from "../../new_fields/IconField"; import { listSpec } from "../../new_fields/Schema"; import { DocServer } from "../DocServer"; -import { InkField } from "../../new_fields/InkField"; import { dropActionType } from "../util/DragManager"; import { DateField } from "../../new_fields/DateField"; import { UndoManager } from "../util/UndoManager"; @@ -39,8 +38,6 @@ import { LinkManager } from "../util/LinkManager"; import { DocumentManager } from "../util/DocumentManager"; import DirectoryImportBox from "../util/Import & Export/DirectoryImportBox"; import { Scripting } from "../util/Scripting"; -import { FieldView } from "../views/nodes/FieldView"; -import { Id } from "../../new_fields/FieldSymbols"; var requestImageSize = require('../util/request-image-size'); var path = require('path'); -- cgit v1.2.3-70-g09d2 From 9c4ed0eba1ee65271435a950f50fcbc85417eb0b Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Thu, 11 Jul 2019 02:32:27 -0400 Subject: naming cleanup --- src/Utils.ts | 6 +- src/client/DocServer.ts | 4 +- src/client/documents/Documents.ts | 84 +++++++++++----------- .../views/collections/CollectionStackingView.tsx | 4 +- .../views/collections/CollectionTreeView.tsx | 4 +- src/client/views/nodes/DocumentView.tsx | 2 +- src/client/views/nodes/LinkMenu.tsx | 2 +- src/client/views/search/FilterBox.tsx | 4 +- src/client/views/search/IconBar.tsx | 4 +- src/client/views/search/IconButton.tsx | 42 +++++------ src/client/views/search/SearchItem.tsx | 26 +++---- 11 files changed, 93 insertions(+), 89 deletions(-) (limited to 'src') diff --git a/src/Utils.ts b/src/Utils.ts index a62f9b4ff..e8a80bdc3 100644 --- a/src/Utils.ts +++ b/src/Utils.ts @@ -78,9 +78,9 @@ export class Utils { socket.emit(message.Message, args); } - public static emitCallback(socket: Socket | SocketIOClient.Socket, message: Message, args: T): Promise; - public static emitCallback(socket: Socket | SocketIOClient.Socket, message: Message, args: T, fn: (args: any) => any): void; - public static emitCallback(socket: Socket | SocketIOClient.Socket, message: Message, args: T, fn?: (args: any) => any): void | Promise { + public static EmitCallback(socket: Socket | SocketIOClient.Socket, message: Message, args: T): Promise; + public static EmitCallback(socket: Socket | SocketIOClient.Socket, message: Message, args: T, fn: (args: any) => any): void; + public static EmitCallback(socket: Socket | SocketIOClient.Socket, message: Message, args: T, fn?: (args: any) => any): void | Promise { this.log("Emit", message.Name, args, false); if (fn) { socket.emit(message.Message, args, this.loggingCallback('Receiving', fn, message.Name)); diff --git a/src/client/DocServer.ts b/src/client/DocServer.ts index 895177841..d05793ea2 100644 --- a/src/client/DocServer.ts +++ b/src/client/DocServer.ts @@ -109,7 +109,7 @@ export namespace DocServer { // synchronously, we emit a single callback to the server requesting the serialized (i.e. represented by a string) // field for the given ids. This returns a promise, which, when resolved, indicates the the JSON serialized version of // the field has been returned from the server - const getSerializedField = Utils.emitCallback(_socket, MessageStore.GetRefField, id); + const getSerializedField = Utils.EmitCallback(_socket, MessageStore.GetRefField, id); // when the serialized RefField has been received, go head and begin deserializing it into an object. 
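(Aside on the helper renamed above: Utils.EmitCallback keeps two overloads — given a callback it returns void and delivers the server's reply to that callback, and without one it returns a Promise of the reply, which is the form GetRefField relies on. A minimal usage sketch follows; the import paths, socket, and document id are placeholders for illustration, not code from this patch.)

    import { Utils } from "../Utils";                 // path assumed for the sketch
    import { MessageStore } from "../server/Message"; // path assumed for the sketch

    async function fetchSerializedField(socket: SocketIOClient.Socket, id: string) {
        // Promise form: no callback supplied, so EmitCallback resolves with the server's reply.
        const serialized = await Utils.EmitCallback(socket, MessageStore.GetRefField, id);

        // Callback form: the reply goes to the callback and the call itself returns void.
        Utils.EmitCallback(socket, MessageStore.GetRefField, id, reply => console.log("reply:", reply));

        return serialized;
    }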
// Here, once deserialized, we also invoke .proto to 'load' the document's prototype, which ensures that all @@ -179,7 +179,7 @@ export namespace DocServer { // 2) synchronously, we emit a single callback to the server requesting the serialized (i.e. represented by a string) // fields for the given ids. This returns a promise, which, when resolved, indicates that all the JSON serialized versions of // the fields have been returned from the server - const getSerializedFields: Promise = Utils.emitCallback(_socket, MessageStore.GetRefFields, requestedIds); + const getSerializedFields: Promise = Utils.EmitCallback(_socket, MessageStore.GetRefFields, requestedIds); // 3) when the serialized RefFields have been received, go head and begin deserializing them into objects. // Here, once deserialized, we also invoke .proto to 'load' the documents' prototypes, which ensures that all diff --git a/src/client/documents/Documents.ts b/src/client/documents/Documents.ts index b2f69d7af..5a3f9574f 100644 --- a/src/client/documents/Documents.ts +++ b/src/client/documents/Documents.ts @@ -41,7 +41,7 @@ import { Scripting } from "../util/Scripting"; var requestImageSize = require('../util/request-image-size'); var path = require('path'); -export enum DocTypes { +export enum DocumentType { NONE = "none", TEXT = "text", HIST = "histogram", @@ -60,13 +60,13 @@ export enum DocTypes { export namespace DocTypeUtils { export function values(includeNone: boolean = true): string[] { - let types = Object.values(DocTypes); - return includeNone ? types : types.filter(key => key !== DocTypes.NONE); + let types = Object.values(DocumentType); + return includeNone ? types : types.filter(key => key !== DocumentType.NONE); } export function keys(includeNone: boolean = true): string[] { - let types = Object.keys(DocTypes); - return includeNone ? types : types.filter(key => key !== DocTypes.NONE); + let types = Object.keys(DocumentType); + return includeNone ? 
types : types.filter(key => key !== DocumentType.NONE); } } @@ -104,55 +104,55 @@ export namespace Docs { export namespace Prototypes { type PrototypeTemplate = { options?: Partial, primary: string, background?: string }; - type TemplateMap = Map; - type PrototypeMap = Map; + type TemplateMap = Map; + type PrototypeMap = Map; const TemplateMap: TemplateMap = new Map([ - [DocTypes.TEXT, { + [DocumentType.TEXT, { options: { height: 150, backgroundColor: "#f1efeb" }, primary: FormattedTextBox.LayoutString() }], - [DocTypes.HIST, { + [DocumentType.HIST, { options: { nativeWidth: 600, curPage: 0 }, primary: CollectionView.LayoutString("annotations"), background: HistogramBox.LayoutString() }], - [DocTypes.IMG, { + [DocumentType.IMG, { options: { height: 300, backgroundColor: "black" }, primary: CollectionView.LayoutString("annotations"), background: ImageBox.LayoutString() }], - [DocTypes.WEB, { + [DocumentType.WEB, { options: { height: 300 }, primary: WebBox.LayoutString() }], - [DocTypes.COL, { + [DocumentType.COL, { options: { panX: 0, panY: 0, scale: 1, width: 500, height: 500 }, primary: CollectionView.LayoutString() }], - [DocTypes.KVP, { + [DocumentType.KVP, { options: { height: 150 }, primary: KeyValueBox.LayoutString() }], - [DocTypes.VID, { + [DocumentType.VID, { options: { nativeWidth: 600, curPage: 0 }, primary: CollectionVideoView.LayoutString("annotations"), background: VideoBox.LayoutString() }], - [DocTypes.AUDIO, { + [DocumentType.AUDIO, { options: { height: 150 }, primary: AudioBox.LayoutString() }], - [DocTypes.PDF, { + [DocumentType.PDF, { options: { nativeWidth: 1200, curPage: 1 }, primary: CollectionPDFView.LayoutString("annotations"), background: PDFBox.LayoutString() }], - [DocTypes.ICON, { + [DocumentType.ICON, { options: { width: Number(MINIMIZED_ICON_SIZE), height: Number(MINIMIZED_ICON_SIZE) }, primary: IconBox.LayoutString() }], - [DocTypes.IMPORT, { + [DocumentType.IMPORT, { options: { height: 150 }, primary: DirectoryImportBox.LayoutString() }] @@ -178,16 +178,20 @@ export namespace Docs { // fetch the actual prototype documents from the server let actualProtos = await DocServer.GetRefFields(prototypeIds); + // update this object to include any default values: DocumentOptions for all prototypes let defaultOptions: DocumentOptions = { x: 0, y: 0, width: 300 }; prototypeIds.map(id => { let existing = actualProtos[id] as Doc; - let type = id.replace(suffix, "") as DocTypes; + let type = id.replace(suffix, "") as DocumentType; + // get or create prototype of the specified type... 
let target = existing || buildPrototype(type, id, defaultOptions); + // ...and set it if not undefined (can be undefined only if TemplateMap does not contain + // an entry dedicated to the given DocumentType) target && PrototypeMap.set(type, target); }); } - export function get(type: DocTypes) { + export function get(type: DocumentType) { return PrototypeMap.get(type)!; } @@ -203,7 +207,7 @@ export namespace Docs { * @param options any value specified in the DocumentOptions object likewise * becomes the default value for that key for all delegates */ - function buildPrototype(type: DocTypes, prototypeId: string, defaultOptions: DocumentOptions): Opt { + function buildPrototype(type: DocumentType, prototypeId: string, defaultOptions: DocumentOptions): Opt { let template = TemplateMap.get(type); if (!template) { return undefined; @@ -279,7 +283,7 @@ export namespace Docs { } export function ImageDocument(url: string, options: DocumentOptions = {}) { - let inst = InstanceFromProto(Prototypes.get(DocTypes.IMG), new ImageField(new URL(url)), { title: path.basename(url), ...options }); + let inst = InstanceFromProto(Prototypes.get(DocumentType.IMG), new ImageField(new URL(url)), { title: path.basename(url), ...options }); requestImageSize(window.origin + RouteStore.corsProxy + "/" + url) .then((size: any) => { let aspect = size.height / size.width; @@ -294,27 +298,27 @@ export namespace Docs { } export function VideoDocument(url: string, options: DocumentOptions = {}) { - return InstanceFromProto(Prototypes.get(DocTypes.VID), new VideoField(new URL(url)), options); + return InstanceFromProto(Prototypes.get(DocumentType.VID), new VideoField(new URL(url)), options); } export function AudioDocument(url: string, options: DocumentOptions = {}) { - return InstanceFromProto(Prototypes.get(DocTypes.AUDIO), new AudioField(new URL(url)), options); + return InstanceFromProto(Prototypes.get(DocumentType.AUDIO), new AudioField(new URL(url)), options); } export function HistogramDocument(histoOp: HistogramOperation, options: DocumentOptions = {}) { - return InstanceFromProto(Prototypes.get(DocTypes.HIST), new HistogramField(histoOp), options); + return InstanceFromProto(Prototypes.get(DocumentType.HIST), new HistogramField(histoOp), options); } export function TextDocument(options: DocumentOptions = {}) { - return InstanceFromProto(Prototypes.get(DocTypes.TEXT), "", options); + return InstanceFromProto(Prototypes.get(DocumentType.TEXT), "", options); } export function IconDocument(icon: string, options: DocumentOptions = {}) { - return InstanceFromProto(Prototypes.get(DocTypes.ICON), new IconField(icon), options); + return InstanceFromProto(Prototypes.get(DocumentType.ICON), new IconField(icon), options); } export function PdfDocument(url: string, options: DocumentOptions = {}) { - return InstanceFromProto(Prototypes.get(DocTypes.PDF), new PdfField(new URL(url)), options); + return InstanceFromProto(Prototypes.get(DocumentType.PDF), new PdfField(new URL(url)), options); } export async function DBDocument(url: string, options: DocumentOptions = {}, columnOptions: DocumentOptions = {}) { @@ -349,42 +353,42 @@ export namespace Docs { } export function WebDocument(url: string, options: DocumentOptions = {}) { - return InstanceFromProto(Prototypes.get(DocTypes.WEB), new WebField(new URL(url)), options); + return InstanceFromProto(Prototypes.get(DocumentType.WEB), new WebField(new URL(url)), options); } export function HtmlDocument(html: string, options: DocumentOptions = {}) { - return 
InstanceFromProto(Prototypes.get(DocTypes.WEB), new HtmlField(html), options); + return InstanceFromProto(Prototypes.get(DocumentType.WEB), new HtmlField(html), options); } export function KVPDocument(document: Doc, options: DocumentOptions = {}) { - return InstanceFromProto(Prototypes.get(DocTypes.KVP), document, { title: document.title + ".kvp", ...options }); + return InstanceFromProto(Prototypes.get(DocumentType.KVP), document, { title: document.title + ".kvp", ...options }); } export function FreeformDocument(documents: Array, options: DocumentOptions, makePrototype: boolean = true) { if (!makePrototype) { - return MakeDataDelegate(Prototypes.get(DocTypes.COL), { ...options, viewType: CollectionViewType.Freeform }, new List(documents)); + return MakeDataDelegate(Prototypes.get(DocumentType.COL), { ...options, viewType: CollectionViewType.Freeform }, new List(documents)); } - return InstanceFromProto(Prototypes.get(DocTypes.COL), new List(documents), { schemaColumns: new List(["title"]), ...options, viewType: CollectionViewType.Freeform }); + return InstanceFromProto(Prototypes.get(DocumentType.COL), new List(documents), { schemaColumns: new List(["title"]), ...options, viewType: CollectionViewType.Freeform }); } export function SchemaDocument(schemaColumns: string[], documents: Array, options: DocumentOptions) { - return InstanceFromProto(Prototypes.get(DocTypes.COL), new List(documents), { schemaColumns: new List(schemaColumns), ...options, viewType: CollectionViewType.Schema }); + return InstanceFromProto(Prototypes.get(DocumentType.COL), new List(documents), { schemaColumns: new List(schemaColumns), ...options, viewType: CollectionViewType.Schema }); } export function TreeDocument(documents: Array, options: DocumentOptions) { - return InstanceFromProto(Prototypes.get(DocTypes.COL), new List(documents), { schemaColumns: new List(["title"]), ...options, viewType: CollectionViewType.Tree }); + return InstanceFromProto(Prototypes.get(DocumentType.COL), new List(documents), { schemaColumns: new List(["title"]), ...options, viewType: CollectionViewType.Tree }); } export function StackingDocument(documents: Array, options: DocumentOptions) { - return InstanceFromProto(Prototypes.get(DocTypes.COL), new List(documents), { schemaColumns: new List(["title"]), ...options, viewType: CollectionViewType.Stacking }); + return InstanceFromProto(Prototypes.get(DocumentType.COL), new List(documents), { schemaColumns: new List(["title"]), ...options, viewType: CollectionViewType.Stacking }); } export function DockDocument(documents: Array, config: string, options: DocumentOptions, id?: string) { - return InstanceFromProto(Prototypes.get(DocTypes.COL), new List(documents), { ...options, viewType: CollectionViewType.Docking, dockingConfig: config }, id); + return InstanceFromProto(Prototypes.get(DocumentType.COL), new List(documents), { ...options, viewType: CollectionViewType.Docking, dockingConfig: config }, id); } export function DirectoryImportDocument(options: DocumentOptions = {}) { - return InstanceFromProto(Prototypes.get(DocTypes.IMPORT), new List(), options); + return InstanceFromProto(Prototypes.get(DocumentType.IMPORT), new List(), options); } export type DocConfig = { @@ -538,14 +542,14 @@ export namespace DocUtils { UndoManager.RunInBatch(() => { let linkDoc = Docs.Create.TextDocument({ width: 100, height: 30, borderRounding: "100%" }); - linkDoc.type = DocTypes.LINK; + linkDoc.type = DocumentType.LINK; let linkDocProto = Doc.GetProto(linkDoc); linkDocProto.context = targetContext; 
linkDocProto.title = title === "" ? source.title + " to " + target.title : title; linkDocProto.linkDescription = description; linkDocProto.linkTags = tags; - linkDocProto.type = DocTypes.LINK; + linkDocProto.type = DocumentType.LINK; linkDocProto.anchor1 = source; linkDocProto.anchor1Page = source.curPage; diff --git a/src/client/views/collections/CollectionStackingView.tsx b/src/client/views/collections/CollectionStackingView.tsx index 9266fc8fd..d26bf5118 100644 --- a/src/client/views/collections/CollectionStackingView.tsx +++ b/src/client/views/collections/CollectionStackingView.tsx @@ -12,7 +12,7 @@ import "./CollectionStackingView.scss"; import { CollectionSubView } from "./CollectionSubView"; import { undoBatch } from "../../util/UndoManager"; import { DragManager } from "../../util/DragManager"; -import { DocTypes } from "../../documents/Documents"; +import { DocumentType } from "../../documents/Documents"; import { Transform } from "../../util/Transform"; @observer @@ -50,7 +50,7 @@ export class CollectionStackingView extends CollectionSubView(doc => doc) { } overlays = (doc: Doc) => { - return doc.type === DocTypes.IMG ? { title: "title", caption: "caption" } : {}; + return doc.type === DocumentType.IMG ? { title: "title", caption: "caption" } : {}; } getDisplayDoc(layoutDoc: Doc, d: Doc, dxf: () => Transform) { diff --git a/src/client/views/collections/CollectionTreeView.tsx b/src/client/views/collections/CollectionTreeView.tsx index e88f1a9d0..c8c092760 100644 --- a/src/client/views/collections/CollectionTreeView.tsx +++ b/src/client/views/collections/CollectionTreeView.tsx @@ -9,7 +9,7 @@ import { List } from '../../../new_fields/List'; import { Document, listSpec } from '../../../new_fields/Schema'; import { BoolCast, Cast, NumCast, StrCast } from '../../../new_fields/Types'; import { emptyFunction, Utils } from '../../../Utils'; -import { Docs, DocUtils, DocTypes } from '../../documents/Documents'; +import { Docs, DocUtils, DocumentType } from '../../documents/Documents'; import { DocumentManager } from '../../util/DocumentManager'; import { DragManager, dropActionType, SetupDrag } from "../../util/DragManager"; import { SelectionManager } from '../../util/SelectionManager'; @@ -316,7 +316,7 @@ class TreeView extends React.Component { } @computed get docBounds() { - if (StrCast(this.props.document.type).indexOf(DocTypes.COL) === -1) return undefined; + if (StrCast(this.props.document.type).indexOf(DocumentType.COL) === -1) return undefined; let layoutDoc = Doc.expandTemplateLayout(this.props.document, this.props.dataDoc); return Doc.ComputeContentBounds(layoutDoc); } diff --git a/src/client/views/nodes/DocumentView.tsx b/src/client/views/nodes/DocumentView.tsx index 718552dc9..27b45db76 100644 --- a/src/client/views/nodes/DocumentView.tsx +++ b/src/client/views/nodes/DocumentView.tsx @@ -10,7 +10,7 @@ import { BoolCast, Cast, FieldValue, StrCast, NumCast, PromiseValue } from "../. 
import { CurrentUserUtils } from "../../../server/authentication/models/current_user_utils"; import { emptyFunction, Utils, returnFalse, returnTrue } from "../../../Utils"; import { DocServer } from "../../DocServer"; -import { Docs, DocUtils, DocTypes } from "../../documents/Documents"; +import { Docs, DocUtils, DocumentType } from "../../documents/Documents"; import { DocumentManager } from "../../util/DocumentManager"; import { DragManager, dropActionType } from "../../util/DragManager"; import { SearchUtil } from "../../util/SearchUtil"; diff --git a/src/client/views/nodes/LinkMenu.tsx b/src/client/views/nodes/LinkMenu.tsx index cccf3c329..1eda7d1fb 100644 --- a/src/client/views/nodes/LinkMenu.tsx +++ b/src/client/views/nodes/LinkMenu.tsx @@ -14,7 +14,7 @@ import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; library.add(faTrash); import { Cast, FieldValue, StrCast } from "../../../new_fields/Types"; import { Id } from "../../../new_fields/FieldSymbols"; -import { DocTypes } from "../../documents/Documents"; +import { DocumentType } from "../../documents/Documents"; interface Props { docView: DocumentView; diff --git a/src/client/views/search/FilterBox.tsx b/src/client/views/search/FilterBox.tsx index 23a1b31d8..c6c18f9b4 100644 --- a/src/client/views/search/FilterBox.tsx +++ b/src/client/views/search/FilterBox.tsx @@ -6,7 +6,7 @@ import { faTimes } from '@fortawesome/free-solid-svg-icons'; import { library } from '@fortawesome/fontawesome-svg-core'; import { Doc } from '../../../new_fields/Doc'; import { Id } from '../../../new_fields/FieldSymbols'; -import { DocTypes } from '../../documents/Documents'; +import { DocumentType } from '../../documents/Documents'; import { Cast, StrCast } from '../../../new_fields/Types'; import * as _ from "lodash"; import { ToggleBar } from './ToggleBar'; @@ -32,7 +32,7 @@ export enum Keys { export class FilterBox extends React.Component { static Instance: FilterBox; - public _allIcons: string[] = [DocTypes.AUDIO, DocTypes.COL, DocTypes.HIST, DocTypes.IMG, DocTypes.LINK, DocTypes.PDF, DocTypes.TEXT, DocTypes.VID, DocTypes.WEB]; + public _allIcons: string[] = [DocumentType.AUDIO, DocumentType.COL, DocumentType.HIST, DocumentType.IMG, DocumentType.LINK, DocumentType.PDF, DocumentType.TEXT, DocumentType.VID, DocumentType.WEB]; //if true, any keywords can be used. if false, all keywords are required. @observable private _basicWordStatus: boolean = true; diff --git a/src/client/views/search/IconBar.tsx b/src/client/views/search/IconBar.tsx index 744dd898a..4712b0abc 100644 --- a/src/client/views/search/IconBar.tsx +++ b/src/client/views/search/IconBar.tsx @@ -4,7 +4,7 @@ import { observable, action } from 'mobx'; // import "./SearchBox.scss"; import "./IconBar.scss"; import "./IconButton.scss"; -import { DocTypes } from '../../documents/Documents'; +import { DocumentType } from '../../documents/Documents'; import { faSearch, faFilePdf, faFilm, faImage, faObjectGroup, faStickyNote, faMusic, faLink, faChartBar, faGlobeAsia, faBan, faTimesCircle, faCheckCircle } from '@fortawesome/free-solid-svg-icons'; import { FontAwesomeIcon } from '@fortawesome/react-fontawesome'; import { library } from '@fortawesome/fontawesome-svg-core'; @@ -63,7 +63,7 @@ export class IconBar extends React.Component {
-
+
Select All
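Before the IconButton diff below: getIcon and getFA repeat the same DocumentType-to-icon association in two parallel switch statements. For reference only, the same association can live in one lookup table built from the icons that file already imports — a sketch under that assumption, not part of this change set.

    import {
        faBan, faMusic, faObjectGroup, faChartBar, faImage, faLink,
        faFilePdf, faStickyNote, faVideo, faGlobeAsia, faCaretDown
    } from "@fortawesome/free-solid-svg-icons";
    import { DocumentType } from "../../documents/Documents";

    // Single source of truth for the icon shown per document type.
    const typeIcons: { [type: string]: typeof faBan } = {
        [DocumentType.NONE]: faBan,
        [DocumentType.AUDIO]: faMusic,
        [DocumentType.COL]: faObjectGroup,
        [DocumentType.HIST]: faChartBar,
        [DocumentType.IMG]: faImage,
        [DocumentType.LINK]: faLink,
        [DocumentType.PDF]: faFilePdf,
        [DocumentType.TEXT]: faStickyNote,
        [DocumentType.VID]: faVideo,
        [DocumentType.WEB]: faGlobeAsia,
    };

    // Unknown or unset types fall back to the caret icon, matching the switch's default case.
    const iconForType = (type: string) => typeIcons[type] || faCaretDown;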
diff --git a/src/client/views/search/IconButton.tsx b/src/client/views/search/IconButton.tsx index 23ab42de0..bfe2c7d0b 100644 --- a/src/client/views/search/IconButton.tsx +++ b/src/client/views/search/IconButton.tsx @@ -6,7 +6,7 @@ import "./IconButton.scss"; import { faSearch, faFilePdf, faFilm, faImage, faObjectGroup, faStickyNote, faMusic, faLink, faChartBar, faGlobeAsia, faBan, faVideo, faCaretDown } from '@fortawesome/free-solid-svg-icons'; import { FontAwesomeIcon } from '@fortawesome/react-fontawesome'; import { library, icon } from '@fortawesome/fontawesome-svg-core'; -import { DocTypes } from '../../documents/Documents'; +import { DocumentType } from '../../documents/Documents'; import '../globalCssVariables.scss'; import * as _ from "lodash"; import { IconBar } from './IconBar'; @@ -80,25 +80,25 @@ export class IconButton extends React.Component{ @action.bound getIcon() { switch (this.props.type) { - case (DocTypes.NONE): + case (DocumentType.NONE): return faBan; - case (DocTypes.AUDIO): + case (DocumentType.AUDIO): return faMusic; - case (DocTypes.COL): + case (DocumentType.COL): return faObjectGroup; - case (DocTypes.HIST): + case (DocumentType.HIST): return faChartBar; - case (DocTypes.IMG): + case (DocumentType.IMG): return faImage; - case (DocTypes.LINK): + case (DocumentType.LINK): return faLink; - case (DocTypes.PDF): + case (DocumentType.PDF): return faFilePdf; - case (DocTypes.TEXT): + case (DocumentType.TEXT): return faStickyNote; - case (DocTypes.VID): + case (DocumentType.VID): return faVideo; - case (DocTypes.WEB): + case (DocumentType.WEB): return faGlobeAsia; default: return faCaretDown; @@ -149,25 +149,25 @@ export class IconButton extends React.Component{ getFA = () => { switch (this.props.type) { - case (DocTypes.NONE): + case (DocumentType.NONE): return (); - case (DocTypes.AUDIO): + case (DocumentType.AUDIO): return (); - case (DocTypes.COL): + case (DocumentType.COL): return (); - case (DocTypes.HIST): + case (DocumentType.HIST): return (); - case (DocTypes.IMG): + case (DocumentType.IMG): return (); - case (DocTypes.LINK): + case (DocumentType.LINK): return (); - case (DocTypes.PDF): + case (DocumentType.PDF): return (); - case (DocTypes.TEXT): + case (DocumentType.TEXT): return (); - case (DocTypes.VID): + case (DocumentType.VID): return (); - case (DocTypes.WEB): + case (DocumentType.WEB): return (); default: return (); diff --git a/src/client/views/search/SearchItem.tsx b/src/client/views/search/SearchItem.tsx index 87cae5487..b34103254 100644 --- a/src/client/views/search/SearchItem.tsx +++ b/src/client/views/search/SearchItem.tsx @@ -8,7 +8,7 @@ import { Doc, DocListCast, HeightSym, WidthSym } from "../../../new_fields/Doc"; import { Id } from "../../../new_fields/FieldSymbols"; import { Cast, NumCast, StrCast } from "../../../new_fields/Types"; import { emptyFunction, returnFalse, returnOne, Utils } from "../../../Utils"; -import { DocTypes } from "../../documents/Documents"; +import { DocumentType } from "../../documents/Documents"; import { DocumentManager } from "../../util/DocumentManager"; import { SetupDrag, DragManager } from "../../util/DragManager"; import { LinkManager } from "../../util/LinkManager"; @@ -119,7 +119,7 @@ export class SearchItem extends React.Component { onPointerEnter={action(() => this._displayDim = this._useIcons ? 50 : Number(SEARCH_THUMBNAIL_SIZE))} onPointerLeave={action(() => this._displayDim = 50)} > { } let layoutresult = StrCast(this.props.doc.type); - let button = layoutresult.indexOf(DocTypes.PDF) !== -1 ? 
faFilePdf : - layoutresult.indexOf(DocTypes.IMG) !== -1 ? faImage : - layoutresult.indexOf(DocTypes.TEXT) !== -1 ? faStickyNote : - layoutresult.indexOf(DocTypes.VID) !== -1 ? faFilm : - layoutresult.indexOf(DocTypes.COL) !== -1 ? faObjectGroup : - layoutresult.indexOf(DocTypes.AUDIO) !== -1 ? faMusic : - layoutresult.indexOf(DocTypes.LINK) !== -1 ? faLink : - layoutresult.indexOf(DocTypes.HIST) !== -1 ? faChartBar : - layoutresult.indexOf(DocTypes.WEB) !== -1 ? faGlobeAsia : + let button = layoutresult.indexOf(DocumentType.PDF) !== -1 ? faFilePdf : + layoutresult.indexOf(DocumentType.IMG) !== -1 ? faImage : + layoutresult.indexOf(DocumentType.TEXT) !== -1 ? faStickyNote : + layoutresult.indexOf(DocumentType.VID) !== -1 ? faFilm : + layoutresult.indexOf(DocumentType.COL) !== -1 ? faObjectGroup : + layoutresult.indexOf(DocumentType.AUDIO) !== -1 ? faMusic : + layoutresult.indexOf(DocumentType.LINK) !== -1 ? faLink : + layoutresult.indexOf(DocumentType.HIST) !== -1 ? faChartBar : + layoutresult.indexOf(DocumentType.WEB) !== -1 ? faGlobeAsia : faCaretUp; return
{ this._useIcons = false; this._displayDim = Number(SEARCH_THUMBNAIL_SIZE); })} > @@ -184,7 +184,7 @@ export class SearchItem extends React.Component { pointerDown = (e: React.PointerEvent) => { e.preventDefault(); e.button === 0 && SearchBox.Instance.openSearch(e); } highlightDoc = (e: React.PointerEvent) => { - if (this.props.doc.type === DocTypes.LINK) { + if (this.props.doc.type === DocumentType.LINK) { if (this.props.doc.anchor1 && this.props.doc.anchor2) { let doc1 = Cast(this.props.doc.anchor1, Doc, null); @@ -201,7 +201,7 @@ export class SearchItem extends React.Component { } unHighlightDoc = (e: React.PointerEvent) => { - if (this.props.doc.type === DocTypes.LINK) { + if (this.props.doc.type === DocumentType.LINK) { if (this.props.doc.anchor1 && this.props.doc.anchor2) { let doc1 = Cast(this.props.doc.anchor1, Doc, null); -- cgit v1.2.3-70-g09d2 From 1a140c93e9c04b7507d470621e953ddc2a99520b Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Thu, 11 Jul 2019 02:35:15 -0400 Subject: removed extraneous docs namespace --- src/client/documents/Documents.ts | 16 +--------------- 1 file changed, 1 insertion(+), 15 deletions(-) (limited to 'src') diff --git a/src/client/documents/Documents.ts b/src/client/documents/Documents.ts index 5a3f9574f..961cd3479 100644 --- a/src/client/documents/Documents.ts +++ b/src/client/documents/Documents.ts @@ -57,20 +57,6 @@ export enum DocumentType { LINK = "link" } -export namespace DocTypeUtils { - - export function values(includeNone: boolean = true): string[] { - let types = Object.values(DocumentType); - return includeNone ? types : types.filter(key => key !== DocumentType.NONE); - } - - export function keys(includeNone: boolean = true): string[] { - let types = Object.keys(DocumentType); - return includeNone ? types : types.filter(key => key !== DocumentType.NONE); - } - -} - export interface DocumentOptions { x?: number; y?: number; @@ -174,7 +160,7 @@ export namespace Docs { export async function initialize(): Promise { // non-guid string ids for each document prototype let suffix = "Proto"; - let prototypeIds: string[] = DocTypeUtils.values(false).map(type => type + suffix); + let prototypeIds = Object.values(DocumentType).filter(type => type !== DocumentType.NONE).map(type => type + suffix); // fetch the actual prototype documents from the server let actualProtos = await DocServer.GetRefFields(prototypeIds); -- cgit v1.2.3-70-g09d2 From 79275265ddc04d8c590fc884eeccf00762c07384 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Thu, 11 Jul 2019 11:17:22 -0400 Subject: more changes --- src/client/documents/Documents.ts | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) (limited to 'src') diff --git a/src/client/documents/Documents.ts b/src/client/documents/Documents.ts index 961cd3479..cda116570 100644 --- a/src/client/documents/Documents.ts +++ b/src/client/documents/Documents.ts @@ -145,6 +145,8 @@ export namespace Docs { ]); const PrototypeMap: PrototypeMap = new Map(); + const defaultOptions: DocumentOptions = { x: 0, y: 0, width: 300 }; + const Suffix = "Proto"; /** * This function loads or initializes the prototype for each docment type. 
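To make the id and suffix handling in these hunks concrete: a prototype id is just the DocumentType value plus the shared "Proto" suffix, initialize recovers the type by stripping that suffix, and buildPrototype derives the title by inserting an underscore before the upper-cased suffix. A small worked sketch for the text prototype (illustration only — DocumentType.TEXT is the "text" member of the enum defined earlier in this file):

    const Suffix = "Proto";
    const prototypeId = "text" + Suffix;                                  // "textProto"  (DocumentType.TEXT + Suffix)
    const type = prototypeId.replace(Suffix, "");                         // "text"       -> cast back to DocumentType
    const upper = Suffix.toUpperCase();                                   // "PROTO"
    const title = prototypeId.toUpperCase().replace(upper, `_${upper}`);  // "TEXT_PROTO"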
@@ -165,12 +167,11 @@ export namespace Docs { let actualProtos = await DocServer.GetRefFields(prototypeIds); // update this object to include any default values: DocumentOptions for all prototypes - let defaultOptions: DocumentOptions = { x: 0, y: 0, width: 300 }; prototypeIds.map(id => { let existing = actualProtos[id] as Doc; let type = id.replace(suffix, "") as DocumentType; // get or create prototype of the specified type... - let target = existing || buildPrototype(type, id, defaultOptions); + let target = existing || buildPrototype(type, id); // ...and set it if not undefined (can be undefined only if TemplateMap does not contain // an entry dedicated to the given DocumentType) target && PrototypeMap.set(type, target); @@ -193,14 +194,15 @@ export namespace Docs { * @param options any value specified in the DocumentOptions object likewise * becomes the default value for that key for all delegates */ - function buildPrototype(type: DocumentType, prototypeId: string, defaultOptions: DocumentOptions): Opt { + function buildPrototype(type: DocumentType, prototypeId: string): Opt { let template = TemplateMap.get(type); if (!template) { return undefined; } let primary = template.primary; let background = template.background; - let options = { ...defaultOptions, ...(template.options || {}), title: prototypeId.toUpperCase().replace("PROTO", "_PROTO") }; + let upper = Suffix.toUpperCase(); + let options = { ...defaultOptions, ...(template.options || {}), title: prototypeId.toUpperCase().replace(upper, `_${upper}`) }; background && (options = { ...options, backgroundLayout: background, }); return Doc.assign(new Doc(prototypeId, true), { ...options, layout: primary, baseLayout: primary }); } -- cgit v1.2.3-70-g09d2 From 145bc737a6f234821c941a14e0a859a5c5eac307 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Thu, 11 Jul 2019 12:48:57 -0400 Subject: fixed image issue and added undo redo opacity changes --- src/client/documents/Documents.ts | 5 +++-- .../collections/collectionFreeForm/CollectionFreeFormView.tsx | 7 ++++--- 2 files changed, 7 insertions(+), 5 deletions(-) (limited to 'src') diff --git a/src/client/documents/Documents.ts b/src/client/documents/Documents.ts index cda116570..62ec2f8bc 100644 --- a/src/client/documents/Documents.ts +++ b/src/client/documents/Documents.ts @@ -104,7 +104,7 @@ export namespace Docs { background: HistogramBox.LayoutString() }], [DocumentType.IMG, { - options: { height: 300, backgroundColor: "black" }, + options: { nativeWidth: 600, curPage: 0 }, primary: CollectionView.LayoutString("annotations"), background: ImageBox.LayoutString() }], @@ -202,7 +202,8 @@ export namespace Docs { let primary = template.primary; let background = template.background; let upper = Suffix.toUpperCase(); - let options = { ...defaultOptions, ...(template.options || {}), title: prototypeId.toUpperCase().replace(upper, `_${upper}`) }; + let title = prototypeId.toUpperCase().replace(upper, `_${upper}`); + let options = { title: title, type: type, ...defaultOptions, ...(template.options || {}) }; background && (options = { ...options, backgroundLayout: background, }); return Doc.assign(new Doc(prototypeId, true), { ...options, layout: primary, baseLayout: primary }); } diff --git a/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx b/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx index 1867938a7..12240451b 100644 --- a/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx +++ 
b/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx @@ -283,12 +283,13 @@ export class CollectionFreeFormView extends CollectionSubView(PanZoomDocument) { const panY = this.Document.panY; const id = this.Document[Id]; const state = HistoryUtil.getState(); + state.initializers = state.initializers || {}; // TODO This technically isn't correct if type !== "doc", as // currently nothing is done, but we should probably push a new state if (state.type === "doc" && panX !== undefined && panY !== undefined) { - const init = state.initializers![id]; + const init = state.initializers[id]; if (!init) { - state.initializers![id] = { + state.initializers[id] = { panX, panY }; HistoryUtil.pushState(state); @@ -302,7 +303,7 @@ export class CollectionFreeFormView extends CollectionSubView(PanZoomDocument) { const newPanX = NumCast(doc.x) + NumCast(doc.width) / NumCast(doc.zoomBasis, 1) / 2; const newPanY = NumCast(doc.y) + NumCast(doc.height) / NumCast(doc.zoomBasis, 1) / 2; const newState = HistoryUtil.getState(); - newState.initializers![id] = { panX: newPanX, panY: newPanY }; + (newState.initializers || (newState.initializers = {}))[id] = { panX: newPanX, panY: newPanY }; HistoryUtil.pushState(newState); this.setPan(newPanX, newPanY); -- cgit v1.2.3-70-g09d2 From ba09a26ba84bd8315b1a05e43f7cc0f2b2bd5007 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Thu, 11 Jul 2019 12:52:01 -0400 Subject: prototype default options fix --- src/client/documents/Documents.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'src') diff --git a/src/client/documents/Documents.ts b/src/client/documents/Documents.ts index 62ec2f8bc..f777d8ef7 100644 --- a/src/client/documents/Documents.ts +++ b/src/client/documents/Documents.ts @@ -99,7 +99,7 @@ export namespace Docs { primary: FormattedTextBox.LayoutString() }], [DocumentType.HIST, { - options: { nativeWidth: 600, curPage: 0 }, + options: { height: 300, backgroundColor: "black" }, primary: CollectionView.LayoutString("annotations"), background: HistogramBox.LayoutString() }], -- cgit v1.2.3-70-g09d2 From 98d840f9e155bb15c7e1fbc13212d063f17a08e2 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Thu, 11 Jul 2019 17:07:32 -0400 Subject: final documents.ts refactor --- src/client/documents/Documents.ts | 86 +++++++++++++++++++++++++-------------- src/client/views/MainView.tsx | 4 +- 2 files changed, 57 insertions(+), 33 deletions(-) (limited to 'src') diff --git a/src/client/documents/Documents.ts b/src/client/documents/Documents.ts index f777d8ef7..76685f090 100644 --- a/src/client/documents/Documents.ts +++ b/src/client/documents/Documents.ts @@ -89,62 +89,67 @@ export namespace Docs { export namespace Prototypes { - type PrototypeTemplate = { options?: Partial, primary: string, background?: string }; + type LayoutSource = { + LayoutString: (fieldKey?: string) => string + }; + type PrototypeTemplate = { + layout: { + component: LayoutSource, + collection?: LayoutSource + }, + options?: Partial + }; type TemplateMap = Map; type PrototypeMap = Map; const TemplateMap: TemplateMap = new Map([ [DocumentType.TEXT, { - options: { height: 150, backgroundColor: "#f1efeb" }, - primary: FormattedTextBox.LayoutString() + layout: { component: FormattedTextBox }, + options: { height: 150, backgroundColor: "#f1efeb" } }], [DocumentType.HIST, { - options: { height: 300, backgroundColor: "black" }, - primary: CollectionView.LayoutString("annotations"), - background: HistogramBox.LayoutString() + layout: { component: HistogramBox, collection: 
CollectionView }, + options: { height: 300, backgroundColor: "black" } }], [DocumentType.IMG, { - options: { nativeWidth: 600, curPage: 0 }, - primary: CollectionView.LayoutString("annotations"), - background: ImageBox.LayoutString() + layout: { component: ImageBox, collection: CollectionView }, + options: { nativeWidth: 600, curPage: 0 } }], [DocumentType.WEB, { - options: { height: 300 }, - primary: WebBox.LayoutString() + layout: { component: WebBox }, + options: { height: 300 } }], [DocumentType.COL, { - options: { panX: 0, panY: 0, scale: 1, width: 500, height: 500 }, - primary: CollectionView.LayoutString() + layout: { component: CollectionView }, + options: { panX: 0, panY: 0, scale: 1, width: 500, height: 500 } }], [DocumentType.KVP, { - options: { height: 150 }, - primary: KeyValueBox.LayoutString() + layout: { component: KeyValueBox }, + options: { height: 150 } }], [DocumentType.VID, { + layout: { component: VideoBox, collection: CollectionVideoView }, options: { nativeWidth: 600, curPage: 0 }, - primary: CollectionVideoView.LayoutString("annotations"), - background: VideoBox.LayoutString() }], [DocumentType.AUDIO, { - options: { height: 150 }, - primary: AudioBox.LayoutString() + layout: { component: AudioBox }, + options: { height: 150 } }], [DocumentType.PDF, { - options: { nativeWidth: 1200, curPage: 1 }, - primary: CollectionPDFView.LayoutString("annotations"), - background: PDFBox.LayoutString() + layout: { component: PDFBox, collection: CollectionPDFView }, + options: { nativeWidth: 1200, curPage: 1 } }], [DocumentType.ICON, { + layout: { component: IconBox }, options: { width: Number(MINIMIZED_ICON_SIZE), height: Number(MINIMIZED_ICON_SIZE) }, - primary: IconBox.LayoutString() }], [DocumentType.IMPORT, { - options: { height: 150 }, - primary: DirectoryImportBox.LayoutString() + layout: { component: DirectoryImportBox }, + options: { height: 150 } }] ]); - const PrototypeMap: PrototypeMap = new Map(); + // All document prototypes are initialized with at least these values const defaultOptions: DocumentOptions = { x: 0, y: 0, width: 300 }; const Suffix = "Proto"; @@ -178,7 +183,14 @@ export namespace Docs { }); } - export function get(type: DocumentType) { + /** + * Retrieves the prototype for the given document type, or + * undefined if that type's proto doesn't have a configuration + * in the template map. 
+ * @param type + */ + const PrototypeMap: PrototypeMap = new Map(); + export function get(type: DocumentType): Doc { return PrototypeMap.get(type)!; } @@ -195,17 +207,29 @@ export namespace Docs { * becomes the default value for that key for all delegates */ function buildPrototype(type: DocumentType, prototypeId: string): Opt { + // load template from type let template = TemplateMap.get(type); if (!template) { return undefined; } - let primary = template.primary; - let background = template.background; + let layout = template.layout; + + // create title let upper = Suffix.toUpperCase(); let title = prototypeId.toUpperCase().replace(upper, `_${upper}`); + + // synthesize the default options, the type and title from computed values and + // whatever options pertain to this specific prototype let options = { title: title, type: type, ...defaultOptions, ...(template.options || {}) }; - background && (options = { ...options, backgroundLayout: background, }); - return Doc.assign(new Doc(prototypeId, true), { ...options, layout: primary, baseLayout: primary }); + let primary = layout.component.LayoutString(); + let collection = layout.collection; + if (collection) { + options.layout = collection.LayoutString("annotations"); + options.backgroundLayout = primary; + } else { + options.layout = primary; + } + return Doc.assign(new Doc(prototypeId, true), { ...options, baseLayout: primary }); } } diff --git a/src/client/views/MainView.tsx b/src/client/views/MainView.tsx index b9e5719fd..a99958f1a 100644 --- a/src/client/views/MainView.tsx +++ b/src/client/views/MainView.tsx @@ -386,8 +386,8 @@ export class MainView extends React.Component {
-
-
+
+
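As a closing illustration of the template shape introduced in the "final documents.ts refactor" commit above: each TemplateMap entry now pairs a component layout with an optional collection layout, and buildPrototype turns that pair into the prototype's layout fields. A sketch of what this yields for the image prototype; the import paths are assumptions, since they are not shown in this log.

    import { CollectionView } from "../views/collections/CollectionView"; // assumed path
    import { ImageBox } from "../views/nodes/ImageBox";                   // assumed path

    // IMG template: { component: ImageBox, collection: CollectionView }
    const primary = ImageBox.LayoutString();
    const imageProtoLayoutFields = {
        layout: CollectionView.LayoutString("annotations"), // the wrapping annotation collection...
        backgroundLayout: primary,                          // ...renders the image itself underneath
        baseLayout: primary,
    };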