author    Tyler Schicke <tyler_schicke@brown.edu>    2019-07-25 12:29:35 -0400
committer Tyler Schicke <tyler_schicke@brown.edu>    2019-07-25 12:29:35 -0400
commit    ce71ea9b87427e41c65012c297aab9afc712b4ba (patch)
tree      52f7b7f46be3422a6fcae0b44c2703a6d05fa4e0
parent    7a236b1dbd609827828010f52df8eed6e54b6cd5 (diff)
parent    b4888c53bcc4eb9a37ed4b3dac740bfffc64d3c5 (diff)
Merge branch 'master' of github-tsch-brown:browngraphicslab/Dash-Web
-rw-r--r--  src/client/DocServer.ts                                                       1
-rw-r--r--  src/client/cognitive_services/CognitiveServices.ts                          192
-rw-r--r--  src/client/views/DocumentDecorations.tsx                                      2
-rw-r--r--  src/client/views/InkingCanvas.tsx                                             4
-rw-r--r--  src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx    9
-rw-r--r--  src/client/views/nodes/DocumentView.tsx                                      15
-rw-r--r--  src/client/views/nodes/ImageBox.tsx                                           2
-rw-r--r--  src/client/views/presentationview/PresentationView.tsx                       18
-rw-r--r--  src/scraping/buxton/scraper.py                                               17
9 files changed, 145 insertions, 115 deletions
diff --git a/src/client/DocServer.ts b/src/client/DocServer.ts
index de5e052b9..cb460799f 100644
--- a/src/client/DocServer.ts
+++ b/src/client/DocServer.ts
@@ -5,7 +5,6 @@ import { Utils, emptyFunction } from '../Utils';
import { SerializationHelper } from './util/SerializationHelper';
import { RefField } from '../new_fields/RefField';
import { Id, HandleUpdate } from '../new_fields/FieldSymbols';
-import { CurrentUserUtils } from '../server/authentication/models/current_user_utils';
/**
* This class encapsulates the transfer and cross-client synchronization of
diff --git a/src/client/cognitive_services/CognitiveServices.ts b/src/client/cognitive_services/CognitiveServices.ts
index dcd27f858..d69378d0e 100644
--- a/src/client/cognitive_services/CognitiveServices.ts
+++ b/src/client/cognitive_services/CognitiveServices.ts
@@ -1,5 +1,5 @@
import * as request from "request-promise";
-import { Doc, Field } from "../../new_fields/Doc";
+import { Doc, Field, Opt } from "../../new_fields/Doc";
import { Cast } from "../../new_fields/Types";
import { ImageField } from "../../new_fields/URLField";
import { List } from "../../new_fields/List";
@@ -10,7 +10,16 @@ import { CompileScript } from "../util/Scripting";
import { ComputedField } from "../../new_fields/ScriptField";
import { InkData } from "../../new_fields/InkField";
-export enum Services {
+type APIManager<D> = { converter: BodyConverter<D>, requester: RequestExecutor, analyzer: AnalysisApplier };
+type RequestExecutor = (apiKey: string, body: string, service: Service) => Promise<string>;
+type AnalysisApplier = (target: Doc, relevantKeys: string[], ...args: any) => any;
+type BodyConverter<D> = (data: D) => string;
+type Converter = (results: any) => Field;
+
+export type Tag = { name: string, confidence: number };
+export type Rectangle = { top: number, left: number, width: number, height: number };
+
+export enum Service {
ComputerVision = "vision",
Face = "face",
Handwriting = "handwriting"
@@ -25,11 +34,6 @@ export enum Confidence {
Excellent = 0.95
}
-export type Tag = { name: string, confidence: number };
-export type Rectangle = { top: number, left: number, width: number, height: number };
-export type Face = { faceAttributes: any, faceId: string, faceRectangle: Rectangle };
-export type Converter = (results: any) => Field;
-
/**
* A file that handles all interactions with Microsoft Azure's Cognitive
* Services APIs. These machine learning endpoints allow basic data analytics for
@@ -37,19 +41,36 @@ export type Converter = (results: any) => Field;
*/
export namespace CognitiveServices {
+ const executeQuery = async <D, R>(service: Service, manager: APIManager<D>, data: D): Promise<Opt<R>> => {
+ return fetch(Utils.prepend(`${RouteStore.cognitiveServices}/${service}`)).then(async response => {
+ let apiKey = await response.text();
+ if (!apiKey) {
+ console.log(`No API key found for ${service}: ensure index.ts has access to a .env file in your root directory`);
+ return undefined;
+ }
+
+ let results: Opt<R>;
+ try {
+ results = await manager.requester(apiKey, manager.converter(data), service).then(json => JSON.parse(json));
+ } catch {
+ results = undefined;
+ }
+ return results;
+ });
+ };
+
export namespace Image {
- export const analyze = async (imageUrl: string, service: Services) => {
- return fetch(Utils.prepend(`${RouteStore.cognitiveServices}/${service}`)).then(async response => {
- let apiKey = await response.text();
- if (!apiKey) {
- return undefined;
- }
+ export const Manager: APIManager<string> = {
+
+ converter: (imageUrl: string) => JSON.stringify({ url: imageUrl }),
+
+ requester: async (apiKey: string, body: string, service: Service) => {
let uriBase;
let parameters;
switch (service) {
- case Services.Face:
+ case Service.Face:
uriBase = 'face/v1.0/detect';
parameters = {
'returnFaceId': 'true',
@@ -58,7 +79,7 @@ export namespace CognitiveServices {
'emotion,hair,makeup,occlusion,accessories,blur,exposure,noise'
};
break;
- case Services.ComputerVision:
+ case Service.ComputerVision:
uriBase = 'vision/v2.0/analyze';
parameters = {
'visualFeatures': 'Categories,Description,Color,Objects,Tags,Adult',
@@ -71,42 +92,40 @@ export namespace CognitiveServices {
const options = {
uri: 'https://eastus.api.cognitive.microsoft.com/' + uriBase,
qs: parameters,
- body: `{"url": "${imageUrl}"}`,
+ body: body,
headers: {
'Content-Type': 'application/json',
'Ocp-Apim-Subscription-Key': apiKey
}
};
- let results: any;
- try {
- results = await request.post(options).then(response => JSON.parse(response));
- } catch (e) {
- results = undefined;
- }
- return results;
- });
- };
+ return request.post(options);
+ },
- const analyzeDocument = async (target: Doc, service: Services, converter: Converter, storageKey: string) => {
- let imageData = Cast(target.data, ImageField);
- if (!imageData || await Cast(target[storageKey], Doc)) {
- return;
- }
- let toStore: any;
- let results = await analyze(imageData.url.href, service);
- if (!results) {
- toStore = "Cognitive Services could not process the given image URL.";
- } else {
- if (!results.length) {
- toStore = converter(results);
+ analyzer: async (target: Doc, keys: string[], service: Service, converter: Converter) => {
+ let imageData = Cast(target.data, ImageField);
+ let storageKey = keys[0];
+ if (!imageData || await Cast(target[storageKey], Doc)) {
+ return;
+ }
+ let toStore: any;
+ let results = await executeQuery<string, any>(service, Manager, imageData.url.href);
+ if (!results) {
+ toStore = "Cognitive Services could not process the given image URL.";
} else {
- toStore = results.length > 0 ? converter(results) : "Empty list returned.";
+ if (!results.length) {
+ toStore = converter(results);
+ } else {
+ toStore = results.length > 0 ? converter(results) : "Empty list returned.";
+ }
}
+ target[storageKey] = toStore;
}
- target[storageKey] = toStore;
+
};
+ export type Face = { faceAttributes: any, faceId: string, faceRectangle: Rectangle };
+
export const generateMetadata = async (target: Doc, threshold: Confidence = Confidence.Excellent) => {
let converter = (results: any) => {
let tagDoc = new Doc;
@@ -120,7 +139,7 @@ export namespace CognitiveServices {
tagDoc.confidence = threshold;
return tagDoc;
};
- analyzeDocument(target, Services.ComputerVision, converter, "generatedTags");
+ Manager.analyzer(target, ["generatedTags"], Service.ComputerVision, converter);
};
export const extractFaces = async (target: Doc) => {
@@ -129,48 +148,43 @@ export namespace CognitiveServices {
results.map((face: Face) => faceDocs.push(Docs.Get.DocumentHierarchyFromJson(face, `Face: ${face.faceId}`)!));
return faceDocs;
};
- analyzeDocument(target, Services.Face, converter, "faces");
+ Manager.analyzer(target, ["faces"], Service.Face, converter);
};
}
export namespace Inking {
- export interface AzureStrokeData {
- id: number;
- points: string;
- language?: string;
- }
-
- export interface HandwritingUnit {
- version: number;
- language: string;
- unit: string;
- strokes: AzureStrokeData[];
- }
-
- export const analyze = async (inkData: InkData, target: Doc) => {
- return fetch(Utils.prepend(`${RouteStore.cognitiveServices}/${Services.Handwriting}`)).then(async response => {
- let apiKey = await response.text();
- if (!apiKey) {
- return undefined;
+ export const Manager: APIManager<InkData> = {
+
+ converter: (inkData: InkData): string => {
+ let entries = inkData.entries(), next = entries.next();
+ let strokes: AzureStrokeData[] = [], id = 0;
+ while (!next.done) {
+ strokes.push({
+ id: id++,
+ points: next.value[1].pathData.map(point => `${point.x},${point.y}`).join(","),
+ language: "en-US"
+ });
+ next = entries.next();
}
+ return JSON.stringify({
+ version: 1,
+ language: "en-US",
+ unit: "mm",
+ strokes: strokes
+ });
+ },
+ requester: async (apiKey: string, body: string) => {
let xhttp = new XMLHttpRequest();
let serverAddress = "https://api.cognitive.microsoft.com";
let endpoint = serverAddress + "/inkrecognizer/v1.0-preview/recognize";
- let requestExecutor = (resolve: any, reject: any) => {
- let result: any;
- let body = format(inkData);
-
+ let promisified = (resolve: any, reject: any) => {
xhttp.onreadystatechange = function () {
if (this.readyState === 4) {
- try {
- result = JSON.parse(xhttp.responseText);
- } catch (e) {
- return reject(e);
- }
+ let result = xhttp.responseText;
switch (this.status) {
case 200:
return resolve(result);
@@ -187,37 +201,35 @@ export namespace CognitiveServices {
xhttp.send(body);
};
- let results = await new Promise<any>(requestExecutor);
+ return new Promise<any>(promisified);
+ },
+ analyzer: async (target: Doc, keys: string[], inkData: InkData) => {
+ let results = await executeQuery<InkData, any>(Service.Handwriting, Manager, inkData);
if (results) {
results.recognitionUnits && (results = results.recognitionUnits);
- target.inkAnalysis = Docs.Get.DocumentHierarchyFromJson(results, "Ink Analysis");
+ target[keys[0]] = Docs.Get.DocumentHierarchyFromJson(results, "Ink Analysis");
let recognizedText = results.map((item: any) => item.recognizedText);
let individualWords = recognizedText.filter((text: string) => text && text.split(" ").length === 1);
- target.handwriting = individualWords.join(" ");
+ target[keys[1]] = individualWords.join(" ");
}
- });
- };
-
- const format = (inkData: InkData): string => {
- let entries = inkData.entries(), next = entries.next();
- let strokes: AzureStrokeData[] = [], id = 0;
- while (!next.done) {
- strokes.push({
- id: id++,
- points: next.value[1].pathData.map(point => `${point.x},${point.y}`).join(","),
- language: "en-US"
- });
- next = entries.next();
}
- return JSON.stringify({
- version: 1,
- language: "en-US",
- unit: "mm",
- strokes: strokes
- });
+
};
+ export interface AzureStrokeData {
+ id: number;
+ points: string;
+ language?: string;
+ }
+
+ export interface HandwritingUnit {
+ version: number;
+ language: string;
+ unit: string;
+ strokes: AzureStrokeData[];
+ }
+
}
}
\ No newline at end of file
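Note on the CognitiveServices.ts hunks above: the per-namespace analyze/analyzeDocument helpers are folded into a shared executeQuery function plus one APIManager<D> per domain (Image, Inking), and callers now pass the storage keys explicitly. A minimal sketch of how a caller drives the refactored image path; the imageDoc variable and the relative import paths are illustrative assumptions, not part of this commit:

```typescript
import { Doc } from "../../new_fields/Doc";
import { CognitiveServices, Confidence } from "../cognitive_services/CognitiveServices";

// Sketch: 'imageDoc' stands in for any Doc whose data field holds an ImageField URL.
async function annotateImage(imageDoc: Doc) {
    // Image.Manager.analyzer resolves the document's image URL, runs executeQuery
    // against Service.ComputerVision, and stores the converted tag Doc under
    // "generatedTags" (skipping documents that already have one).
    await CognitiveServices.Image.generateMetadata(imageDoc, Confidence.Excellent);

    // Same flow against Service.Face; the converted face list lands under "faces".
    await CognitiveServices.Image.extractFaces(imageDoc);
}
```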
diff --git a/src/client/views/DocumentDecorations.tsx b/src/client/views/DocumentDecorations.tsx
index 989b35581..6fecbd3a7 100644
--- a/src/client/views/DocumentDecorations.tsx
+++ b/src/client/views/DocumentDecorations.tsx
@@ -304,7 +304,7 @@ export class DocumentDecorations extends React.Component<{}, { value: string }>
iconDoc.height = Number(MINIMIZED_ICON_SIZE);
iconDoc.x = NumCast(doc.x);
iconDoc.y = NumCast(doc.y) - 24;
- iconDoc.maximizedDocs = new List<Doc>(selected.map(s => s.props.Document.proto!));
+ iconDoc.maximizedDocs = new List<Doc>(selected.map(s => s.props.Document));
selected.length === 1 && (doc.minimizedDoc = iconDoc);
selected[0].props.addDocument && selected[0].props.addDocument(iconDoc, false);
return iconDoc;
diff --git a/src/client/views/InkingCanvas.tsx b/src/client/views/InkingCanvas.tsx
index e48b45b56..c4cd863d1 100644
--- a/src/client/views/InkingCanvas.tsx
+++ b/src/client/views/InkingCanvas.tsx
@@ -6,12 +6,10 @@ import "./InkingCanvas.scss";
import { InkingControl } from "./InkingControl";
import { InkingStroke } from "./InkingStroke";
import React = require("react");
-import { undoBatch, UndoManager } from "../util/UndoManager";
+import { UndoManager } from "../util/UndoManager";
import { StrokeData, InkField, InkTool } from "../../new_fields/InkField";
import { Doc } from "../../new_fields/Doc";
import { Cast, PromiseValue, NumCast } from "../../new_fields/Types";
-import { CognitiveServices } from "../cognitive_services/CognitiveServices";
-import { ContextMenu } from "./ContextMenu";
interface InkCanvasProps {
getScreenTransform: () => Transform;
diff --git a/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx b/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx
index bd5c6f84b..701574cfc 100644
--- a/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx
+++ b/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx
@@ -32,7 +32,10 @@ import { OverlayView, OverlayElementOptions } from "../../OverlayView";
import { ScriptBox } from "../../ScriptBox";
import { CompileScript } from "../../../util/Scripting";
import { CognitiveServices } from "../../../cognitive_services/CognitiveServices";
+import { library } from "@fortawesome/fontawesome-svg-core";
+import { faEye } from "@fortawesome/free-regular-svg-icons";
+library.add(faEye);
export const panZoomSchema = createSchema({
panX: "number",
@@ -519,8 +522,10 @@ export class CollectionFreeFormView extends CollectionSubView(PanZoomDocument) {
if (!data) {
return;
}
- CognitiveServices.Inking.analyze(data.inkData, Doc.GetProto(this.props.Document));
- }
+ let target = Doc.GetProto(this.props.Document);
+ let relevantKeys = ["inkAnalysis", "handwriting"];
+ CognitiveServices.Inking.Manager.analyzer(target, relevantKeys, data.inkData);
+ }, icon: "eye"
});
}
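For reference, the context-menu handler above hands the collection's ink data to CognitiveServices.Inking.Manager.analyzer along with the keys it should write back to. The manager's converter serializes the strokes into the HandwritingUnit body the Azure ink-recognizer endpoint expects; a rough sketch with fabricated coordinates (the import path and point values are assumptions for illustration):

```typescript
import { CognitiveServices } from "../../../cognitive_services/CognitiveServices";

// Illustrative HandwritingUnit: the shape Inking.Manager.converter builds from an
// InkData map, with each stroke's points flattened to "x,y,x,y,..." in millimeters.
const exampleBody: CognitiveServices.Inking.HandwritingUnit = {
    version: 1,
    language: "en-US",
    unit: "mm",
    strokes: [
        { id: 0, points: "10.0,10.0,12.5,14.2,15.1,20.0", language: "en-US" },
        { id: 1, points: "30.4,12.7,33.0,18.9", language: "en-US" },
    ],
};

// After the request resolves, analyzer stores the parsed recognition units on the
// target under keys[0] ("inkAnalysis") and the recognized single words under
// keys[1] ("handwriting").
```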
diff --git a/src/client/views/nodes/DocumentView.tsx b/src/client/views/nodes/DocumentView.tsx
index 2b228f4e4..b39c77adb 100644
--- a/src/client/views/nodes/DocumentView.tsx
+++ b/src/client/views/nodes/DocumentView.tsx
@@ -195,10 +195,10 @@ export class DocumentView extends DocComponent<DocumentViewProps, Document>(Docu
DocumentView.animateBetweenIconFunc(doc, width, height, stime, maximizing, cb);
}
else {
- Doc.GetProto(doc).isMinimized = !maximizing;
- Doc.GetProto(doc).isIconAnimating = undefined;
+ doc.isMinimized = !maximizing;
+ doc.isIconAnimating = undefined;
}
- Doc.GetProto(doc).willMaximize = false;
+ doc.willMaximize = false;
},
2);
}
@@ -319,20 +319,19 @@ export class DocumentView extends DocComponent<DocumentViewProps, Document>(Docu
expandedDocs = summarizedDocs ? [...summarizedDocs, ...expandedDocs] : expandedDocs;
// let expandedDocs = [...(subBulletDocs ? subBulletDocs : []), ...(maximizedDocs ? maximizedDocs : []), ...(summarizedDocs ? summarizedDocs : []),];
if (expandedDocs.length) { // bcz: need a better way to associate behaviors with click events on widget-documents
- let expandedProtoDocs = expandedDocs.map(doc => Doc.GetProto(doc));
let maxLocation = StrCast(this.props.Document.maximizeLocation, "inPlace");
let getDispDoc = (target: Doc) => Object.getOwnPropertyNames(target).indexOf("isPrototype") === -1 ? target : Doc.MakeDelegate(target);
if (altKey || ctrlKey) {
maxLocation = this.props.Document.maximizeLocation = (ctrlKey ? maxLocation : (maxLocation === "inPlace" || !maxLocation ? "inTab" : "inPlace"));
if (!maxLocation || maxLocation === "inPlace") {
- let hadView = expandedDocs.length === 1 && DocumentManager.Instance.getDocumentView(expandedProtoDocs[0], this.props.ContainingCollectionView);
+ let hadView = expandedDocs.length === 1 && DocumentManager.Instance.getDocumentView(expandedDocs[0], this.props.ContainingCollectionView);
let wasMinimized = !hadView && expandedDocs.reduce((min, d) => !min && !BoolCast(d.IsMinimized, false), false);
expandedDocs.forEach(maxDoc => Doc.GetProto(maxDoc).isMinimized = false);
- let hasView = expandedDocs.length === 1 && DocumentManager.Instance.getDocumentView(expandedProtoDocs[0], this.props.ContainingCollectionView);
+ let hasView = expandedDocs.length === 1 && DocumentManager.Instance.getDocumentView(expandedDocs[0], this.props.ContainingCollectionView);
if (!hasView) {
this.props.addDocument && expandedDocs.forEach(async maxDoc => this.props.addDocument!(getDispDoc(maxDoc), false));
}
- expandedProtoDocs.forEach(maxDoc => maxDoc.isMinimized = wasMinimized);
+ expandedDocs.forEach(maxDoc => maxDoc.isMinimized = wasMinimized);
}
}
if (maxLocation && maxLocation !== "inPlace" && CollectionDockingView.Instance) {
@@ -344,7 +343,7 @@ export class DocumentView extends DocComponent<DocumentViewProps, Document>(Docu
}
} else {
let scrpt = this.props.ScreenToLocalTransform().scale(this.props.ContentScaling()).inverse().transformPoint(NumCast(this.Document.width) / 2, NumCast(this.Document.height) / 2);
- this.collapseTargetsToPoint(scrpt, expandedProtoDocs);
+ this.collapseTargetsToPoint(scrpt, expandedDocs);
}
}
else if (linkedDocs.length) {
diff --git a/src/client/views/nodes/ImageBox.tsx b/src/client/views/nodes/ImageBox.tsx
index 89ad18dd6..c0c64b6d5 100644
--- a/src/client/views/nodes/ImageBox.tsx
+++ b/src/client/views/nodes/ImageBox.tsx
@@ -216,7 +216,7 @@ export class ImageBox extends DocComponent<FieldViewProps, ImageDocument>(ImageD
});
let modes: ContextMenuProps[] = [];
- let dataDoc = Doc.GetProto(this.Document);
+ let dataDoc = Doc.GetProto(this.props.Document);
modes.push({ description: "Generate Tags", event: () => CognitiveServices.Image.generateMetadata(dataDoc), icon: "tag" });
modes.push({ description: "Find Faces", event: () => CognitiveServices.Image.extractFaces(dataDoc), icon: "camera" });
diff --git a/src/client/views/presentationview/PresentationView.tsx b/src/client/views/presentationview/PresentationView.tsx
index b318f0321..f2fef7f16 100644
--- a/src/client/views/presentationview/PresentationView.tsx
+++ b/src/client/views/presentationview/PresentationView.tsx
@@ -63,6 +63,10 @@ export class PresentationView extends React.Component<PresViewProps> {
@observable titleInputElement: HTMLInputElement | undefined;
@observable PresTitleChangeOpen: boolean = false;
+ @observable opacity = 1;
+ @observable persistOpacity = true;
+ @observable labelOpacity = 0;
+
//initilize class variables
constructor(props: PresViewProps) {
super(props);
@@ -811,7 +815,7 @@ export class PresentationView extends React.Component<PresViewProps> {
let width = NumCast(this.curPresentation.width);
return (
- <div className="presentationView-cont" style={{ width: width, overflow: "hidden" }}>
+ <div className="presentationView-cont" onPointerEnter={action(() => !this.persistOpacity && (this.opacity = 1))} onPointerLeave={action(() => !this.persistOpacity && (this.opacity = 0.4))} style={{ width: width, overflow: "hidden", opacity: this.opacity, transition: "0.7s opacity ease" }}>
<div className="presentationView-heading">
{this.renderSelectOrPresSelection()}
<button title="Close Presentation" className='presentation-icon' onClick={this.closePresentation}><FontAwesomeIcon icon={"times"} /></button>
@@ -830,6 +834,18 @@ export class PresentationView extends React.Component<PresViewProps> {
{this.renderPlayPauseButton()}
<button title="Next" className="presentation-button" onClick={this.next}><FontAwesomeIcon icon={"arrow-right"} /></button>
</div>
+ <input
+ type="checkbox"
+ onChange={action((e: React.ChangeEvent<HTMLInputElement>) => {
+ this.persistOpacity = e.target.checked;
+ this.opacity = this.persistOpacity ? 1 : 0.4;
+ })}
+ checked={this.persistOpacity}
+ style={{ position: "absolute", bottom: 5, left: 5 }}
+ onPointerEnter={action(() => this.labelOpacity = 1)}
+ onPointerLeave={action(() => this.labelOpacity = 0)}
+ />
+ <p style={{ position: "absolute", bottom: 1, left: 22, opacity: this.labelOpacity, transition: "0.7s opacity ease" }}>opacity {this.persistOpacity ? "persistent" : "on focus"}</p>
<PresentationViewList
mainDocument={this.curPresentation}
deleteDocument={this.RemoveDoc}
diff --git a/src/scraping/buxton/scraper.py b/src/scraping/buxton/scraper.py
index 700269727..14490cfe4 100644
--- a/src/scraping/buxton/scraper.py
+++ b/src/scraping/buxton/scraper.py
@@ -15,6 +15,7 @@ source = "./source"
dist = "../../server/public/files"
db = MongoClient("localhost", 27017)["Dash"]
+target_collection = db.newDocuments
schema_guids = []
@@ -84,7 +85,7 @@ def write_schema(parse_results, display_fields, storage_key):
"height": 600,
"panX": 0,
"panY": 0,
- "zoomBasis": 0.5,
+ "zoomBasis": 1,
"zIndex": 2,
"libraryBrush": False,
"viewType": 2
@@ -106,8 +107,8 @@ def write_schema(parse_results, display_fields, storage_key):
fields["isPrototype"] = True
fields["page"] = -1
- db.newDocuments.insert_one(data_doc)
- db.newDocuments.insert_one(view_doc)
+ target_collection.insert_one(data_doc)
+ target_collection.insert_one(view_doc)
data_doc_guid = data_doc["_id"]
print(f"inserted view document ({view_doc_guid})")
@@ -158,8 +159,8 @@ def write_text_doc(content):
"__type": "Doc"
}
- db.newDocuments.insert_one(view_doc)
- db.newDocuments.insert_one(data_doc)
+ target_collection.insert_one(view_doc)
+ target_collection.insert_one(data_doc)
return view_doc_guid
@@ -209,8 +210,8 @@ def write_image(folder, name):
"__type": "Doc"
}
- db.newDocuments.insert_one(view_doc)
- db.newDocuments.insert_one(data_doc)
+ target_collection.insert_one(view_doc)
+ target_collection.insert_one(data_doc)
return view_doc_guid
@@ -372,7 +373,7 @@ parent_guid = write_schema({
}, ["title", "short_description", "original_price"], "data")
print("appending parent schema to main workspace...\n")
-db.newDocuments.update_one(
+target_collection.update_one(
{"fields.title": "WS collection 1"},
{"$push": {"fields.data.fields": {"fieldId": parent_guid, "__type": "proxy"}}}
)