aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--src/client/ClientRecommender.tsx8
-rw-r--r--src/client/apis/IBM_Recommender.ts66
-rw-r--r--src/client/cognitive_services/CognitiveServices.ts8
-rw-r--r--src/client/util/DocumentManager.ts7
-rw-r--r--src/client/util/DragManager.ts2
-rw-r--r--src/client/util/ProsemirrorExampleTransfer.ts2
-rw-r--r--src/client/util/RichTextSchema.tsx14
-rw-r--r--src/client/util/SearchUtil.ts4
-rw-r--r--src/client/views/KeyphraseQueryView.tsx4
-rw-r--r--src/client/views/collections/CollectionDockingView.tsx4
-rw-r--r--src/client/views/collections/CollectionSubView.tsx2
-rw-r--r--src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx2
-rw-r--r--src/client/views/collections/collectionMulticolumn/CollectionMulticolumnView.tsx2
-rw-r--r--src/client/views/linking/LinkEditor.tsx4
-rw-r--r--src/client/views/nodes/AudioBox.scss15
-rw-r--r--src/client/views/nodes/AudioBox.tsx38
-rw-r--r--src/client/views/nodes/DocuLinkBox.tsx4
-rw-r--r--src/client/views/nodes/DocumentView.tsx18
-rw-r--r--src/client/views/nodes/FormattedTextBox.tsx20
-rw-r--r--src/client/views/nodes/FormattedTextBoxComment.tsx2
-rw-r--r--src/client/views/nodes/PDFBox.tsx10
-rw-r--r--src/client/views/nodes/WebBox.tsx2
-rw-r--r--src/new_fields/Schema.ts2
-rw-r--r--src/server/ApiManagers/UploadManager.ts14
-rw-r--r--src/server/ApiManagers/UtilManager.ts44
-rw-r--r--src/server/DashUploadUtils.ts19
-rw-r--r--src/server/Recommender.ts274
-rw-r--r--src/server/SharedMediaTypes.ts1
28 files changed, 311 insertions, 281 deletions
diff --git a/src/client/ClientRecommender.tsx b/src/client/ClientRecommender.tsx
index cb1674943..0e67a6e57 100644
--- a/src/client/ClientRecommender.tsx
+++ b/src/client/ClientRecommender.tsx
@@ -5,10 +5,10 @@ import { CognitiveServices, Confidence, Tag, Service } from "./cognitive_service
import React = require("react");
import { observer } from "mobx-react";
import { observable, action, computed, reaction } from "mobx";
-var assert = require('assert');
-var sw = require('stopword');
-var FeedParser = require('feedparser');
-var https = require('https');
+// var assert = require('assert');
+// var sw = require('stopword');
+// var FeedParser = require('feedparser');
+// var https = require('https');
import "./ClientRecommender.scss";
import { JSXElement } from "babel-types";
import { RichTextField } from "../new_fields/RichTextField";
diff --git a/src/client/apis/IBM_Recommender.ts b/src/client/apis/IBM_Recommender.ts
index da6257f28..4e1c541c8 100644
--- a/src/client/apis/IBM_Recommender.ts
+++ b/src/client/apis/IBM_Recommender.ts
@@ -1,40 +1,40 @@
-import { Opt } from "../../new_fields/Doc";
+// import { Opt } from "../../new_fields/Doc";
-const NaturalLanguageUnderstandingV1 = require('ibm-watson/natural-language-understanding/v1');
-const { IamAuthenticator } = require('ibm-watson/auth');
+// const NaturalLanguageUnderstandingV1 = require('ibm-watson/natural-language-understanding/v1');
+// const { IamAuthenticator } = require('ibm-watson/auth');
-export namespace IBM_Recommender {
+// export namespace IBM_Recommender {
- // pass to IBM account is Browngfx1
+// // pass to IBM account is Browngfx1
- const naturalLanguageUnderstanding = new NaturalLanguageUnderstandingV1({
- version: '2019-07-12',
- authenticator: new IamAuthenticator({
- apikey: 'tLiYwbRim3CnBcCO4phubpf-zEiGcub1uh0V-sD9OKhw',
- }),
- url: 'https://gateway-wdc.watsonplatform.net/natural-language-understanding/api'
- });
+// const naturalLanguageUnderstanding = new NaturalLanguageUnderstandingV1({
+// version: '2019-07-12',
+// authenticator: new IamAuthenticator({
+// apikey: 'tLiYwbRim3CnBcCO4phubpf-zEiGcub1uh0V-sD9OKhw',
+// }),
+// url: 'https://gateway-wdc.watsonplatform.net/natural-language-understanding/api'
+// });
- const analyzeParams = {
- 'text': 'this is a test of the keyword extraction feature I am integrating into the program',
- 'features': {
- 'keywords': {
- 'sentiment': true,
- 'emotion': true,
- 'limit': 3
- },
- }
- };
+// const analyzeParams = {
+// 'text': 'this is a test of the keyword extraction feature I am integrating into the program',
+// 'features': {
+// 'keywords': {
+// 'sentiment': true,
+// 'emotion': true,
+// 'limit': 3
+// },
+// }
+// };
- export const analyze = async (_parameters: any): Promise<Opt<string>> => {
- try {
- const response = await naturalLanguageUnderstanding.analyze(_parameters);
- console.log(response);
- return (JSON.stringify(response, null, 2));
- } catch (err) {
- console.log('error: ', err);
- return undefined;
- }
- };
+// export const analyze = async (_parameters: any): Promise<Opt<string>> => {
+// try {
+// const response = await naturalLanguageUnderstanding.analyze(_parameters);
+// console.log(response);
+// return (JSON.stringify(response, null, 2));
+// } catch (err) {
+// console.log('error: ', err);
+// return undefined;
+// }
+// };
-} \ No newline at end of file
+// } \ No newline at end of file
diff --git a/src/client/cognitive_services/CognitiveServices.ts b/src/client/cognitive_services/CognitiveServices.ts
index ce829eb1e..542ccf04d 100644
--- a/src/client/cognitive_services/CognitiveServices.ts
+++ b/src/client/cognitive_services/CognitiveServices.ts
@@ -371,9 +371,9 @@ export namespace CognitiveServices {
let args = { method: 'POST', uri: Utils.prepend("/recommender"), body: { keyphrases: keyterms }, json: true };
await requestPromise.post(args).then(async (wordvecs) => {
if (wordvecs) {
- let indices = Object.keys(wordvecs);
+ const indices = Object.keys(wordvecs);
console.log("successful vectorization!");
- var vectorValues = new List<number>();
+ const vectorValues = new List<number>();
indices.forEach((ind: any) => {
//console.log(wordvec.word);
vectorValues.push(wordvecs[ind]);
@@ -389,9 +389,9 @@ export namespace CognitiveServices {
}
export const analyzer = async (dataDoc: Doc, target: Doc, keys: string[], data: string, converter: TextConverter, isMainDoc: boolean = false, isInternal: boolean = true) => {
- let results = await ExecuteQuery(Service.Text, Manager, data);
+ const results = await ExecuteQuery(Service.Text, Manager, data);
console.log("Cognitive Services keyphrases: ", results);
- let { keyterms, external_recommendations, kp_string } = await converter(results, data);
+ const { keyterms, external_recommendations, kp_string } = await converter(results, data);
target[keys[0]] = keyterms;
if (isInternal) {
//await vectorize([data], dataDoc, isMainDoc);
diff --git a/src/client/util/DocumentManager.ts b/src/client/util/DocumentManager.ts
index 4e82459f0..c41304b9f 100644
--- a/src/client/util/DocumentManager.ts
+++ b/src/client/util/DocumentManager.ts
@@ -209,7 +209,12 @@ export class DocumentManager {
const maxLocation = StrCast(linkDoc.maximizeLocation, "inTab");
const targetContext = !Doc.AreProtosEqual(linkFollowDocContexts[reverse ? 1 : 0], currentContext) ? linkFollowDocContexts[reverse ? 1 : 0] : undefined;
const target = linkFollowDocs[reverse ? 1 : 0];
- target.currentTimecode !== undefined && (target.currentTimecode = linkFollowTimecodes[reverse ? 1 : 0]);
+ const annotatedDoc = await Cast(target.annotationOn, Doc);
+ if (annotatedDoc) {
+ annotatedDoc.currentTimecode !== undefined && (target.currentTimecode = linkFollowTimecodes[reverse ? 1 : 0]);
+ } else {
+ target.currentTimecode !== undefined && (target.currentTimecode = linkFollowTimecodes[reverse ? 1 : 0]);
+ }
DocumentManager.Instance.jumpToDocument(linkFollowDocs[reverse ? 1 : 0], zoom, (doc: Doc) => focus(doc, maxLocation), targetContext, linkDoc[Id], undefined, doc);
} else if (link) {
DocumentManager.Instance.jumpToDocument(link, zoom, (doc: Doc) => focus(doc, "onRight"), undefined, undefined);
diff --git a/src/client/util/DragManager.ts b/src/client/util/DragManager.ts
index dab5c842c..c11675894 100644
--- a/src/client/util/DragManager.ts
+++ b/src/client/util/DragManager.ts
@@ -198,7 +198,7 @@ export namespace DragManager {
dropDoc && !dropDoc.creationDate && (dropDoc.creationDate = new DateField);
dropDoc instanceof Doc && AudioBox.ActiveRecordings.map(d => DocUtils.MakeLink({ doc: dropDoc }, { doc: d }, "audio link", "audio timeline"));
return dropDoc;
- }
+ };
const finishDrag = (e: DragCompleteEvent) => {
e.docDragData && (e.docDragData.droppedDocuments =
dragData.draggedDocuments.map(d => !dragData.isSelectionMove && !dragData.userDropAction && ScriptCast(d.onDragStart) ? addAudioTag(ScriptCast(d.onDragStart).script.run({ this: d }).result) :
diff --git a/src/client/util/ProsemirrorExampleTransfer.ts b/src/client/util/ProsemirrorExampleTransfer.ts
index ec5d1e72a..5cbf401d4 100644
--- a/src/client/util/ProsemirrorExampleTransfer.ts
+++ b/src/client/util/ProsemirrorExampleTransfer.ts
@@ -180,7 +180,7 @@ export default function buildKeymap<S extends Schema<any>>(schema: S, props: any
return true;
}
return false;
- }
+ };
bind("Alt-Enter", (state: EditorState<S>, dispatch: (tx: Transaction<Schema<any, any>>) => void) => {
return addTextOnRight(true);
});
diff --git a/src/client/util/RichTextSchema.tsx b/src/client/util/RichTextSchema.tsx
index 0adf060ec..31935df3e 100644
--- a/src/client/util/RichTextSchema.tsx
+++ b/src/client/util/RichTextSchema.tsx
@@ -889,10 +889,10 @@ export class DashFieldView {
e.stopPropagation();
const collview = await Doc.addFieldEnumerations(self._textBoxDoc, node.attrs.fieldKey, [{ title: self._fieldSpan.innerText }]);
collview instanceof Doc && tbox.props.addDocTab(collview, "onRight");
- }
+ };
const updateText = (forceMatch: boolean) => {
self._enumerables.style.display = "none";
- let newText = self._fieldSpan.innerText.startsWith(":=") || self._fieldSpan.innerText.startsWith("=:=") ? ":=-computed-" : self._fieldSpan.innerText;
+ const newText = self._fieldSpan.innerText.startsWith(":=") || self._fieldSpan.innerText.startsWith("=:=") ? ":=-computed-" : self._fieldSpan.innerText;
// look for a document whose id === the fieldKey being displayed. If there's a match, then that document
// holds the different enumerated values for the field in the titles of its collected documents.
@@ -909,12 +909,12 @@ export class DashFieldView {
// if the text starts with a ':=' then treat it as an expression by making a computed field from its value storing it in the key
if (self._fieldSpan.innerText.startsWith(":=") && self._dashDoc) {
- self._dashDoc![self._fieldKey] = ComputedField.MakeFunction(self._fieldSpan.innerText.substring(2));
+ self._dashDoc[self._fieldKey] = ComputedField.MakeFunction(self._fieldSpan.innerText.substring(2));
} else if (self._fieldSpan.innerText.startsWith("=:=") && self._dashDoc) {
Doc.Layout(tbox.props.Document)[self._fieldKey] = ComputedField.MakeFunction(self._fieldSpan.innerText.substring(3));
}
});
- }
+ };
this._fieldSpan = document.createElement("div");
this._fieldSpan.id = Utils.GenerateGuid();
@@ -926,14 +926,14 @@ export class DashFieldView {
this._fieldSpan.onkeypress = function (e: any) { e.stopPropagation(); };
this._fieldSpan.onkeyup = function (e: any) { e.stopPropagation(); };
this._fieldSpan.onmousedown = function (e: any) { e.stopPropagation(); self._enumerables.style.display = "inline-block"; };
- this._fieldSpan.onblur = function (e: any) { updateText(false); }
+ this._fieldSpan.onblur = function (e: any) { updateText(false); };
const setDashDoc = (doc: Doc) => {
self._dashDoc = doc;
if (self._dashDoc && self._options?.length && !self._dashDoc[node.attrs.fieldKey]) {
self._dashDoc[node.attrs.fieldKey] = StrCast(self._options[0].title);
}
- }
+ };
this._fieldSpan.onkeydown = function (e: any) {
e.stopPropagation();
if ((e.key === "a" && e.ctrlKey) || (e.key === "a" && e.metaKey)) {
@@ -976,7 +976,7 @@ export class DashFieldView {
alias._pivotField = self._fieldKey;
tbox.props.addDocTab(alias, "onRight");
}
- }
+ };
this._labelSpan.innerHTML = `${node.attrs.fieldKey}: `;
if (node.attrs.docid) {
DocServer.GetRefField(node.attrs.docid).then(async dashDoc => dashDoc instanceof Doc && runInAction(() => setDashDoc(dashDoc)));
diff --git a/src/client/util/SearchUtil.ts b/src/client/util/SearchUtil.ts
index 64874b994..2d9c807dd 100644
--- a/src/client/util/SearchUtil.ts
+++ b/src/client/util/SearchUtil.ts
@@ -121,12 +121,12 @@ export namespace SearchUtil {
export async function GetAllDocs() {
const query = "*";
- let response = await rp.get(Utils.prepend('/search'), {
+ const response = await rp.get(Utils.prepend('/search'), {
qs:
{ start: 0, rows: 10000, q: query },
});
- let result: IdSearchResult = JSON.parse(response);
+ const result: IdSearchResult = JSON.parse(response);
const { ids, numFound, highlighting } = result;
//console.log(ids.length);
const docMap = await DocServer.GetRefFields(ids);
diff --git a/src/client/views/KeyphraseQueryView.tsx b/src/client/views/KeyphraseQueryView.tsx
index a9dafc4a4..1dc156968 100644
--- a/src/client/views/KeyphraseQueryView.tsx
+++ b/src/client/views/KeyphraseQueryView.tsx
@@ -15,8 +15,8 @@ export class KeyphraseQueryView extends React.Component<KP_Props>{
}
render() {
- let kps = this.props.keyphrases.toString();
- let keyterms = this.props.keyphrases.split(',');
+ const kps = this.props.keyphrases.toString();
+ const keyterms = this.props.keyphrases.split(',');
return (
<div>
<h5>Select queries to send:</h5>
diff --git a/src/client/views/collections/CollectionDockingView.tsx b/src/client/views/collections/CollectionDockingView.tsx
index db8f7d5e4..00e22d6fb 100644
--- a/src/client/views/collections/CollectionDockingView.tsx
+++ b/src/client/views/collections/CollectionDockingView.tsx
@@ -138,8 +138,8 @@ export class CollectionDockingView extends React.Component<SubCollectionViewProp
}
}
return false;
- }
- let retVal = !instance?._goldenLayout.root.contentItems[0].isRow ? false :
+ };
+ const retVal = !instance?._goldenLayout.root.contentItems[0].isRow ? false :
Array.from(instance._goldenLayout.root.contentItems[0].contentItems).some((child: any) => Array.from(child.contentItems).some(tryClose));
retVal && instance.stateChanged();
diff --git a/src/client/views/collections/CollectionSubView.tsx b/src/client/views/collections/CollectionSubView.tsx
index 527623ad4..b995fc7d5 100644
--- a/src/client/views/collections/CollectionSubView.tsx
+++ b/src/client/views/collections/CollectionSubView.tsx
@@ -107,7 +107,7 @@ export function CollectionSubView<T>(schemaCtor: (doc: Doc) => T) {
get childLayoutPairs(): { layout: Doc; data: Doc; }[] {
const { Document, DataDoc } = this.props;
const validPairs = this.childDocs.map(doc => Doc.GetLayoutDataDocPair(Document, !this.props.annotationsKey ? DataDoc : undefined, doc)).filter(pair => pair.layout);
- return validPairs.map(({ data, layout }) => ({ data: data!, layout: layout! })); // this mapping is a bit of a hack to coerce types
+ return validPairs.map(({ data, layout }) => ({ data, layout: layout! })); // this mapping is a bit of a hack to coerce types
}
get childDocList() {
return Cast(this.dataField, listSpec(Doc));
diff --git a/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx b/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx
index 28f8bc048..7adafea0e 100644
--- a/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx
+++ b/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx
@@ -801,7 +801,7 @@ export class CollectionFreeFormView extends CollectionSubView(PanZoomDocument) {
this.props.Document.scrollY = NumCast(doc.y) - offset;
}
- afterFocus && setTimeout(() => afterFocus?.(), 1000);
+ afterFocus && setTimeout(afterFocus, 1000);
} else {
const layoutdoc = Doc.Layout(doc);
const newPanX = NumCast(doc.x) + NumCast(layoutdoc._width) / 2;
diff --git a/src/client/views/collections/collectionMulticolumn/CollectionMulticolumnView.tsx b/src/client/views/collections/collectionMulticolumn/CollectionMulticolumnView.tsx
index bd20781dc..aa8e1fb43 100644
--- a/src/client/views/collections/collectionMulticolumn/CollectionMulticolumnView.tsx
+++ b/src/client/views/collections/collectionMulticolumn/CollectionMulticolumnView.tsx
@@ -223,7 +223,7 @@ export class CollectionMulticolumnView extends CollectionSubView(MulticolumnDocu
*/
@computed
private get contents(): JSX.Element[] | null {
- let { childLayoutPairs } = this;
+ const { childLayoutPairs } = this;
const { Document, PanelHeight } = this.props;
const collector: JSX.Element[] = [];
for (let i = 0; i < childLayoutPairs.length; i++) {
diff --git a/src/client/views/linking/LinkEditor.tsx b/src/client/views/linking/LinkEditor.tsx
index ac4f8a3cf..b7f3dd995 100644
--- a/src/client/views/linking/LinkEditor.tsx
+++ b/src/client/views/linking/LinkEditor.tsx
@@ -253,8 +253,8 @@ export class LinkGroupEditor extends React.Component<LinkGroupEditorProps> {
render() {
const groupType = StrCast(this.props.groupDoc.linkRelationship);
// if ((groupType && LinkManager.Instance.getMetadataKeysInGroup(groupType).length > 0) || groupType === "") {
- let buttons = <button className="linkEditor-button" disabled={groupType === ""} onClick={() => this.deleteGroup(groupType)} title="Delete Relationship from all links"><FontAwesomeIcon icon="trash" size="sm" /></button>;
- let addButton = <button className="linkEditor-addbutton" onClick={() => this.addMetadata(groupType)} disabled={groupType === ""} title="Add metadata to relationship"><FontAwesomeIcon icon="plus" size="sm" /></button>;
+ const buttons = <button className="linkEditor-button" disabled={groupType === ""} onClick={() => this.deleteGroup(groupType)} title="Delete Relationship from all links"><FontAwesomeIcon icon="trash" size="sm" /></button>;
+ const addButton = <button className="linkEditor-addbutton" onClick={() => this.addMetadata(groupType)} disabled={groupType === ""} title="Add metadata to relationship"><FontAwesomeIcon icon="plus" size="sm" /></button>;
return (
<div className="linkEditor-group">
diff --git a/src/client/views/nodes/AudioBox.scss b/src/client/views/nodes/AudioBox.scss
index 83cdf3574..fb16b8365 100644
--- a/src/client/views/nodes/AudioBox.scss
+++ b/src/client/views/nodes/AudioBox.scss
@@ -5,6 +5,11 @@
display:flex;
pointer-events: all;
cursor:default;
+ .audiobox-buttons {
+ display: flex;
+ width: 100%;
+ align-items: center;
+ }
.audiobox-handle {
width:20px;
height:100%;
@@ -24,13 +29,14 @@
pointer-events: all;
width:100%;
height:100%;
- position: absolute;
+ position: relative;
pointer-events: none;
}
.audiobox-record-interactive {
pointer-events: all;
width:100%;
height:100%;
+ position: relative;
}
.audiobox-controls {
width:100%;
@@ -46,13 +52,18 @@
position: relative;
padding-right: 5px;
display: flex;
- .audiobox-playhead {
+ .audiobox-playhead, .audiobox-dictation {
position: relative;
margin-top: auto;
margin-bottom: auto;
width: 25px;
padding: 2px;
}
+ .audiobox-dictation {
+ align-items: center;
+ display: inherit;
+ background: dimgray;
+ }
.audiobox-timeline {
position:relative;
height:100%;
diff --git a/src/client/views/nodes/AudioBox.tsx b/src/client/views/nodes/AudioBox.tsx
index ea26cc43d..017dc6286 100644
--- a/src/client/views/nodes/AudioBox.tsx
+++ b/src/client/views/nodes/AudioBox.tsx
@@ -20,6 +20,8 @@ import { DocumentView } from "./DocumentView";
import { Docs } from "../../documents/Documents";
import { ComputedField } from "../../../new_fields/ScriptField";
+// testing testing
+
interface Window {
MediaRecorder: MediaRecorder;
}
@@ -46,8 +48,8 @@ export class AudioBox extends DocExtendableComponent<FieldViewProps, AudioDocume
_ele: HTMLAudioElement | null = null;
_recorder: any;
_recordStart = 0;
+ _stream: MediaStream | undefined;
- public static START = 0;
@observable private static _scrubTime = 0;
@computed get audioState(): undefined | "recording" | "paused" | "playing" { return this.dataDoc.audioState as (undefined | "recording" | "paused" | "playing"); }
set audioState(value) { this.dataDoc.audioState = value; }
@@ -137,15 +139,13 @@ export class AudioBox extends DocExtendableComponent<FieldViewProps, AudioDocume
}
recordAudioAnnotation = () => {
- let gumStream: any;
const self = this;
navigator.mediaDevices.getUserMedia({
audio: true
}).then(function (stream) {
- gumStream = stream;
+ self._stream = stream;
self._recorder = new MediaRecorder(stream);
self.dataDoc[self.props.fieldKey + "-recordingStart"] = new DateField(new Date());
- AudioBox.START = new DateField(new Date()).date.getTime();
AudioBox.ActiveRecordings.push(self.props.Document);
self._recorder.ondataavailable = async function (e: any) {
const formData = new FormData();
@@ -154,23 +154,16 @@ export class AudioBox extends DocExtendableComponent<FieldViewProps, AudioDocume
method: 'POST',
body: formData
});
- const json = await res.json();
- json.map(async (file: any) => {
- const path = file.result.accessPaths.agnostic.client;
- const url = Utils.prepend(path);
- // upload to server with known URL
- self.props.Document[self.props.fieldKey] = new AudioField(url);
- });
+ const files = await res.json();
+ const url = Utils.prepend(files[0].result.accessPaths.agnostic.client);
+ // upload to server with known URL
+ self.props.Document[self.props.fieldKey] = new AudioField(url);
};
self._recordStart = new Date().getTime();
- console.log("RECORD START = " + self._recordStart);
runInAction(() => self.audioState = "recording");
setTimeout(self.updateRecordTime, 0);
self._recorder.start();
- setTimeout(() => {
- self.stopRecording();
- gumStream.getAudioTracks()[0].stop();
- }, 60 * 60 * 1000); // stop after an hour?
+ setTimeout(() => self._recorder && self.stopRecording(), 60 * 1000); // stop after an hour
});
}
@@ -183,8 +176,10 @@ export class AudioBox extends DocExtendableComponent<FieldViewProps, AudioDocume
stopRecording = action(() => {
this._recorder.stop();
+ this._recorder = undefined;
this.dataDoc.duration = (new Date().getTime() - this._recordStart) / 1000;
this.audioState = "paused";
+ this._stream?.getAudioTracks()[0].stop();
const ind = AudioBox.ActiveRecordings.indexOf(this.props.Document);
ind !== -1 && (AudioBox.ActiveRecordings.splice(ind, 1));
});
@@ -211,7 +206,7 @@ export class AudioBox extends DocExtendableComponent<FieldViewProps, AudioDocume
_width: NumCast(this.props.Document._width), _height: 3 * NumCast(this.props.Document._height)
});
Doc.GetProto(newDoc).recordingSource = this.dataDoc;
- Doc.GetProto(newDoc).recordingStart = 0;
+ Doc.GetProto(newDoc).recordingStart = ComputedField.MakeFunction(`this.recordingSource["${this.props.fieldKey}-recordingStart"]`);
Doc.GetProto(newDoc).audioState = ComputedField.MakeFunction("this.recordingSource.audioState");
this.props.addDocument?.(newDoc);
e.stopPropagation();
@@ -239,14 +234,13 @@ export class AudioBox extends DocExtendableComponent<FieldViewProps, AudioDocume
render() {
const interactive = this.active() ? "-interactive" : "";
- return <div className={`audiobox-container`} onContextMenu={this.specificContextMenu}
- onClick={!this.path ? this.recordClick : undefined}>
+ return <div className={`audiobox-container`} onContextMenu={this.specificContextMenu} onClick={!this.path ? this.recordClick : undefined}>
{!this.path ?
- <div style={{ display: "flex", width: "100%", alignItems: "center" }}>
- <div className="audiobox-playhead" style={{ alignItems: "center", display: "inherit", background: "dimgray" }} onClick={this.onFile}>
+ <div className="audiobox-buttons">
+ <div className="audiobox-dictation" onClick={this.onFile}>
<FontAwesomeIcon style={{ width: "30px", background: this.Document.playOnSelect ? "yellow" : "dimGray" }} icon="file-alt" size={this.props.PanelHeight() < 36 ? "1x" : "2x"} />
</div>
- <button className={`audiobox-record${interactive}`} style={{ position: "relative", backgroundColor: this.audioState === "recording" ? "red" : "black" }}>
+ <button className={`audiobox-record${interactive}`} style={{ backgroundColor: this.audioState === "recording" ? "red" : "black" }}>
{this.audioState === "recording" ? "STOP" : "RECORD"}
</button>
</div> :
diff --git a/src/client/views/nodes/DocuLinkBox.tsx b/src/client/views/nodes/DocuLinkBox.tsx
index a0f5b3152..bc663d084 100644
--- a/src/client/views/nodes/DocuLinkBox.tsx
+++ b/src/client/views/nodes/DocuLinkBox.tsx
@@ -113,7 +113,7 @@ export class DocuLinkBox extends DocComponent<FieldViewProps, DocLinkSchema>(Doc
openLinkEditor = action((e: React.MouseEvent) => {
SelectionManager.DeselectAll();
this._editing = this._forceOpen = true;
- })
+ });
specificContextMenu = (e: React.MouseEvent): void => {
const funcs: ContextMenuProps[] = [];
@@ -135,7 +135,7 @@ export class DocuLinkBox extends DocComponent<FieldViewProps, DocLinkSchema>(Doc
const targetTitle = StrCast((this.props.Document[anchor]! as Doc).title) + (timecode !== undefined ? ":" + timecode : "");
const flyout = (
<div className="docuLinkBox-flyout" title=" " onPointerOver={() => Doc.UnBrushDoc(this.props.Document)}>
- <LinkEditor sourceDoc={Cast(this.props.Document[this.props.fieldKey], Doc, null)!} hideback={true} linkDoc={this.props.Document} showLinks={action(() => { })} />
+ <LinkEditor sourceDoc={Cast(this.props.Document[this.props.fieldKey], Doc, null)} hideback={true} linkDoc={this.props.Document} showLinks={action(() => { })} />
{!this._forceOpen ? (null) : <div className="docuLinkBox-linkCloser" onPointerDown={action(() => this._isOpen = this._editing = this._forceOpen = false)}>
<FontAwesomeIcon color="dimGray" icon={"times"} size={"sm"} />
</div>}
diff --git a/src/client/views/nodes/DocumentView.tsx b/src/client/views/nodes/DocumentView.tsx
index bf5f936d1..4e8e973a0 100644
--- a/src/client/views/nodes/DocumentView.tsx
+++ b/src/client/views/nodes/DocumentView.tsx
@@ -15,7 +15,7 @@ import { AudioField, ImageField, PdfField, VideoField } from '../../../new_field
import { TraceMobx } from '../../../new_fields/util';
import { GestureUtils } from '../../../pen-gestures/GestureUtils';
import { CurrentUserUtils } from "../../../server/authentication/models/current_user_utils";
-import { emptyFunction, returnOne, returnTransparent, returnTrue, Utils } from "../../../Utils";
+import { emptyFunction, returnOne, returnTransparent, returnTrue, Utils, OmitKeys } from "../../../Utils";
import { GooglePhotos } from '../../apis/google_docs/GooglePhotosClientUtils';
import { DocServer } from "../../DocServer";
import { Docs, DocumentOptions, DocUtils } from "../../documents/Documents";
@@ -575,9 +575,9 @@ export class DocumentView extends DocComponent<DocumentViewProps, Document>(Docu
else if (de.complete.docDragData.draggedDocuments[0].type === "text") {
const text = Cast(de.complete.docDragData.draggedDocuments[0].data, RichTextField)?.Text;
if (text && text[0] === "{" && text[text.length - 1] === "}" && text.includes(":")) {
- let loc = text.indexOf(":");
- let key = text.slice(1, loc);
- let value = text.slice(loc + 1, text.length - 1);
+ const loc = text.indexOf(":");
+ const key = text.slice(1, loc);
+ const value = text.slice(loc + 1, text.length - 1);
console.log(key);
console.log(value);
console.log(this.props.Document);
@@ -770,7 +770,7 @@ export class DocumentView extends DocComponent<DocumentViewProps, Document>(Docu
// a.download = `DocExport-${this.props.Document[Id]}.zip`;
// a.click();
});
- let recommender_subitems: ContextMenuProps[] = [];
+ const recommender_subitems: ContextMenuProps[] = [];
recommender_subitems.push({
description: "Internal recommendations",
@@ -778,7 +778,7 @@ export class DocumentView extends DocComponent<DocumentViewProps, Document>(Docu
icon: "brain"
});
- let ext_recommender_subitems: ContextMenuProps[] = [];
+ const ext_recommender_subitems: ContextMenuProps[] = [];
ext_recommender_subitems.push({
description: "arXiv",
@@ -886,7 +886,7 @@ export class DocumentView extends DocComponent<DocumentViewProps, Document>(Docu
}
}));
const doclist = ClientRecommender.Instance.computeSimilarities("cosine");
- let recDocs: { preview: Doc, score: number }[] = [];
+ const recDocs: { preview: Doc, score: number }[] = [];
// tslint:disable-next-line: prefer-for-of
for (let i = 0; i < doclist.length; i++) {
recDocs.push({ preview: doclist[i].actualDoc, score: doclist[i].score });
@@ -970,7 +970,7 @@ export class DocumentView extends DocComponent<DocumentViewProps, Document>(Docu
childScaling = () => (this.layoutDoc._fitWidth ? this.props.PanelWidth() / this.nativeWidth : this.props.ContentScaling());
@computed get contents() {
TraceMobx();
- return (<DocumentContentsView {...this.props}
+ return (<DocumentContentsView {...OmitKeys(this.props, ['children']).omit}
ContentScaling={this.childScaling}
ChromeHeight={this.chromeHeight}
isSelected={this.isSelected}
@@ -1127,7 +1127,7 @@ export class DocumentView extends DocComponent<DocumentViewProps, Document>(Docu
</> :
this.innards}
</div>;
- { this._showKPQuery ? <KeyphraseQueryView keyphrases={this._queries}></KeyphraseQueryView> : undefined }
+ { this._showKPQuery ? <KeyphraseQueryView keyphrases={this._queries}></KeyphraseQueryView> : undefined; }
}
}
diff --git a/src/client/views/nodes/FormattedTextBox.tsx b/src/client/views/nodes/FormattedTextBox.tsx
index 7f5f8538a..1fa603cbd 100644
--- a/src/client/views/nodes/FormattedTextBox.tsx
+++ b/src/client/views/nodes/FormattedTextBox.tsx
@@ -20,7 +20,7 @@ import { InkTool } from '../../../new_fields/InkField';
import { RichTextField } from "../../../new_fields/RichTextField";
import { RichTextUtils } from '../../../new_fields/RichTextUtils';
import { createSchema, makeInterface } from "../../../new_fields/Schema";
-import { Cast, NumCast, StrCast, BoolCast } from "../../../new_fields/Types";
+import { Cast, NumCast, StrCast, BoolCast, DateCast } from "../../../new_fields/Types";
import { TraceMobx } from '../../../new_fields/util';
import { addStyleSheet, addStyleSheetRule, clearStyleSheetRules, emptyFunction, numberRange, returnOne, Utils, returnTrue } from '../../../Utils';
import { GoogleApiClientUtils, Pulls, Pushes } from '../../apis/google_docs/GoogleApiClientUtils';
@@ -420,11 +420,11 @@ export class FormattedTextBox extends DocAnnotatableComponent<(FieldViewProps &
//this._editorView!.focus();
});
}
- stopDictation = (abort: boolean) => { DictationManager.Controls.stop(!abort); }
+ stopDictation = (abort: boolean) => { DictationManager.Controls.stop(!abort); };
@action
toggleMenubar = () => {
- this.props.Document._chromeStatus = this.props.Document._chromeStatus == "disabled" ? "enabled" : "disabled";
+ this.props.Document._chromeStatus = this.props.Document._chromeStatus === "disabled" ? "enabled" : "disabled";
}
recordBullet = async () => {
@@ -444,12 +444,8 @@ export class FormattedTextBox extends DocAnnotatableComponent<(FieldViewProps &
setCurrentBulletContent = (value: string) => {
if (this._editorView) {
- let state = this._editorView.state;
- let now = Date.now();
- if (NumCast(this.props.Document.recordingStart, -1) === 0) {
- this.props.Document.recordingStart = now = AudioBox.START;
- }
- console.log("NOW = " + (now - AudioBox.START) / 1000);
+ const state = this._editorView.state;
+ const now = Date.now();
let mark = schema.marks.user_mark.create({ userid: Doc.CurrentUserEmail, modified: Math.floor(now / 1000) });
if (!this._break && state.selection.to !== state.selection.from) {
for (let i = state.selection.from; i <= state.selection.to; i++) {
@@ -461,9 +457,9 @@ export class FormattedTextBox extends DocAnnotatableComponent<(FieldViewProps &
}
}
}
+ const recordingStart = DateCast(this.props.Document.recordingStart).date.getTime();
this._break = false;
- console.log("start = " + (mark.attrs.modified * 1000 - AudioBox.START) / 1000);
- value = "" + (mark.attrs.modified * 1000 - AudioBox.START) / 1000 + value;
+ value = "" + (mark.attrs.modified * 1000 - recordingStart) / 1000 + value;
const from = state.selection.from;
const inserted = state.tr.insertText(value).addMark(from, from + value.length + 1, mark);
this._editorView.dispatch(inserted.setSelection(TextSelection.create(inserted.doc, from, from + value.length + 1)));
@@ -867,7 +863,7 @@ export class FormattedTextBox extends DocAnnotatableComponent<(FieldViewProps &
if (this._recording && !e.ctrlKey && e.button === 0) {
this.stopDictation(true);
this._break = true;
- let state = this._editorView!.state;
+ const state = this._editorView!.state;
const to = state.selection.to;
const updated = TextSelection.create(state.doc, to, to);
this._editorView!.dispatch(this._editorView!.state.tr.setSelection(updated).insertText("\n", to));
diff --git a/src/client/views/nodes/FormattedTextBoxComment.tsx b/src/client/views/nodes/FormattedTextBoxComment.tsx
index a3096f60b..61df188f8 100644
--- a/src/client/views/nodes/FormattedTextBoxComment.tsx
+++ b/src/client/views/nodes/FormattedTextBoxComment.tsx
@@ -83,7 +83,7 @@ export class FormattedTextBoxComment {
const keep = e.target && (e.target as any).type === "checkbox" ? true : false;
const textBox = FormattedTextBoxComment.textBox;
if (FormattedTextBoxComment.linkDoc && !keep && textBox) {
- DocumentManager.Instance.FollowLink(FormattedTextBoxComment.linkDoc, textBox.dataDoc,
+ DocumentManager.Instance.FollowLink(FormattedTextBoxComment.linkDoc, textBox.props.Document,
(doc: Doc, maxLocation: string) => textBox.props.addDocTab(doc, e.ctrlKey ? "inTab" : "onRight"));
} else if (textBox && (FormattedTextBoxComment.tooltipText as any).href) {
textBox.props.addDocTab(Docs.Create.WebDocument((FormattedTextBoxComment.tooltipText as any).href, { title: (FormattedTextBoxComment.tooltipText as any).href, _width: 200, _height: 400 }), "onRight");
diff --git a/src/client/views/nodes/PDFBox.tsx b/src/client/views/nodes/PDFBox.tsx
index f47620c24..4076128b2 100644
--- a/src/client/views/nodes/PDFBox.tsx
+++ b/src/client/views/nodes/PDFBox.tsx
@@ -94,11 +94,11 @@ export class PDFBox extends DocAnnotatableComponent<FieldViewProps, PdfDocument>
!this.Document._fitWidth && (this.Document._height = this.Document[WidthSym]() * (nh / nw));
}
- public search = (string: string, fwd: boolean) => { this._pdfViewer?.search(string, fwd); }
- public prevAnnotation = () => { this._pdfViewer?.prevAnnotation(); }
- public nextAnnotation = () => { this._pdfViewer?.nextAnnotation(); }
- public backPage = () => { this._pdfViewer!.gotoPage((this.Document.curPage || 1) - 1); }
- public forwardPage = () => { this._pdfViewer!.gotoPage((this.Document.curPage || 1) + 1); }
+ public search = (string: string, fwd: boolean) => { this._pdfViewer?.search(string, fwd); };
+ public prevAnnotation = () => { this._pdfViewer?.prevAnnotation(); };
+ public nextAnnotation = () => { this._pdfViewer?.nextAnnotation(); };
+ public backPage = () => { this._pdfViewer!.gotoPage((this.Document.curPage || 1) - 1); };
+ public forwardPage = () => { this._pdfViewer!.gotoPage((this.Document.curPage || 1) + 1); };
public gotoPage = (p: number) => { this._pdfViewer!.gotoPage(p); };
@undoBatch
diff --git a/src/client/views/nodes/WebBox.tsx b/src/client/views/nodes/WebBox.tsx
index c169d9423..2f8b6167f 100644
--- a/src/client/views/nodes/WebBox.tsx
+++ b/src/client/views/nodes/WebBox.tsx
@@ -36,7 +36,7 @@ export class WebBox extends DocAnnotatableComponent<FieldViewProps, WebDocument>
public static LayoutString(fieldKey: string) { return FieldView.LayoutString(WebBox, fieldKey); }
@observable private collapsed: boolean = true;
- @observable private url: string = "";
+ @observable private url: string = "";
private _longPressSecondsHack?: NodeJS.Timeout;
private _iframeRef = React.createRef<HTMLIFrameElement>();
diff --git a/src/new_fields/Schema.ts b/src/new_fields/Schema.ts
index 3f0ff4284..72bce283d 100644
--- a/src/new_fields/Schema.ts
+++ b/src/new_fields/Schema.ts
@@ -33,7 +33,7 @@ export function makeInterface<T extends Interface[]>(...schemas: T): InterfaceFu
get(target: any, prop, receiver) {
const field = receiver.doc[prop];
if (prop in schema) {
- const desc = (schema as any)[prop];
+ const desc = prop === "proto" ? Doc : (schema as any)[prop]; // bcz: proto doesn't appear in schemas ... maybe it should?
if (typeof desc === "object" && "defaultVal" in desc && "type" in desc) {//defaultSpec
return Cast(field, desc.type, desc.defaultVal);
} else if (typeof desc === "function" && !ObjectField.isPrototypeOf(desc) && !RefField.isPrototypeOf(desc)) {
diff --git a/src/server/ApiManagers/UploadManager.ts b/src/server/ApiManagers/UploadManager.ts
index 50a759c9d..42e33ece0 100644
--- a/src/server/ApiManagers/UploadManager.ts
+++ b/src/server/ApiManagers/UploadManager.ts
@@ -19,7 +19,8 @@ export enum Directory {
videos = "videos",
pdfs = "pdfs",
text = "text",
- pdf_thumbnails = "pdf_thumbnails"
+ pdf_thumbnails = "pdf_thumbnails",
+ audio = "audio"
}
export function serverPathToFile(directory: Directory, filename: string) {
@@ -61,9 +62,18 @@ export default class UploadManager extends ApiManager {
});
register({
+ method: Method.GET,
+ subscription: "/hello",
+ secureHandler: ({ req, res }) => {
+ res.send("<h1>world!</h1>"); // NOTE(review): debug/scaffold route — confirm /hello is intended to ship
+ }
+ });
+
+ register({
method: Method.POST,
subscription: "/uploadRemoteImage",
secureHandler: async ({ req, res }) => {
+
const { sources } = req.body;
if (Array.isArray(sources)) {
const results = await Promise.all(sources.map(source => DashUploadUtils.UploadImage(source)));
@@ -77,6 +87,7 @@ export default class UploadManager extends ApiManager {
method: Method.POST,
subscription: "/uploadDoc",
secureHandler: ({ req, res }) => {
+
const form = new formidable.IncomingForm();
form.keepExtensions = true;
// let path = req.body.path;
@@ -181,6 +192,7 @@ export default class UploadManager extends ApiManager {
method: Method.POST,
subscription: "/inspectImage",
secureHandler: async ({ req, res }) => {
+
const { source } = req.body;
if (typeof source === "string") {
return res.send(await DashUploadUtils.InspectImage(source));
diff --git a/src/server/ApiManagers/UtilManager.ts b/src/server/ApiManagers/UtilManager.ts
index d18529cf2..ad8119bf4 100644
--- a/src/server/ApiManagers/UtilManager.ts
+++ b/src/server/ApiManagers/UtilManager.ts
@@ -3,11 +3,11 @@ import { Method } from "../RouteManager";
import { exec } from 'child_process';
import RouteSubscriber from "../RouteSubscriber";
import { red } from "colors";
-import { IBM_Recommender } from "../../client/apis/IBM_Recommender";
-import { Recommender } from "../Recommender";
+// import { IBM_Recommender } from "../../client/apis/IBM_Recommender";
+// import { Recommender } from "../Recommender";
-const recommender = new Recommender();
-recommender.testModel();
+// const recommender = new Recommender();
+// recommender.testModel();
import executeImport from "../../scraping/buxton/final/BuxtonImporter";
export default class UtilManager extends ApiManager {
@@ -27,25 +27,25 @@ export default class UtilManager extends ApiManager {
}
});
- register({
- method: Method.POST,
- subscription: "/IBMAnalysis",
- secureHandler: async ({ req, res }) => res.send(await IBM_Recommender.analyze(req.body))
- });
+ // register({
+ // method: Method.POST,
+ // subscription: "/IBMAnalysis",
+ // secureHandler: async ({ req, res }) => res.send(await IBM_Recommender.analyze(req.body))
+ // });
- register({
- method: Method.POST,
- subscription: "/recommender",
- secureHandler: async ({ req, res }) => {
- const keyphrases = req.body.keyphrases;
- const wordvecs = await recommender.vectorize(keyphrases);
- let embedding: Float32Array = new Float32Array();
- if (wordvecs && wordvecs.dataSync()) {
- embedding = wordvecs.dataSync() as Float32Array;
- }
- res.send(embedding);
- }
- });
+ // register({
+ // method: Method.POST,
+ // subscription: "/recommender",
+ // secureHandler: async ({ req, res }) => {
+ // const keyphrases = req.body.keyphrases;
+ // const wordvecs = await recommender.vectorize(keyphrases);
+ // let embedding: Float32Array = new Float32Array();
+ // if (wordvecs && wordvecs.dataSync()) {
+ // embedding = wordvecs.dataSync() as Float32Array;
+ // }
+ // res.send(embedding);
+ // }
+ // });
register({
diff --git a/src/server/DashUploadUtils.ts b/src/server/DashUploadUtils.ts
index cc3dd75a4..cf78af60a 100644
--- a/src/server/DashUploadUtils.ts
+++ b/src/server/DashUploadUtils.ts
@@ -53,7 +53,7 @@ export namespace DashUploadUtils {
const size = "content-length";
const type = "content-type";
- const { imageFormats, videoFormats, applicationFormats } = AcceptibleMedia;
+ const { imageFormats, videoFormats, applicationFormats, audioFormats } = AcceptibleMedia;
export async function upload(file: File): Promise<Upload.FileResponse> {
const { type, path, name } = file;
@@ -76,14 +76,22 @@ export namespace DashUploadUtils {
if (applicationFormats.includes(format)) {
return UploadPdf(file);
}
- default: // "blob":
- return MoveParsedFile(file, Directory.videos);
+ case "audio":
+ if (audioFormats.includes(format)) {
+ return MoveParsedFile(file, Directory.audio);
+ }
}
console.log(red(`Ignoring unsupported file (${name}) with upload type (${type}).`));
return { source: file, result: new Error(`Could not upload unsupported file (${name}) with upload type (${type}).`) };
}
+ async function UploadAudio(file: File) { // NOTE(review): dead code — the "audio" case above calls MoveParsedFile directly; either use this helper there or drop it (sourcePath below is also unused)
+ const { path: sourcePath } = file;
+
+ return MoveParsedFile(file, Directory.audio);
+ }
+
async function UploadPdf(file: File) {
const { path: sourcePath } = file;
const dataBuffer = readFileSync(sourcePath);
@@ -94,6 +102,7 @@ export namespace DashUploadUtils {
const writeStream = createWriteStream(serverPathToFile(Directory.text, textFilename));
writeStream.write(result.text, error => error ? reject(error) : resolve());
});
+ // NOTE(review): removed console.log(MoveParsedFile(...)) — it invoked the move a second time and logged an unresolved Promise; the return below performs the move.
return MoveParsedFile(file, Directory.pdfs);
}
@@ -197,8 +206,10 @@ export namespace DashUploadUtils {
accessPaths: {
agnostic: getAccessPaths(destination, name)
}
+
}
- });
+ }
+ );
});
});
}
diff --git a/src/server/Recommender.ts b/src/server/Recommender.ts
index 1d2cb3858..aacdb4053 100644
--- a/src/server/Recommender.ts
+++ b/src/server/Recommender.ts
@@ -1,137 +1,137 @@
-//import { Doc } from "../new_fields/Doc";
-//import { StrCast } from "../new_fields/Types";
-//import { List } from "../new_fields/List";
-//import { CognitiveServices } from "../client/cognitive_services/CognitiveServices";
-
-// var w2v = require('word2vec');
-var assert = require('assert');
-var arxivapi = require('arxiv-api-node');
-import requestPromise = require("request-promise");
-import * as use from '@tensorflow-models/universal-sentence-encoder';
-import { Tensor } from "@tensorflow/tfjs-core/dist/tensor";
-require('@tensorflow/tfjs-node');
-
-//http://gnuwin32.sourceforge.net/packages/make.htm
-
-export class Recommender {
-
- private _model: any;
- static Instance: Recommender;
- private dimension: number = 0;
- private choice: string = ""; // Tensorflow or Word2Vec
-
- constructor() {
- console.log("creating recommender...");
- Recommender.Instance = this;
- }
-
- /***
- * Loads pre-trained model from TF
- */
-
- public async loadTFModel() {
- let self = this;
- return new Promise(res => {
- use.load().then(model => {
- self.choice = "TF";
- self._model = model;
- self.dimension = 512;
- res(model);
- });
- }
-
- );
- }
-
- /***
- * Loads pre-trained model from word2vec
- */
-
- // private loadModel(): Promise<any> {
- // let self = this;
- // return new Promise(res => {
- // w2v.loadModel("./node_modules/word2vec/examples/fixtures/vectors.txt", function (err: any, model: any) {
- // self.choice = "WV";
- // self._model = model;
- // self.dimension = model.size;
- // res(model);
- // });
- // });
- // }
-
- /***
- * Testing
- */
-
- public async testModel() {
- if (!this._model) {
- await this.loadTFModel();
- }
- if (this._model) {
- if (this.choice === "WV") {
- let similarity = this._model.similarity('father', 'mother');
- console.log(similarity);
- }
- else if (this.choice === "TF") {
- const model = this._model as use.UniversalSentenceEncoder;
- // Embed an array of sentences.
- const sentences = [
- 'Hello.',
- 'How are you?'
- ];
- const embeddings = await this.vectorize(sentences);
- if (embeddings) embeddings.print(true /*verbose*/);
- // model.embed(sentences).then(embeddings => {
- // // `embeddings` is a 2D tensor consisting of the 512-dimensional embeddings for each sentence.
- // // So in this example `embeddings` has the shape [2, 512].
- // embeddings.print(true /* verbose */);
- // });
- }
- }
- else {
- console.log("model not found :(");
- }
- }
-
- /***
- * Uses model to convert words to vectors
- */
-
- public async vectorize(text: string[]): Promise<Tensor | undefined> {
- if (!this._model) {
- await this.loadTFModel();
- }
- if (this._model) {
- if (this.choice === "WV") {
- let word_vecs = this._model.getVectors(text);
- return word_vecs;
- }
- else if (this.choice === "TF") {
- const model = this._model as use.UniversalSentenceEncoder;
- return new Promise<Tensor>(res => {
- model.embed(text).then(embeddings => {
- res(embeddings);
- });
- });
-
- }
- }
- }
-
- // public async trainModel() {
- // console.log("phrasing...");
- // w2v.word2vec("./node_modules/word2vec/examples/eng_news-typical_2016_1M-sentences.txt", './node_modules/word2vec/examples/my_phrases.txt', {
- // cbow: 1,
- // size: 200,
- // window: 8,
- // negative: 25,
- // hs: 0,
- // sample: 1e-4,
- // threads: 20,
- // iter: 200,
- // minCount: 2
- // });
- // console.log("phrased!!!");
- // }
-
-}
+// //import { Doc } from "../new_fields/Doc";
+// //import { StrCast } from "../new_fields/Types";
+// //import { List } from "../new_fields/List";
+// //import { CognitiveServices } from "../client/cognitive_services/CognitiveServices";
+
+// // var w2v = require('word2vec');
+// var assert = require('assert');
+// var arxivapi = require('arxiv-api-node');
+// import requestPromise = require("request-promise");
+// import * as use from '@tensorflow-models/universal-sentence-encoder';
+// import { Tensor } from "@tensorflow/tfjs-core/dist/tensor";
+// require('@tensorflow/tfjs-node');
+
+// //http://gnuwin32.sourceforge.net/packages/make.htm
+
+// export class Recommender {
+
+// private _model: any;
+// static Instance: Recommender;
+// private dimension: number = 0;
+// private choice: string = ""; // Tensorflow or Word2Vec
+
+// constructor() {
+// console.log("creating recommender...");
+// Recommender.Instance = this;
+// }
+
+// /***
+// * Loads pre-trained model from TF
+// */
+
+// public async loadTFModel() {
+// let self = this;
+// return new Promise(res => {
+// use.load().then(model => {
+// self.choice = "TF";
+// self._model = model;
+// self.dimension = 512;
+// res(model);
+// });
+// }
+
+// );
+// }
+
+// /***
+// * Loads pre-trained model from word2vec
+// */
+
+// // private loadModel(): Promise<any> {
+// // let self = this;
+// // return new Promise(res => {
+// // w2v.loadModel("./node_modules/word2vec/examples/fixtures/vectors.txt", function (err: any, model: any) {
+// // self.choice = "WV";
+// // self._model = model;
+// // self.dimension = model.size;
+// // res(model);
+// // });
+// // });
+// // }
+
+// /***
+// * Testing
+// */
+
+// public async testModel() {
+// if (!this._model) {
+// await this.loadTFModel();
+// }
+// if (this._model) {
+// if (this.choice === "WV") {
+// let similarity = this._model.similarity('father', 'mother');
+// console.log(similarity);
+// }
+// else if (this.choice === "TF") {
+// const model = this._model as use.UniversalSentenceEncoder;
+// // Embed an array of sentences.
+// const sentences = [
+// 'Hello.',
+// 'How are you?'
+// ];
+// const embeddings = await this.vectorize(sentences);
+// if (embeddings) embeddings.print(true /*verbose*/);
+// // model.embed(sentences).then(embeddings => {
+// // // `embeddings` is a 2D tensor consisting of the 512-dimensional embeddings for each sentence.
+// // // So in this example `embeddings` has the shape [2, 512].
+// // embeddings.print(true /* verbose */);
+// // });
+// }
+// }
+// else {
+// console.log("model not found :(");
+// }
+// }
+
+// /***
+// * Uses model to convert words to vectors
+// */
+
+// public async vectorize(text: string[]): Promise<Tensor | undefined> {
+// if (!this._model) {
+// await this.loadTFModel();
+// }
+// if (this._model) {
+// if (this.choice === "WV") {
+// let word_vecs = this._model.getVectors(text);
+// return word_vecs;
+// }
+// else if (this.choice === "TF") {
+// const model = this._model as use.UniversalSentenceEncoder;
+// return new Promise<Tensor>(res => {
+// model.embed(text).then(embeddings => {
+// res(embeddings);
+// });
+// });
+
+// }
+// }
+// }
+
+// // public async trainModel() {
+// // console.log("phrasing...");
+// // w2v.word2vec("./node_modules/word2vec/examples/eng_news-typical_2016_1M-sentences.txt", './node_modules/word2vec/examples/my_phrases.txt', {
+// // cbow: 1,
+// // size: 200,
+// // window: 8,
+// // negative: 25,
+// // hs: 0,
+// // sample: 1e-4,
+// // threads: 20,
+// // iter: 200,
+// // minCount: 2
+// // });
+// // console.log("phrased!!!");
+// // }
+
+// }
diff --git a/src/server/SharedMediaTypes.ts b/src/server/SharedMediaTypes.ts
index 185e787cc..3d3683912 100644
--- a/src/server/SharedMediaTypes.ts
+++ b/src/server/SharedMediaTypes.ts
@@ -10,6 +10,7 @@ export namespace AcceptibleMedia {
export const imageFormats = [...pngs, ...jpgs, ...gifs, ...webps, ...tiffs];
export const videoFormats = [".mov", ".mp4"];
export const applicationFormats = [".pdf"];
+ export const audioFormats = [".wav", ".mp3", ".flac", ".au", ".aiff", ".m4a", ".webm;codecs=opus"]; // NOTE(review): ".webm;codecs=opus" is a MIME-type fragment, not a file extension — verify the extension-matching code ever sees this value
}
export namespace Upload {