Diffstat (limited to 'src/server')
-rw-r--r--  src/server/RouteStore.ts                                   1
-rw-r--r--  src/server/authentication/models/current_user_utils.ts   70
-rw-r--r--  src/server/database.ts                                   124
-rw-r--r--  src/server/downsize.ts                                    40
-rw-r--r--  src/server/index.ts                                       90
-rw-r--r--  src/server/public/files/.gitignore                         3
-rw-r--r--  src/server/remapUrl.ts                                    59
-rw-r--r--  src/server/updateSearch.ts                               101
8 files changed, 394 insertions(+), 94 deletions(-)
diff --git a/src/server/RouteStore.ts b/src/server/RouteStore.ts
index fdf5b6a5c..c4af5cdaa 100644
--- a/src/server/RouteStore.ts
+++ b/src/server/RouteStore.ts
@@ -11,6 +11,7 @@ export enum RouteStore {
// UPLOAD AND STATIC FILE SERVING
public = "/public",
upload = "/upload",
+ dataUriToImage = "/uploadURI",
images = "/images",
// USER AND WORKSPACES
diff --git a/src/server/authentication/models/current_user_utils.ts b/src/server/authentication/models/current_user_utils.ts
index aef2d3f4a..e5b7a025b 100644
--- a/src/server/authentication/models/current_user_utils.ts
+++ b/src/server/authentication/models/current_user_utils.ts
@@ -1,21 +1,17 @@
-import { computed, observable, action, runInAction } from "mobx";
+import { action, computed, observable, runInAction } from "mobx";
import * as rp from 'request-promise';
+import { DocServer } from "../../../client/DocServer";
import { Docs } from "../../../client/documents/Documents";
-import { Attribute, AttributeGroup, Catalog, Schema, AggregateFunction } from "../../../client/northstar/model/idea/idea";
+import { Gateway, NorthstarSettings } from "../../../client/northstar/manager/Gateway";
+import { Attribute, AttributeGroup, Catalog, Schema } from "../../../client/northstar/model/idea/idea";
import { ArrayUtil } from "../../../client/northstar/utils/ArrayUtil";
-import { RouteStore } from "../../RouteStore";
-import { DocServer } from "../../../client/DocServer";
-import { Doc, Opt, Field } from "../../../new_fields/Doc";
-import { List } from "../../../new_fields/List";
import { CollectionViewType } from "../../../client/views/collections/CollectionBaseView";
-import { CollectionTreeView } from "../../../client/views/collections/CollectionTreeView";
import { CollectionView } from "../../../client/views/collections/CollectionView";
-import { NorthstarSettings, Gateway } from "../../../client/northstar/manager/Gateway";
-import { AttributeTransformationModel } from "../../../client/northstar/core/attribute/AttributeTransformationModel";
-import { ColumnAttributeModel } from "../../../client/northstar/core/attribute/AttributeModel";
-import { HistogramOperation } from "../../../client/northstar/operations/HistogramOperation";
-import { Cast, PromiseValue } from "../../../new_fields/Types";
+import { Doc } from "../../../new_fields/Doc";
+import { List } from "../../../new_fields/List";
import { listSpec } from "../../../new_fields/Schema";
+import { Cast } from "../../../new_fields/Types";
+import { RouteStore } from "../../RouteStore";
export class CurrentUserUtils {
private static curr_email: string;
@@ -37,7 +33,7 @@ export class CurrentUserUtils {
doc.title = this.email;
doc.data = new List<Doc>();
doc.excludeFromLibrary = true;
- doc.optionalRightCollection = Docs.SchemaDocument(["title"], [], { title: "Pending documents" });
+ doc.optionalRightCollection = Docs.StackingDocument([], { title: "New mobile uploads" });
// doc.library = Docs.TreeDocument([doc], { title: `Library: ${CurrentUserUtils.email}` });
// (doc.library as Doc).excludeFromLibrary = true;
return doc;
@@ -68,7 +64,7 @@ export class CurrentUserUtils {
const extraSchemas = Cast(CurrentUserUtils.UserDocument.DBSchemas, listSpec("string"), []);
let extras = await Promise.all(extraSchemas.map(sc => Gateway.Instance.GetSchema("", sc)));
let catprom = CurrentUserUtils.SetNorthstarCatalog(await Gateway.Instance.GetCatalog(), extras);
- if (catprom) await Promise.all(catprom);
+ // if (catprom) await Promise.all(catprom);
} catch (e) {
}
@@ -83,29 +79,29 @@ export class CurrentUserUtils {
@action static SetNorthstarCatalog(ctlog: Catalog, extras: Catalog[]) {
CurrentUserUtils.NorthstarDBCatalog = ctlog;
- if (ctlog && ctlog.schemas) {
- extras.map(ex => ctlog.schemas!.push(ex));
- return ctlog.schemas.map(async schema => {
- let schemaDocuments: Doc[] = [];
- let attributesToBecomeDocs = CurrentUserUtils.GetAllNorthstarColumnAttributes(schema);
- await Promise.all(attributesToBecomeDocs.reduce((promises, attr) => {
- promises.push(DocServer.GetRefField(attr.displayName! + ".alias").then(action((field: Opt<Field>) => {
- if (field instanceof Doc) {
- schemaDocuments.push(field);
- } else {
- var atmod = new ColumnAttributeModel(attr);
- let histoOp = new HistogramOperation(schema.displayName!,
- new AttributeTransformationModel(atmod, AggregateFunction.None),
- new AttributeTransformationModel(atmod, AggregateFunction.Count),
- new AttributeTransformationModel(atmod, AggregateFunction.Count));
- schemaDocuments.push(Docs.HistogramDocument(histoOp, { width: 200, height: 200, title: attr.displayName! }));
- }
- })));
- return promises;
- }, [] as Promise<void>[]));
- return CurrentUserUtils._northstarSchemas.push(Docs.TreeDocument(schemaDocuments, { width: 50, height: 100, title: schema.displayName! }));
- });
- }
+ // if (ctlog && ctlog.schemas) {
+ // extras.map(ex => ctlog.schemas!.push(ex));
+ // return ctlog.schemas.map(async schema => {
+ // let schemaDocuments: Doc[] = [];
+ // let attributesToBecomeDocs = CurrentUserUtils.GetAllNorthstarColumnAttributes(schema);
+ // await Promise.all(attributesToBecomeDocs.reduce((promises, attr) => {
+ // promises.push(DocServer.GetRefField(attr.displayName! + ".alias").then(action((field: Opt<Field>) => {
+ // if (field instanceof Doc) {
+ // schemaDocuments.push(field);
+ // } else {
+ // var atmod = new ColumnAttributeModel(attr);
+ // let histoOp = new HistogramOperation(schema.displayName!,
+ // new AttributeTransformationModel(atmod, AggregateFunction.None),
+ // new AttributeTransformationModel(atmod, AggregateFunction.Count),
+ // new AttributeTransformationModel(atmod, AggregateFunction.Count));
+ // schemaDocuments.push(Docs.HistogramDocument(histoOp, { width: 200, height: 200, title: attr.displayName! }));
+ // }
+ // })));
+ // return promises;
+ // }, [] as Promise<void>[]));
+ // return CurrentUserUtils._northstarSchemas.push(Docs.TreeDocument(schemaDocuments, { width: 50, height: 100, title: schema.displayName! }));
+ // });
+ // }
}
public static set NorthstarDBCatalog(ctlog: Catalog | undefined) { this._northstarCatalog = ctlog; }
diff --git a/src/server/database.ts b/src/server/database.ts
index 69005d2d3..70b3efced 100644
--- a/src/server/database.ts
+++ b/src/server/database.ts
@@ -8,9 +8,13 @@ export class Database {
private url = 'mongodb://localhost:27017/Dash';
private currentWrites: { [id: string]: Promise<void> } = {};
private db?: mongodb.Db;
+ private onConnect: (() => void)[] = [];
constructor() {
- this.MongoClient.connect(this.url, (err, client) => this.db = client.db());
+ this.MongoClient.connect(this.url, (err, client) => {
+ this.db = client.db();
+ this.onConnect.forEach(fn => fn());
+ });
}
public update(id: string, value: any, callback: () => void, upsert = true, collectionName = Database.DocumentsCollection) {
@@ -32,66 +36,98 @@ export class Database {
};
newProm = prom ? prom.then(run) : run();
this.currentWrites[id] = newProm;
+ } else {
+ this.onConnect.push(() => this.update(id, value, callback, upsert, collectionName));
}
}
public delete(id: string, collectionName = Database.DocumentsCollection) {
- this.db && this.db.collection(collectionName).remove({ id: id });
+ if (this.db) {
+ this.db.collection(collectionName).remove({ id: id });
+ } else {
+ this.onConnect.push(() => this.delete(id, collectionName));
+ }
}
public deleteAll(collectionName = Database.DocumentsCollection): Promise<any> {
- return new Promise(res =>
- this.db && this.db.collection(collectionName).deleteMany({}, res));
+ return new Promise(res => {
+ if (this.db) {
+ this.db.collection(collectionName).deleteMany({}, res);
+ } else {
+ this.onConnect.push(() => this.db && this.db.collection(collectionName).deleteMany({}, res));
+ }
+ });
}
public insert(value: any, collectionName = Database.DocumentsCollection) {
- if (!this.db) { return; }
- if ("id" in value) {
- value._id = value.id;
- delete value.id;
- }
- const id = value._id;
- const collection = this.db.collection(collectionName);
- const prom = this.currentWrites[id];
- let newProm: Promise<void>;
- const run = (): Promise<void> => {
- return new Promise<void>(resolve => {
- collection.insertOne(value, (err, res) => {
- if (this.currentWrites[id] === newProm) {
- delete this.currentWrites[id];
- }
- resolve();
+ if (this.db) {
+ if ("id" in value) {
+ value._id = value.id;
+ delete value.id;
+ }
+ const id = value._id;
+ const collection = this.db.collection(collectionName);
+ const prom = this.currentWrites[id];
+ let newProm: Promise<void>;
+ const run = (): Promise<void> => {
+ return new Promise<void>(resolve => {
+ collection.insertOne(value, (err, res) => {
+ if (this.currentWrites[id] === newProm) {
+ delete this.currentWrites[id];
+ }
+ resolve();
+ });
});
- });
- };
- newProm = prom ? prom.then(run) : run();
- this.currentWrites[id] = newProm;
+ };
+ newProm = prom ? prom.then(run) : run();
+ this.currentWrites[id] = newProm;
+ } else {
+ this.onConnect.push(() => this.insert(value, collectionName));
+ }
}
public getDocument(id: string, fn: (result?: Transferable) => void, collectionName = Database.DocumentsCollection) {
- this.db && this.db.collection(collectionName).findOne({ _id: id }, (err, result) => {
- if (result) {
- result.id = result._id;
- delete result._id;
- fn(result);
- } else {
- fn(undefined);
- }
- });
+ if (this.db) {
+ this.db.collection(collectionName).findOne({ _id: id }, (err, result) => {
+ if (result) {
+ result.id = result._id;
+ delete result._id;
+ fn(result);
+ } else {
+ fn(undefined);
+ }
+ });
+ } else {
+ this.onConnect.push(() => this.getDocument(id, fn, collectionName));
+ }
}
public getDocuments(ids: string[], fn: (result: Transferable[]) => void, collectionName = Database.DocumentsCollection) {
- this.db && this.db.collection(collectionName).find({ _id: { "$in": ids } }).toArray((err, docs) => {
- if (err) {
- console.log(err.message);
- console.log(err.errmsg);
- }
- fn(docs.map(doc => {
- doc.id = doc._id;
- delete doc._id;
- return doc;
- }));
- });
+ if (this.db) {
+ this.db.collection(collectionName).find({ _id: { "$in": ids } }).toArray((err, docs) => {
+ if (err) {
+ console.log(err.message);
+ console.log(err.errmsg);
+ }
+ fn(docs.map(doc => {
+ doc.id = doc._id;
+ delete doc._id;
+ return doc;
+ }));
+ });
+ } else {
+ this.onConnect.push(() => this.getDocuments(ids, fn, collectionName));
+ }
+ }
+
+ public query(query: any): Promise<mongodb.Cursor> {
+ if (this.db) {
+ return Promise.resolve<mongodb.Cursor>(this.db.collection('newDocuments').find(query));
+ } else {
+ return new Promise<mongodb.Cursor>(res => {
+ this.onConnect.push(() => res(this.query(query)));
+ });
+ }
}
public print() {
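The database.ts changes replace the bare "this.db &&" guards with an explicit connection queue: any call that arrives before MongoDB has connected is pushed onto onConnect and replayed once the client is ready, and the new query() helper hands back a cursor promise the same way. A minimal sketch of that deferral pattern, assuming the same mongodb 3.x callback API (the class and method names here are illustrative, not part of the commit):

    import * as mongodb from 'mongodb';

    class DeferredDb {
        private db?: mongodb.Db;
        // Thunks queued while the connection is still pending; replayed on connect.
        private onConnect: (() => void)[] = [];

        constructor(url: string) {
            mongodb.MongoClient.connect(url, (err, client) => {
                this.db = client.db();
                this.onConnect.forEach(fn => fn());
                this.onConnect = [];
            });
        }

        // Each public method checks this.db first; if the connection is not ready,
        // the call re-queues itself so it runs (and resolves) after connect fires.
        public count(collectionName: string): Promise<number> {
            if (this.db) {
                return this.db.collection(collectionName).countDocuments();
            }
            return new Promise(res => this.onConnect.push(() => res(this.count(collectionName))));
        }
    }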
diff --git a/src/server/downsize.ts b/src/server/downsize.ts
new file mode 100644
index 000000000..ed68fbecc
--- /dev/null
+++ b/src/server/downsize.ts
@@ -0,0 +1,40 @@
+import * as sharp from 'sharp';
+import * as fs from 'fs';
+
+const folder = "./src/server/public/files/";
+const pngTypes = ["png", "PNG"];
+const jpgTypes = ["jpg", "JPG", "jpeg", "JPEG"];
+const smallResizer = sharp().resize(100);
+fs.readdir(folder, async (err, files) => {
+ if (err) {
+ console.log(err);
+ return;
+ }
+ // files.forEach(file => {
+ // if (file.includes("_s") || file.includes("_m") || file.includes("_l")) {
+ // fs.unlink(folder + file, () => { });
+ // }
+ // });
+ for (const file of files) {
+ const filesplit = file.split(".");
+ let resizers = [
+ { resizer: sharp().resize(100, undefined, { withoutEnlargement: true }), suffix: "_s" },
+ { resizer: sharp().resize(400, undefined, { withoutEnlargement: true }), suffix: "_m" },
+ { resizer: sharp().resize(900, undefined, { withoutEnlargement: true }), suffix: "_l" },
+ ];
+ if (pngTypes.some(type => file.endsWith(type))) {
+ resizers.forEach(element => {
+ element.resizer = element.resizer.png();
+ });
+ } else if (jpgTypes.some(type => file.endsWith(type))) {
+ resizers.forEach(element => {
+ element.resizer = element.resizer.jpeg();
+ });
+ } else {
+ continue;
+ }
+ resizers.forEach(resizer => {
+ fs.createReadStream(folder + file).pipe(resizer.resizer).pipe(fs.createWriteStream(folder + filesplit[0] + resizer.suffix + "." + filesplit[1]));
+ });
+ }
+});
\ No newline at end of file
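downsize.ts is a one-off script that walks public/files and, for every png or jpeg it finds, writes width-capped _s (100px), _m (400px), and _l (900px) variants next to the original; withoutEnlargement keeps smaller images from being upscaled, and files that are neither png nor jpeg are skipped. A stripped-down sketch of the per-file pipeline it runs (the input path is illustrative):

    import * as fs from 'fs';
    import * as sharp from 'sharp';

    // Pipe the original through a width-limited sharp transform and write the
    // "_m" (medium) variant alongside it; downsize.ts repeats this for _s and _l.
    const src = "./src/server/public/files/example.jpg";   // illustrative file name
    fs.createReadStream(src)
        .pipe(sharp().resize(400, undefined, { withoutEnlargement: true }).jpeg())
        .pipe(fs.createWriteStream("./src/server/public/files/example_m.jpg"));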
diff --git a/src/server/index.ts b/src/server/index.ts
index d19c65e0a..fd66c90b4 100644
--- a/src/server/index.ts
+++ b/src/server/index.ts
@@ -6,6 +6,8 @@ import * as session from 'express-session';
import * as expressValidator from 'express-validator';
import * as formidable from 'formidable';
import * as fs from 'fs';
+import * as sharp from 'sharp';
+const imageDataUri = require('image-data-uri');
import * as mobileDetect from 'mobile-detect';
import { ObservableMap } from 'mobx';
import * as passport from 'passport';
@@ -58,7 +60,7 @@ app.use(session({
app.use(flash());
app.use(expressFlash());
-app.use(bodyParser.json());
+app.use(bodyParser.json({ limit: "10mb" }));
app.use(bodyParser.urlencoded({ extended: true }));
app.use(expressValidator());
app.use(passport.initialize());
@@ -166,13 +168,15 @@ addSecureRoute(
RouteStore.getCurrUser
);
+const pngTypes = [".png", ".PNG"];
+const jpgTypes = [".jpg", ".JPG", ".jpeg", ".JPEG"];
+const uploadDir = __dirname + "/public/files/";
// SETTERS
-
-addSecureRoute(
- Method.POST,
- (user, res, req) => {
+app.post(
+ RouteStore.upload,
+ (req, res) => {
let form = new formidable.IncomingForm();
- form.uploadDir = __dirname + "/public/files/";
+ form.uploadDir = uploadDir;
form.keepExtensions = true;
// let path = req.body.path;
console.log("upload");
@@ -180,15 +184,76 @@ addSecureRoute(
console.log("parsing");
let names: string[] = [];
for (const name in files) {
- names.push(`/files/` + path.basename(files[name].path));
+ const file = path.basename(files[name].path);
+ const ext = path.extname(file);
+ let resizers = [
+ { resizer: sharp().resize(100, undefined, { withoutEnlargement: true }), suffix: "_s" },
+ { resizer: sharp().resize(400, undefined, { withoutEnlargement: true }), suffix: "_m" },
+ { resizer: sharp().resize(900, undefined, { withoutEnlargement: true }), suffix: "_l" },
+ ];
+ let isImage = false;
+ if (pngTypes.includes(ext)) {
+ resizers.forEach(element => {
+ element.resizer = element.resizer.png();
+ });
+ isImage = true;
+ } else if (jpgTypes.includes(ext)) {
+ resizers.forEach(element => {
+ element.resizer = element.resizer.jpeg();
+ });
+ isImage = true;
+ }
+ if (isImage) {
+ resizers.forEach(resizer => {
+ fs.createReadStream(uploadDir + file).pipe(resizer.resizer).pipe(fs.createWriteStream(uploadDir + file.substring(0, file.length - ext.length) + resizer.suffix + ext));
+ });
+ }
+ names.push(`/files/` + file);
}
res.send(names);
});
+ }
+);
+
+addSecureRoute(
+ Method.POST,
+ (user, res, req) => {
+ const uri = req.body.uri;
+ const filename = req.body.name;
+ if (!uri || !filename) {
+ res.status(401).send("incorrect parameters specified");
+ return;
+ }
+ imageDataUri.outputFile(uri, uploadDir + filename).then((savedName: string) => {
+ const ext = path.extname(savedName);
+ let resizers = [
+ { resizer: sharp().resize(100, undefined, { withoutEnlargement: true }), suffix: "_s" },
+ { resizer: sharp().resize(400, undefined, { withoutEnlargement: true }), suffix: "_m" },
+ { resizer: sharp().resize(900, undefined, { withoutEnlargement: true }), suffix: "_l" },
+ ];
+ let isImage = false;
+ if (pngTypes.includes(ext)) {
+ resizers.forEach(element => {
+ element.resizer = element.resizer.png();
+ });
+ isImage = true;
+ } else if (jpgTypes.includes(ext)) {
+ resizers.forEach(element => {
+ element.resizer = element.resizer.jpeg();
+ });
+ isImage = true;
+ }
+ if (isImage) {
+ resizers.forEach(resizer => {
+ fs.createReadStream(savedName).pipe(resizer.resizer).pipe(fs.createWriteStream(uploadDir + filename + resizer.suffix + ext));
+ });
+ }
+ res.send("/files/" + filename + ext);
+ });
},
undefined,
- RouteStore.upload
+ RouteStore.dataUriToImage
);
-
// AUTHENTICATION
// Sign Up
@@ -283,6 +348,7 @@ function setField(socket: Socket, newValue: Transferable) {
if (newValue.type === Types.Text) {
Search.Instance.updateDocument({ id: newValue.id, data: (newValue as any).data });
console.log("set field");
+ console.log("checking in");
}
}
@@ -299,7 +365,7 @@ const suffixMap: { [type: string]: (string | [string, string | ((json: any) => a
"number": "_n",
"string": "_t",
// "boolean": "_b",
- "image": ["_t", "url"],
+ // "image": ["_t", "url"],
"video": ["_t", "url"],
"pdf": ["_t", "url"],
"audio": ["_t", "url"],
@@ -362,8 +428,8 @@ function UpdateField(socket: Socket, diff: Diff) {
Object.values(suffixMap).forEach(suf => update[key + getSuffix(suf)] = { set: null });
let term = ToSearchTerm(val);
if (term !== undefined) {
- // let { suffix, value } = term;
- // update[key + suffix] = { set: value };
+ let { suffix, value } = term;
+ update[key + suffix] = { set: value };
}
}
if (dynfield) {
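index.ts now produces the same three resized variants at upload time, and it adds a second endpoint, RouteStore.dataUriToImage ("/uploadURI"), which accepts a base64 data URI plus a file name in the JSON body (hence the 10mb body-parser limit), writes the decoded image with image-data-uri, resizes it, and responds with the served path. Note that the plain /upload route moves from addSecureRoute to a bare app.post, while the data-URI route stays behind authentication. A hedged client-side sketch of calling the new route (the helper name is illustrative; the field names match the req.body.uri and req.body.name reads in the handler):

    // Post a data URI to /uploadURI and get back the "/files/..." path it was saved under.
    // Assumes a same-origin browser context so the authenticated session cookie is sent.
    async function uploadDataUri(dataUri: string, name: string): Promise<string> {
        const response = await fetch("/uploadURI", {
            method: "POST",
            headers: { "Content-Type": "application/json" },
            // body-parser is configured with a 10mb limit, so sizeable images fit in JSON.
            body: JSON.stringify({ uri: dataUri, name: name })
        });
        return response.text();   // e.g. "/files/photo.png"
    }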
diff --git a/src/server/public/files/.gitignore b/src/server/public/files/.gitignore
index f59ec20aa..c96a04f00 100644
--- a/src/server/public/files/.gitignore
+++ b/src/server/public/files/.gitignore
@@ -1 +1,2 @@
-*
\ No newline at end of file
+*
+!.gitignore
\ No newline at end of file
diff --git a/src/server/remapUrl.ts b/src/server/remapUrl.ts
new file mode 100644
index 000000000..6f4d6642f
--- /dev/null
+++ b/src/server/remapUrl.ts
@@ -0,0 +1,59 @@
+import { Database } from "./database";
+import { Search } from "./Search";
+import * as path from 'path';
+
+const suffixMap: { [type: string]: true } = {
+ "video": true,
+ "pdf": true,
+ "audio": true,
+ "web": true
+};
+
+async function update() {
+ await new Promise(res => setTimeout(res, 10));
+ console.log("update");
+ const cursor = await Database.Instance.query({});
+ console.log("Cleared");
+ const updates: [string, any][] = [];
+ function updateDoc(doc: any) {
+ if (doc.__type !== "Doc") {
+ return;
+ }
+ const fields = doc.fields;
+ if (!fields) {
+ return;
+ }
+ const update: any = {
+ };
+ let dynfield = false;
+ for (const key in fields) {
+ const value = fields[key];
+ if (value && value.__type && suffixMap[value.__type]) {
+ const url = new URL(value.url);
+ if (url.href.includes("azure")) {
+ dynfield = true;
+
+ update.$set = { ["fields." + key + ".url"]: `${url.protocol}//localhost:1050${url.pathname}` };
+ }
+ }
+ }
+ if (dynfield) {
+ updates.push([doc._id, update]);
+ }
+ }
+ await cursor.forEach(updateDoc);
+ await Promise.all(updates.map(doc => {
+ console.log(doc[0], doc[1]);
+ return new Promise(res => Database.Instance.update(doc[0], doc[1], () => {
+ console.log("wrote " + JSON.stringify(doc[1]));
+ res();
+ }, false, "newDocuments"));
+ }));
+ console.log("Done");
+ // await Promise.all(updates.map(update => {
+ // return limit(() => Search.Instance.updateDocument(update));
+ // }));
+ cursor.close();
+}
+
+update();
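remapUrl.ts is a one-off migration script: it scans every Doc in the newDocuments collection, looks for url fields on video, pdf, audio, and web values that point at an azure host, and rewrites each one to localhost:1050 with a targeted $set applied through Database.Instance.update with upsert disabled. Illustratively, one queued update looks like this (the field key and file name are made up):

    // Shape of a single update produced by updateDoc(): a $set on one nested url field.
    const exampleUpdate = {
        $set: { "fields.data.url": "http://localhost:1050/files/lecture.pdf" }
    };
    // Applied against the raw collection, with upsert disabled:
    // Database.Instance.update("<doc id>", exampleUpdate, () => { }, false, "newDocuments");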
diff --git a/src/server/updateSearch.ts b/src/server/updateSearch.ts
new file mode 100644
index 000000000..de1fd25e1
--- /dev/null
+++ b/src/server/updateSearch.ts
@@ -0,0 +1,101 @@
+import { Database } from "./database";
+import { Cursor } from "mongodb";
+import { Search } from "./Search";
+import pLimit from 'p-limit';
+
+const suffixMap: { [type: string]: (string | [string, string | ((json: any) => any)]) } = {
+ "number": "_n",
+ "string": "_t",
+ // "boolean": "_b",
+ // "image": ["_t", "url"],
+ "video": ["_t", "url"],
+ "pdf": ["_t", "url"],
+ "audio": ["_t", "url"],
+ "web": ["_t", "url"],
+ "date": ["_d", value => new Date(value.date).toISOString()],
+ "proxy": ["_i", "fieldId"],
+ "list": ["_l", list => {
+ const results = [];
+ for (const value of list.fields) {
+ const term = ToSearchTerm(value);
+ if (term) {
+ results.push(term.value);
+ }
+ }
+ return results.length ? results : null;
+ }]
+};
+
+function ToSearchTerm(val: any): { suffix: string, value: any } | undefined {
+ if (val === null || val === undefined) {
+ return;
+ }
+ const type = val.__type || typeof val;
+ let suffix = suffixMap[type];
+ if (!suffix) {
+ return;
+ }
+
+ if (Array.isArray(suffix)) {
+ const accessor = suffix[1];
+ if (typeof accessor === "function") {
+ val = accessor(val);
+ } else {
+ val = val[accessor];
+ }
+ suffix = suffix[0];
+ }
+
+ return { suffix, value: val };
+}
+
+function getSuffix(value: string | [string, any]): string {
+ return typeof value === "string" ? value : value[0];
+}
+
+const limit = pLimit(5);
+async function update() {
+ // await new Promise(res => setTimeout(res, 5));
+ console.log("update");
+ await Search.Instance.clear();
+ const cursor = await Database.Instance.query({});
+ console.log("Cleared");
+ const updates: any[] = [];
+ let numDocs = 0;
+ function updateDoc(doc: any) {
+ numDocs++;
+ if ((numDocs % 50) === 0) {
+ console.log("updateDoc " + numDocs);
+ }
+ console.log("doc " + numDocs);
+ if (doc.__type !== "Doc") {
+ return;
+ }
+ const fields = doc.fields;
+ if (!fields) {
+ return;
+ }
+ const update: any = { id: doc._id };
+ let dynfield = false;
+ for (const key in fields) {
+ const value = fields[key];
+ const term = ToSearchTerm(value);
+ if (term !== undefined) {
+ let { suffix, value } = term;
+ update[key + suffix] = value;
+ dynfield = true;
+ }
+ }
+ if (dynfield) {
+ updates.push(update);
+ console.log(updates.length);
+ }
+ }
+ await cursor.forEach(updateDoc);
+ await Promise.all(updates.map(update => {
+ return limit(() => Search.Instance.updateDocument(update));
+ }));
+ cursor.close();
+}
+
+update();
\ No newline at end of file
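updateSearch.ts rebuilds the search index from scratch: it clears the index, streams every Doc out of Mongo, converts each field into a suffixed dynamic-field key via ToSearchTerm, and pushes the resulting updates through Search.Instance.updateDocument, throttled to five concurrent calls with p-limit. A few worked examples of what ToSearchTerm returns for the field shapes it handles (the values are illustrative):

    // Plain numbers and strings map straight to a typed suffix.
    ToSearchTerm(42);                                       // { suffix: "_n", value: 42 }
    ToSearchTerm("hello");                                  // { suffix: "_t", value: "hello" }
    // Proxy fields index the id of the referenced document.
    ToSearchTerm({ __type: "proxy", fieldId: "abc123" });   // { suffix: "_i", value: "abc123" }
    // Lists flatten their members' search values (null when nothing is indexable).
    ToSearchTerm({ __type: "list", fields: ["a", "b"] });   // { suffix: "_l", value: ["a", "b"] }

So a numeric field named count ends up indexed as count_n, the same suffixed key the UpdateField handler in index.ts now writes on live edits.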