Diffstat (limited to 'src/server')
-rw-r--r--   src/server/RouteStore.ts                                   1
-rw-r--r--   src/server/Search.ts                                      49
-rw-r--r--   src/server/authentication/models/current_user_utils.ts    69
-rw-r--r--   src/server/database.ts                                   115
-rw-r--r--   src/server/downsize.ts                                    40
-rw-r--r--   src/server/index.ts                                      176
-rw-r--r--   src/server/public/files/.gitignore                         3
-rw-r--r--   src/server/remapUrl.ts                                    59
-rw-r--r--   src/server/updateSearch.ts                               101
9 files changed, 558 insertions, 55 deletions
| diff --git a/src/server/RouteStore.ts b/src/server/RouteStore.ts index fdf5b6a5c..c4af5cdaa 100644 --- a/src/server/RouteStore.ts +++ b/src/server/RouteStore.ts @@ -11,6 +11,7 @@ export enum RouteStore {      // UPLOAD AND STATIC FILE SERVING      public = "/public",      upload = "/upload", +    dataUriToImage = "/uploadURI",      images = "/images",      // USER AND WORKSPACES diff --git a/src/server/Search.ts b/src/server/Search.ts new file mode 100644 index 000000000..5ca5578a7 --- /dev/null +++ b/src/server/Search.ts @@ -0,0 +1,49 @@ +import * as rp from 'request-promise'; +import { Database } from './database'; +import { thisExpression } from 'babel-types'; + +export class Search { +    public static Instance = new Search(); +    private url = 'http://localhost:8983/solr/'; + +    public async updateDocument(document: any) { +        try { +            const res = await rp.post(this.url + "dash/update", { +                headers: { 'content-type': 'application/json' }, +                body: JSON.stringify([document]) +            }); +            return res; +        } catch (e) { +            console.warn("Search error: " + e + document); +        } +    } + +    public async search(query: string) { +        try { +            const searchResults = JSON.parse(await rp.get(this.url + "dash/select", { +                qs: { +                    q: query, +                    fl: "id" +                } +            })); +            const fields = searchResults.response.docs; +            const ids = fields.map((field: any) => field.id); +            return ids; +        } catch { +            return []; +        } +    } + +    public async clear() { +        try { +            return await rp.post(this.url + "dash/update", { +                body: { +                    delete: { +                        query: "*:*" +                    } +                }, +                json: true +            }); +        } catch { } +    } +}
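Search.ts above is a thin wrapper around a local Solr core named "dash". A minimal usage sketch follows (assuming that core is reachable at http://localhost:8983/solr/; the field names and ids are invented for illustration):

import { Search } from './Search';

async function demo() {
    // Index a document; field names rely on Solr dynamic-field suffixes
    // (e.g. *_t for text), so "title_t" here is only an example.
    await Search.Instance.updateDocument({ id: "doc1", title_t: "hello world" });

    // search() asks Solr for fl: "id", so only the matching ids come back.
    const ids: string[] = await Search.Instance.search("title_t:hello");
    console.log(ids);

    // Wipe the core, as the server's deleteFields/deleteAll paths do.
    await Search.Instance.clear();
}

demo().catch(console.error);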
\ No newline at end of file diff --git a/src/server/authentication/models/current_user_utils.ts b/src/server/authentication/models/current_user_utils.ts index 5f45d7bcc..e5b7a025b 100644 --- a/src/server/authentication/models/current_user_utils.ts +++ b/src/server/authentication/models/current_user_utils.ts @@ -1,15 +1,17 @@ -import { computed, observable, action, runInAction } from "mobx"; +import { action, computed, observable, runInAction } from "mobx";  import * as rp from 'request-promise'; +import { DocServer } from "../../../client/DocServer";  import { Docs } from "../../../client/documents/Documents"; +import { Gateway, NorthstarSettings } from "../../../client/northstar/manager/Gateway";  import { Attribute, AttributeGroup, Catalog, Schema } from "../../../client/northstar/model/idea/idea";  import { ArrayUtil } from "../../../client/northstar/utils/ArrayUtil"; -import { RouteStore } from "../../RouteStore"; -import { DocServer } from "../../../client/DocServer"; -import { Doc } from "../../../new_fields/Doc"; -import { List } from "../../../new_fields/List";  import { CollectionViewType } from "../../../client/views/collections/CollectionBaseView"; -import { CollectionTreeView } from "../../../client/views/collections/CollectionTreeView";  import { CollectionView } from "../../../client/views/collections/CollectionView"; +import { Doc } from "../../../new_fields/Doc"; +import { List } from "../../../new_fields/List"; +import { listSpec } from "../../../new_fields/Schema"; +import { Cast } from "../../../new_fields/Types"; +import { RouteStore } from "../../RouteStore";  export class CurrentUserUtils {      private static curr_email: string; @@ -31,13 +33,13 @@ export class CurrentUserUtils {          doc.title = this.email;          doc.data = new List<Doc>();          doc.excludeFromLibrary = true; -        doc.optionalRightCollection = Docs.SchemaDocument([], { title: "Pending documents" }); +        doc.optionalRightCollection = Docs.StackingDocument([], { title: "New mobile uploads" });          // doc.library = Docs.TreeDocument([doc], { title: `Library: ${CurrentUserUtils.email}` });          // (doc.library as Doc).excludeFromLibrary = true;          return doc;      } -    public static loadCurrentUser(): Promise<any> { +    public static async loadCurrentUser(): Promise<any> {          let userPromise = rp.get(DocServer.prepend(RouteStore.getCurrUser)).then(response => {              if (response) {                  let obj = JSON.parse(response); @@ -47,7 +49,7 @@ export class CurrentUserUtils {                  throw new Error("There should be a user! Why does Dash think there isn't one?");              }          }); -        let userDocPromise = rp.get(DocServer.prepend(RouteStore.getUserDocumentId)).then(id => { +        let userDocPromise = await rp.get(DocServer.prepend(RouteStore.getUserDocumentId)).then(id => {              if (id) {                  return DocServer.GetRefField(id).then(field =>                      runInAction(() => this.user_document = field instanceof Doc ? field : this.createUserDocument(id))); @@ -55,14 +57,63 @@ export class CurrentUserUtils {                  throw new Error("There should be a user id! 
Why does Dash think there isn't one?");              }          }); +        try { +            const getEnvironment = await fetch("/assets/env.json", { redirect: "follow", method: "GET", credentials: "include" }); +            NorthstarSettings.Instance.UpdateEnvironment(await getEnvironment.json()); +            await Gateway.Instance.ClearCatalog(); +            const extraSchemas = Cast(CurrentUserUtils.UserDocument.DBSchemas, listSpec("string"), []); +            let extras = await Promise.all(extraSchemas.map(sc => Gateway.Instance.GetSchema("", sc))); +            let catprom = CurrentUserUtils.SetNorthstarCatalog(await Gateway.Instance.GetCatalog(), extras); +            // if (catprom) await Promise.all(catprom); +        } catch (e) { + +        }          return Promise.all([userPromise, userDocPromise]);      }      /* Northstar catalog ... really just for testing so this should eventually go away */ +    // --------------- Northstar hooks ------------- / +    static _northstarSchemas: Doc[] = [];      @observable private static _northstarCatalog?: Catalog;      @computed public static get NorthstarDBCatalog() { return this._northstarCatalog; } + +    @action static SetNorthstarCatalog(ctlog: Catalog, extras: Catalog[]) { +        CurrentUserUtils.NorthstarDBCatalog = ctlog; +        // if (ctlog && ctlog.schemas) { +        //     extras.map(ex => ctlog.schemas!.push(ex)); +        //     return ctlog.schemas.map(async schema => { +        //         let schemaDocuments: Doc[] = []; +        //         let attributesToBecomeDocs = CurrentUserUtils.GetAllNorthstarColumnAttributes(schema); +        //         await Promise.all(attributesToBecomeDocs.reduce((promises, attr) => { +        //             promises.push(DocServer.GetRefField(attr.displayName! + ".alias").then(action((field: Opt<Field>) => { +        //                 if (field instanceof Doc) { +        //                     schemaDocuments.push(field); +        //                 } else { +        //                     var atmod = new ColumnAttributeModel(attr); +        //                     let histoOp = new HistogramOperation(schema.displayName!, +        //                         new AttributeTransformationModel(atmod, AggregateFunction.None), +        //                         new AttributeTransformationModel(atmod, AggregateFunction.Count), +        //                         new AttributeTransformationModel(atmod, AggregateFunction.Count)); +        //                     schemaDocuments.push(Docs.HistogramDocument(histoOp, { width: 200, height: 200, title: attr.displayName! })); +        //                 } +        //             }))); +        //             return promises; +        //         }, [] as Promise<void>[])); +        //         return CurrentUserUtils._northstarSchemas.push(Docs.TreeDocument(schemaDocuments, { width: 50, height: 100, title: schema.displayName! 
})); +        //     }); +        // } +    }      public static set NorthstarDBCatalog(ctlog: Catalog | undefined) { this._northstarCatalog = ctlog; } +    public static AddNorthstarSchema(schema: Schema, schemaDoc: Doc) { +        if (this._northstarCatalog && CurrentUserUtils._northstarSchemas) { +            this._northstarCatalog.schemas!.push(schema); +            CurrentUserUtils._northstarSchemas.push(schemaDoc); +            let schemas = Cast(CurrentUserUtils.UserDocument.DBSchemas, listSpec("string"), []); +            schemas.push(schema.displayName!); +            CurrentUserUtils.UserDocument.DBSchemas = new List<string>(schemas); +        } +    }      public static GetNorthstarSchema(name: string): Schema | undefined {          return !this._northstarCatalog || !this._northstarCatalog.schemas ? undefined :              ArrayUtil.FirstOrDefault<Schema>(this._northstarCatalog.schemas, (s: Schema) => s.displayName === name); diff --git a/src/server/database.ts b/src/server/database.ts index 37cfcf3a3..70b3efced 100644 --- a/src/server/database.ts +++ b/src/server/database.ts @@ -8,9 +8,13 @@ export class Database {      private url = 'mongodb://localhost:27017/Dash';      private currentWrites: { [id: string]: Promise<void> } = {};      private db?: mongodb.Db; +    private onConnect: (() => void)[] = [];      constructor() { -        this.MongoClient.connect(this.url, (err, client) => this.db = client.db()); +        this.MongoClient.connect(this.url, (err, client) => { +            this.db = client.db(); +            this.onConnect.forEach(fn => fn()); +        });      }      public update(id: string, value: any, callback: () => void, upsert = true, collectionName = Database.DocumentsCollection) { @@ -22,13 +26,6 @@ export class Database {                  return new Promise<void>(resolve => {                      collection.updateOne({ _id: id }, value, { upsert }                          , (err, res) => { -                            if (err) { -                                console.log(err.message); -                                console.log(err.errmsg); -                            } -                            // if (res) { -                            //     console.log(JSON.stringify(res.result)); -                            // }                              if (this.currentWrites[id] === newProm) {                                  delete this.currentWrites[id];                              } @@ -39,50 +36,98 @@ export class Database {              };              newProm = prom ? 
prom.then(run) : run();              this.currentWrites[id] = newProm; +        } else { +            this.onConnect.push(() => this.update(id, value, callback, upsert, collectionName));          }      }      public delete(id: string, collectionName = Database.DocumentsCollection) { -        this.db && this.db.collection(collectionName).remove({ id: id }); +        if (this.db) { +            this.db.collection(collectionName).remove({ id: id }); +        } else { +            this.onConnect.push(() => this.delete(id, collectionName)); +        }      }      public deleteAll(collectionName = Database.DocumentsCollection): Promise<any> { -        return new Promise(res => -            this.db && this.db.collection(collectionName).deleteMany({}, res)); +        return new Promise(res => { +            if (this.db) { +                this.db.collection(collectionName).deleteMany({}, res); +            } else { +                this.onConnect.push(() => this.db && this.db.collection(collectionName).deleteMany({}, res)); +            } +        });      }      public insert(value: any, collectionName = Database.DocumentsCollection) { -        if ("id" in value) { -            value._id = value.id; -            delete value.id; +        if (this.db) { +            if ("id" in value) { +                value._id = value.id; +                delete value.id; +            } +            const id = value._id; +            const collection = this.db.collection(collectionName); +            const prom = this.currentWrites[id]; +            let newProm: Promise<void>; +            const run = (): Promise<void> => { +                return new Promise<void>(resolve => { +                    collection.insertOne(value, (err, res) => { +                        if (this.currentWrites[id] === newProm) { +                            delete this.currentWrites[id]; +                        } +                        resolve(); +                    }); +                }); +            }; +            newProm = prom ? 
prom.then(run) : run(); +            this.currentWrites[id] = newProm; +        } else { +            this.onConnect.push(() => this.insert(value, collectionName));          } -        this.db && this.db.collection(collectionName).insertOne(value);      }      public getDocument(id: string, fn: (result?: Transferable) => void, collectionName = Database.DocumentsCollection) { -        this.db && this.db.collection(collectionName).findOne({ _id: id }, (err, result) => { -            if (result) { -                result.id = result._id; -                delete result._id; -                fn(result); -            } else { -                fn(undefined); -            } -        }); +        if (this.db) { +            this.db.collection(collectionName).findOne({ _id: id }, (err, result) => { +                if (result) { +                    result.id = result._id; +                    delete result._id; +                    fn(result); +                } else { +                    fn(undefined); +                } +            }); +        } else { +            this.onConnect.push(() => this.getDocument(id, fn, collectionName)); +        }      }      public getDocuments(ids: string[], fn: (result: Transferable[]) => void, collectionName = Database.DocumentsCollection) { -        this.db && this.db.collection(collectionName).find({ _id: { "$in": ids } }).toArray((err, docs) => { -            if (err) { -                console.log(err.message); -                console.log(err.errmsg); -            } -            fn(docs.map(doc => { -                doc.id = doc._id; -                delete doc._id; -                return doc; -            })); -        }); +        if (this.db) { +            this.db.collection(collectionName).find({ _id: { "$in": ids } }).toArray((err, docs) => { +                if (err) { +                    console.log(err.message); +                    console.log(err.errmsg); +                } +                fn(docs.map(doc => { +                    doc.id = doc._id; +                    delete doc._id; +                    return doc; +                })); +            }); +        } else { +            this.onConnect.push(() => this.getDocuments(ids, fn, collectionName)); +        } +    } + +    public query(query: any): Promise<mongodb.Cursor> { +        if (this.db) { +            return Promise.resolve<mongodb.Cursor>(this.db.collection('newDocuments').find(query)); +        } else { +            return new Promise<mongodb.Cursor>(res => { +                this.onConnect.push(() => res(this.query(query))); +            }); +        }      }      public print() { diff --git a/src/server/downsize.ts b/src/server/downsize.ts new file mode 100644 index 000000000..ed68fbecc --- /dev/null +++ b/src/server/downsize.ts @@ -0,0 +1,40 @@ +import * as sharp from 'sharp'; +import * as fs from 'fs'; + +const folder = "./src/server/public/files/"; +const pngTypes = ["png", "PNG"]; +const jpgTypes = ["jpg", "JPG", "jpeg", "JPEG"]; +const smallResizer = sharp().resize(100); +fs.readdir(folder, async (err, files) => { +    if (err) { +        console.log(err); +        return; +    } +    // files.forEach(file => { +    //     if (file.includes("_s") || file.includes("_m") || file.includes("_l")) { +    //         fs.unlink(folder + file, () => { }); +    //     } +    // }); +    for (const file of files) { +        const filesplit = file.split("."); +        let resizers = [ +            { resizer: sharp().resize(100, undefined, { withoutEnlargement: true }), suffix: 
"_s" }, +            { resizer: sharp().resize(400, undefined, { withoutEnlargement: true }), suffix: "_m" }, +            { resizer: sharp().resize(900, undefined, { withoutEnlargement: true }), suffix: "_l" }, +        ]; +        if (pngTypes.some(type => file.endsWith(type))) { +            resizers.forEach(element => { +                element.resizer = element.resizer.png(); +            }); +        } else if (jpgTypes.some(type => file.endsWith(type))) { +            resizers.forEach(element => { +                element.resizer = element.resizer.jpeg(); +            }); +        } else { +            continue; +        } +        resizers.forEach(resizer => { +            fs.createReadStream(folder + file).pipe(resizer.resizer).pipe(fs.createWriteStream(folder + filesplit[0] + resizer.suffix + "." + filesplit[1])); +        }); +    } +});
\ No newline at end of file diff --git a/src/server/index.ts b/src/server/index.ts index 6801b3132..fd66c90b4 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -6,11 +6,14 @@ import * as session from 'express-session';  import * as expressValidator from 'express-validator';  import * as formidable from 'formidable';  import * as fs from 'fs'; +import * as sharp from 'sharp'; +const imageDataUri = require('image-data-uri');  import * as mobileDetect from 'mobile-detect';  import { ObservableMap } from 'mobx';  import * as passport from 'passport';  import * as path from 'path';  import * as request from 'request'; +import * as rp from 'request-promise';  import * as io from 'socket.io';  import { Socket } from 'socket.io';  import * as webpack from 'webpack'; @@ -21,7 +24,7 @@ import { getForgot, getLogin, getLogout, getReset, getSignup, postForgot, postLo  import { DashUserModel } from './authentication/models/user_model';  import { Client } from './Client';  import { Database } from './database'; -import { MessageStore, Transferable, Diff } from "./Message"; +import { MessageStore, Transferable, Types, Diff } from "./Message";  import { RouteStore } from './RouteStore';  const app = express();  const config = require('../../webpack.config'); @@ -31,6 +34,9 @@ const serverPort = 4321;  import expressFlash = require('express-flash');  import flash = require('connect-flash');  import c = require("crypto"); +import { Search } from './Search'; +import { debug } from 'util'; +import _ = require('lodash');  const MongoStore = require('connect-mongo')(session);  const mongoose = require('mongoose'); @@ -54,7 +60,7 @@ app.use(session({  app.use(flash());  app.use(expressFlash()); -app.use(bodyParser.json()); +app.use(bodyParser.json({ limit: "10mb" }));  app.use(bodyParser.urlencoded({ extended: true }));  app.use(expressValidator());  app.use(passport.initialize()); @@ -117,8 +123,16 @@ app.get("/pull", (req, res) =>          res.redirect("/");      })); +// SEARCH +  // GETTERS +app.get("/search", async (req, res) => { +    let query = req.query.query || "hello"; +    let results = await Search.Instance.search(query); +    res.send(results); +}); +  // anyone attempting to navigate to localhost at this port will  // first have to login  addSecureRoute( @@ -154,13 +168,15 @@ addSecureRoute(      RouteStore.getCurrUser  ); +const pngTypes = [".png", ".PNG"]; +const jpgTypes = [".jpg", ".JPG", ".jpeg", ".JPEG"]; +const uploadDir = __dirname + "/public/files/";  // SETTERS - -addSecureRoute( -    Method.POST, -    (user, res, req) => { +app.post( +    RouteStore.upload, +    (req, res) => {          let form = new formidable.IncomingForm(); -        form.uploadDir = __dirname + "/public/files/"; +        form.uploadDir = uploadDir;          form.keepExtensions = true;          // let path = req.body.path;          console.log("upload"); @@ -168,15 +184,76 @@ addSecureRoute(              console.log("parsing");              let names: string[] = [];              for (const name in files) { -                names.push(`/files/` + path.basename(files[name].path)); +                const file = path.basename(files[name].path); +                const ext = path.extname(file); +                let resizers = [ +                    { resizer: sharp().resize(100, undefined, { withoutEnlargement: true }), suffix: "_s" }, +                    { resizer: sharp().resize(400, undefined, { withoutEnlargement: true }), suffix: "_m" }, +                    { resizer: sharp().resize(900, undefined, { 
withoutEnlargement: true }), suffix: "_l" }, +                ]; +                let isImage = false; +                if (pngTypes.includes(ext)) { +                    resizers.forEach(element => { +                        element.resizer = element.resizer.png(); +                    }); +                    isImage = true; +                } else if (jpgTypes.includes(ext)) { +                    resizers.forEach(element => { +                        element.resizer = element.resizer.jpeg(); +                    }); +                    isImage = true; +                } +                if (isImage) { +                    resizers.forEach(resizer => { +                        fs.createReadStream(uploadDir + file).pipe(resizer.resizer).pipe(fs.createWriteStream(uploadDir + file.substring(0, file.length - ext.length) + resizer.suffix + ext)); +                    }); +                } +                names.push(`/files/` + file);              }              res.send(names);          }); +    } +); + +addSecureRoute( +    Method.POST, +    (user, res, req) => { +        const uri = req.body.uri; +        const filename = req.body.name; +        if (!uri || !filename) { +            res.status(401).send("incorrect parameters specified"); +            return; +        } +        imageDataUri.outputFile(uri, uploadDir + filename).then((savedName: string) => { +            const ext = path.extname(savedName); +            let resizers = [ +                { resizer: sharp().resize(100, undefined, { withoutEnlargement: true }), suffix: "_s" }, +                { resizer: sharp().resize(400, undefined, { withoutEnlargement: true }), suffix: "_m" }, +                { resizer: sharp().resize(900, undefined, { withoutEnlargement: true }), suffix: "_l" }, +            ]; +            let isImage = false; +            if (pngTypes.includes(ext)) { +                resizers.forEach(element => { +                    element.resizer = element.resizer.png(); +                }); +                isImage = true; +            } else if (jpgTypes.includes(ext)) { +                resizers.forEach(element => { +                    element.resizer = element.resizer.jpeg(); +                }); +                isImage = true; +            } +            if (isImage) { +                resizers.forEach(resizer => { +                    fs.createReadStream(savedName).pipe(resizer.resizer).pipe(fs.createWriteStream(uploadDir + filename + resizer.suffix + ext)); +                }); +            } +            res.send("/files/" + filename + ext); +        });      },      undefined, -    RouteStore.upload +    RouteStore.dataUriToImage  ); -  // AUTHENTICATION  // Sign Up @@ -240,6 +317,7 @@ server.on("connection", function (socket: Socket) {  async function deleteFields() {      await Database.Instance.deleteAll(); +    await Search.Instance.clear();      await Database.Instance.deleteAll('newDocuments');  } @@ -248,6 +326,7 @@ async function deleteAll() {      await Database.Instance.deleteAll('newDocuments');      await Database.Instance.deleteAll('sessions');      await Database.Instance.deleteAll('users'); +    await Search.Instance.clear();  }  function barReceived(guid: String) { @@ -266,6 +345,11 @@ function getFields([ids, callback]: [string[], (result: Transferable[]) => void]  function setField(socket: Socket, newValue: Transferable) {      Database.Instance.update(newValue.id, newValue, () =>          socket.broadcast.emit(MessageStore.SetField.Message, newValue)); +    if (newValue.type === 
Types.Text) { +        Search.Instance.updateDocument({ id: newValue.id, data: (newValue as any).data }); +        console.log("set field"); +        console.log("checking in"); +    }  }  function GetRefField([id, callback]: [string, (result?: Transferable) => void]) { @@ -276,9 +360,81 @@ function GetRefFields([ids, callback]: [string[], (result?: Transferable[]) => v      Database.Instance.getDocuments(ids, callback, "newDocuments");  } + +const suffixMap: { [type: string]: (string | [string, string | ((json: any) => any)]) } = { +    "number": "_n", +    "string": "_t", +    // "boolean": "_b", +    // "image": ["_t", "url"], +    "video": ["_t", "url"], +    "pdf": ["_t", "url"], +    "audio": ["_t", "url"], +    "web": ["_t", "url"], +    "date": ["_d", value => new Date(value.date).toISOString()], +    "proxy": ["_i", "fieldId"], +    "list": ["_l", list => { +        const results = []; +        for (const value of list.fields) { +            const term = ToSearchTerm(value); +            if (term) { +                results.push(term.value); +            } +        } +        return results.length ? results : null; +    }] +}; + +function ToSearchTerm(val: any): { suffix: string, value: any } | undefined { +    if (val === null || val === undefined) { +        return; +    } +    const type = val.__type || typeof val; +    let suffix = suffixMap[type]; +    if (!suffix) { +        return; +    } + +    if (Array.isArray(suffix)) { +        const accessor = suffix[1]; +        if (typeof accessor === "function") { +            val = accessor(val); +        } else { +            val = val[accessor]; +        } +        suffix = suffix[0]; +    } + +    return { suffix, value: val }; +} + +function getSuffix(value: string | [string, any]): string { +    return typeof value === "string" ? value : value[0]; +} +  function UpdateField(socket: Socket, diff: Diff) {      Database.Instance.update(diff.id, diff.diff,          () => socket.broadcast.emit(MessageStore.UpdateField.Message, diff), false, "newDocuments"); +    const docfield = diff.diff.$set; +    if (!docfield) { +        return; +    } +    const update: any = { id: diff.id }; +    let dynfield = false; +    for (let key in docfield) { +        if (!key.startsWith("fields.")) continue; +        dynfield = true; +        let val = docfield[key]; +        key = key.substring(7); +        Object.values(suffixMap).forEach(suf => update[key + getSuffix(suf)] = { set: null }); +        let term = ToSearchTerm(val); +        if (term !== undefined) { +            let { suffix, value } = term; +            update[key + suffix] = { set: value }; +        } +    } +    if (dynfield) { +        Search.Instance.updateDocument(update); +    }  }  function CreateField(newValue: any) { diff --git a/src/server/public/files/.gitignore b/src/server/public/files/.gitignore index f59ec20aa..c96a04f00 100644 --- a/src/server/public/files/.gitignore +++ b/src/server/public/files/.gitignore @@ -1 +1,2 @@ -*
\ No newline at end of file
+*
+!.gitignore
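The index.ts changes above add a GET /search route, which forwards the query to Solr and returns only matching ids, and register RouteStore.dataUriToImage ("/uploadURI") for uploading images as data URIs, which is why the bodyParser JSON limit is raised to 10mb. A hypothetical client-side sketch of calling both routes (assumes an authenticated session, since /uploadURI is registered through addSecureRoute):

// Full-text search: the server returns an array of matching document ids.
async function searchIds(query: string): Promise<string[]> {
    const res = await fetch(`/search?query=${encodeURIComponent(query)}`);
    return res.json();
}

// Data-URI upload: the route expects { uri, name } in the JSON body and
// responds with the served path, e.g. "/files/<name>.png".
async function uploadDataUri(uri: string, name: string): Promise<string> {
    const res = await fetch("/uploadURI", {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({ uri, name }),
    });
    return res.text();
}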
\ No newline at end of file diff --git a/src/server/remapUrl.ts b/src/server/remapUrl.ts new file mode 100644 index 000000000..6f4d6642f --- /dev/null +++ b/src/server/remapUrl.ts @@ -0,0 +1,59 @@ +import { Database } from "./database"; +import { Search } from "./Search"; +import * as path from 'path'; + +const suffixMap: { [type: string]: true } = { +    "video": true, +    "pdf": true, +    "audio": true, +    "web": true +}; + +async function update() { +    await new Promise(res => setTimeout(res, 10)); +    console.log("update"); +    const cursor = await Database.Instance.query({}); +    console.log("Cleared"); +    const updates: [string, any][] = []; +    function updateDoc(doc: any) { +        if (doc.__type !== "Doc") { +            return; +        } +        const fields = doc.fields; +        if (!fields) { +            return; +        } +        const update: any = { +        }; +        let dynfield = false; +        for (const key in fields) { +            const value = fields[key]; +            if (value && value.__type && suffixMap[value.__type]) { +                const url = new URL(value.url); +                if (url.href.includes("azure")) { +                    dynfield = true; + +                    update.$set = { ["fields." + key + ".url"]: `${url.protocol}//localhost:1050${url.pathname}` }; +                } +            } +        } +        if (dynfield) { +            updates.push([doc._id, update]); +        } +    } +    await cursor.forEach(updateDoc); +    await Promise.all(updates.map(doc => { +        console.log(doc[0], doc[1]); +        return new Promise(res => Database.Instance.update(doc[0], doc[1], () => { +            console.log("wrote " + JSON.stringify(doc[1])); +            res(); +        }, false, "newDocuments")); +    })); +    console.log("Done"); +    // await Promise.all(updates.map(update => { +    //     return limit(() => Search.Instance.updateDocument(update)); +    // })); +    cursor.close(); +} + +update(); diff --git a/src/server/updateSearch.ts b/src/server/updateSearch.ts new file mode 100644 index 000000000..de1fd25e1 --- /dev/null +++ b/src/server/updateSearch.ts @@ -0,0 +1,101 @@ +import { Database } from "./database"; +import { Cursor } from "mongodb"; +import { Search } from "./Search"; +import pLimit from 'p-limit'; + +const suffixMap: { [type: string]: (string | [string, string | ((json: any) => any)]) } = { +    "number": "_n", +    "string": "_t", +    // "boolean": "_b", +    // "image": ["_t", "url"], +    "video": ["_t", "url"], +    "pdf": ["_t", "url"], +    "audio": ["_t", "url"], +    "web": ["_t", "url"], +    "date": ["_d", value => new Date(value.date).toISOString()], +    "proxy": ["_i", "fieldId"], +    "list": ["_l", list => { +        const results = []; +        for (const value of list.fields) { +            const term = ToSearchTerm(value); +            if (term) { +                results.push(term.value); +            } +        } +        return results.length ? 
results : null; +    }] +}; + +function ToSearchTerm(val: any): { suffix: string, value: any } | undefined { +    if (val === null || val === undefined) { +        return; +    } +    const type = val.__type || typeof val; +    let suffix = suffixMap[type]; +    if (!suffix) { +        return; +    } + +    if (Array.isArray(suffix)) { +        const accessor = suffix[1]; +        if (typeof accessor === "function") { +            val = accessor(val); +        } else { +            val = val[accessor]; +        } +        suffix = suffix[0]; +    } + +    return { suffix, value: val }; +} + +function getSuffix(value: string | [string, any]): string { +    return typeof value === "string" ? value : value[0]; +} + +const limit = pLimit(5); +async function update() { +    // await new Promise(res => setTimeout(res, 5)); +    console.log("update"); +    await Search.Instance.clear(); +    const cursor = await Database.Instance.query({}); +    console.log("Cleared"); +    const updates: any[] = []; +    let numDocs = 0; +    function updateDoc(doc: any) { +        numDocs++; +        if ((numDocs % 50) === 0) { +            console.log("updateDoc " + numDocs); +        } +        console.log("doc " + numDocs); +        if (doc.__type !== "Doc") { +            return; +        } +        const fields = doc.fields; +        if (!fields) { +            return; +        } +        const update: any = { id: doc._id }; +        let dynfield = false; +        for (const key in fields) { +            const value = fields[key]; +            const term = ToSearchTerm(value); +            if (term !== undefined) { +                let { suffix, value } = term; +                update[key + suffix] = value; +                dynfield = true; +            } +        } +        if (dynfield) { +            updates.push(update); +            console.log(updates.length); +        } +    } +    await cursor.forEach(updateDoc); +    await Promise.all(updates.map(update => { +        return limit(() => Search.Instance.updateDocument(update)); +    })); +    cursor.close(); +} + +update();
\ No newline at end of file
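updateSearch.ts above rebuilds the Solr index from scratch: it clears the core, walks every stored document, and maps each field value to a Solr dynamic-field key via suffixMap/ToSearchTerm, posting the results with at most five concurrent requests (pLimit(5)). A worked example of that mapping, with invented ids and values:

// Given a stored document shaped roughly like this...
const stored = {
    _id: "doc-123",
    __type: "Doc",
    fields: {
        title: "Trip notes",                              // string -> title_t
        rating: 4,                                        // number -> rating_n
        created: { __type: "date", date: 1559347200000 }, // date   -> created_d (ISO string)
        parent: { __type: "proxy", fieldId: "doc-042" },  // proxy  -> parent_i
    },
};

// ...ToSearchTerm/suffixMap would produce a Solr update document of roughly:
const solrUpdate = {
    id: "doc-123",
    title_t: "Trip notes",
    rating_n: 4,
    created_d: new Date(1559347200000).toISOString(),
    parent_i: "doc-042",
};
// updateSearch.ts then sends each such object through Search.Instance.updateDocument.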
