From 91e4ac65e0b8d1ff5c17ea0e80666038281ec5a6 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Tue, 15 Oct 2019 12:54:57 -0400 Subject: initial commit --- src/server/ActionUtilities.ts | 28 + src/server/Initialization.ts | 110 ++ src/server/RouteManager.ts | 131 ++ src/server/RouteStore.ts | 1 + src/server/apis/google/GoogleApiServerUtils.ts | 21 +- src/server/database.ts | 7 +- src/server/index.ts | 2089 +++++++++++------------- 7 files changed, 1240 insertions(+), 1147 deletions(-) create mode 100644 src/server/ActionUtilities.ts create mode 100644 src/server/Initialization.ts create mode 100644 src/server/RouteManager.ts (limited to 'src') diff --git a/src/server/ActionUtilities.ts b/src/server/ActionUtilities.ts new file mode 100644 index 000000000..9a009791b --- /dev/null +++ b/src/server/ActionUtilities.ts @@ -0,0 +1,28 @@ +import * as fs from 'fs'; +import { ExecOptions } from 'shelljs'; +import { exec } from 'child_process'; +import * as path from 'path'; + +export const command_line = (command: string, fromDirectory?: string) => { + return new Promise((resolve, reject) => { + let options: ExecOptions = {}; + if (fromDirectory) { + options.cwd = path.join(__dirname, fromDirectory); + } + exec(command, options, (err, stdout) => err ? reject(err) : resolve(stdout)); + }); +}; + +export const read_text_file = (relativePath: string) => { + let target = path.join(__dirname, relativePath); + return new Promise((resolve, reject) => { + fs.readFile(target, (err, data) => err ? reject(err) : resolve(data.toString())); + }); +}; + +export const write_text_file = (relativePath: string, contents: any) => { + let target = path.join(__dirname, relativePath); + return new Promise((resolve, reject) => { + fs.writeFile(target, contents, (err) => err ? reject(err) : resolve()); + }); +}; \ No newline at end of file diff --git a/src/server/Initialization.ts b/src/server/Initialization.ts new file mode 100644 index 000000000..e371a3edb --- /dev/null +++ b/src/server/Initialization.ts @@ -0,0 +1,110 @@ +import * as express from 'express'; +import * as expressValidator from 'express-validator'; +import * as session from 'express-session'; +import * as passport from 'passport'; +import * as bodyParser from 'body-parser'; +import * as cookieParser from 'cookie-parser'; +import expressFlash = require('express-flash'); +import flash = require('connect-flash'); +import { Database } from './database'; +const MongoStore = require('connect-mongo')(session); +import mongoose, { ConnectionStates } from 'mongoose'; +import { RouteStore } from './RouteStore'; +import RouteManager from './RouteManager'; +import * as webpack from 'webpack'; +const config = require('../../webpack.config'); +const compiler = webpack(config); +import * as wdm from 'webpack-dev-middleware'; +import * as whm from 'webpack-hot-middleware'; +import * as fs from 'fs'; + +export interface InitializationOptions { + listenAtPort: number; + routeSetter: (server: RouteManager) => void; +} + +export default async function InitializeServer(options: InitializationOptions) { + const { listenAtPort, routeSetter } = options; + const server = injectMiddleware(express()); + const { url } = Database; + try { + await connectToDatabase(url); + } catch (e) { + console.error(`Mongoose FAILED to establish default connection at ${url}`); + console.error(e); + console.log('Since a valid database connection is required to use Dash, killing the server process.\nPlease try again later.'); + process.exit(1); + } + + // static file serving + 
server.use(express.static(__dirname + RouteStore.public)); + server.use(RouteStore.images, express.static(__dirname + RouteStore.public)); + + routeSetter(new RouteManager(server, determineEnvironment())); + + server.use(wdm(compiler, { publicPath: config.output.publicPath })); + server.use(whm(compiler)); + server.listen(listenAtPort, () => console.log(`server started at http://localhost:${listenAtPort}`)); + + return server; +} + +function determineEnvironment() { + const isRelease = process.env.RELEASE === "true"; + + console.log(`running server in ${isRelease ? 'release' : 'debug'} mode`); + console.log(process.env.PWD); + + let clientUtils = fs.readFileSync("./src/client/util/ClientUtils.ts.temp", "utf8"); + clientUtils = `//AUTO-GENERATED FILE: DO NOT EDIT\n${clientUtils.replace('"mode"', String(isRelease))}`; + fs.writeFileSync("./src/client/util/ClientUtils.ts", clientUtils, "utf8"); + + return isRelease; +} + +const week = 7 * 24 * 60 * 60 * 1000; +const secret = "64d6866242d3b5a5503c675b32c9605e4e90478e9b77bcf2bc"; + +function injectMiddleware(server: express.Express) { + [ + cookieParser(), + session({ + secret, + resave: true, + cookie: { maxAge: week }, + saveUninitialized: true, + store: new MongoStore({ url: Database.url }) + }), + flash(), + expressFlash(), + bodyParser.json({ limit: "10mb" }), + bodyParser.urlencoded({ extended: true }), + expressValidator(), + passport.initialize(), + passport.session(), + (req: express.Request, res: express.Response, next: express.NextFunction) => { + res.locals.user = req.user; + next(); + } + ].forEach(next => server.use(next)); + return server; +} + +async function connectToDatabase(url: string) { + const { connection } = mongoose; + process.on('SIGINT', () => { + connection.close(() => { + console.log('Mongoose default connection disconnected through app termination'); + process.exit(0); + }); + }); + if (connection.readyState === ConnectionStates.disconnected) { + return new Promise((resolve, reject) => { + connection.on('error', reject); + connection.on('connected', () => { + console.log(`Mongoose established default connection at ${url}`); + resolve(); + }); + }); + } +} \ No newline at end of file diff --git a/src/server/RouteManager.ts b/src/server/RouteManager.ts new file mode 100644 index 000000000..cf15e45c9 --- /dev/null +++ b/src/server/RouteManager.ts @@ -0,0 +1,131 @@ +import RouteSubscriber from "./RouteSubscriber"; +import { RouteStore } from "./RouteStore"; +import { DashUserModel } from "./authentication/models/user_model"; +import * as express from 'express'; +import * as qs from 'query-string'; + +export default class RouteManager { + private server: express.Express; + private _isRelease: boolean; + + public get release() { + return this._isRelease; + } + + constructor(server: express.Express, isRelease: boolean) { + this.server = server; + this._isRelease = isRelease; + } + + /** + * Please invoke this function when adding a new route to Dash's server. + * It ensures that any requests leading to or containing user-sensitive information + * does not execute unless Passport authentication detects a user logged in. 
+ * @param method whether or not the request is a GET or a POST + * @param handler the action to invoke, recieving a DashUserModel and, as expected, the Express.Request and Express.Response + * @param onRejection an optional callback invoked on return if no user is found to be logged in + * @param subscribers the forward slash prepended path names (reference and add to RouteStore.ts) that will all invoke the given @param handler + */ + addSupervisedRoute(initializer: RouteInitializer) { + const { method, subscription, onValidation, onRejection, onError } = initializer; + const release = this._isRelease; + let abstracted = async (req: express.Request, res: express.Response) => { + const { user, originalUrl: target } = req; + if (user || isSharedDocAccess(target)) { + try { + await onValidation(user, req, res, release); + } catch (e) { + if (onError) { + onError(req, res, e, release); + } else { + _error(res, `The server encountered an internal error handling ${target}.`, e); + } + } + } else { + req.session!.target = target; + try { + await (onRejection || LoginRedirect)(req, res, release); + } catch (e) { + if (onError) { + onError(req, res, e, this._isRelease); + } else { + _error(res, `The server encountered an internal error when rejecting ${target}.`, e); + } + } + } + }; + const subscribe = (subscriber: RouteSubscriber | string) => { + let route: string; + if (typeof subscriber === "string") { + route = subscriber; + } else { + route = subscriber.build; + } + switch (method) { + case Method.GET: + this.server.get(route, abstracted); + break; + case Method.POST: + this.server.post(route, abstracted); + break; + } + }; + if (Array.isArray(subscription)) { + subscription.forEach(subscribe); + } else { + subscribe(subscription); + } + } + +} + +export enum Method { + GET, + POST +} + +export type ValidationHandler = (user: DashUserModel, req: express.Request, res: express.Response, isRelease: boolean) => any | Promise; +export type RejectionHandler = (req: express.Request, res: express.Response, isRelease: boolean) => any | Promise; +export type ErrorHandler = (req: express.Request, res: express.Response, error: any, isRelease: boolean) => any | Promise; + +const LoginRedirect: RejectionHandler = (_req, res) => res.redirect(RouteStore.login); + +export interface RouteInitializer { + method: Method; + subscription: string | RouteSubscriber | (string | RouteSubscriber)[]; + onValidation: ValidationHandler; + onRejection?: RejectionHandler; + onError?: ErrorHandler; +} + +const isSharedDocAccess = (target: string) => { + const shared = qs.parse(qs.extract(target), { sort: false }).sharing === "true"; + const docAccess = target.startsWith("/doc/"); + return shared && docAccess; +}; + +export const STATUS = { + OK: 200, + BAD_REQUEST: 400, + EXECUTION_ERROR: 500, + PERMISSION_DENIED: 403 +}; + +export function _error(res: express.Response, message: string, error?: any) { + res.statusMessage = message; + res.status(STATUS.EXECUTION_ERROR).send(error); +} + +export function _success(res: express.Response, body: any) { + res.status(STATUS.OK).send(body); +} + +export function _invalid(res: express.Response, message: string) { + res.statusMessage = message; + res.status(STATUS.BAD_REQUEST).send(); +} + +export function _permission_denied(res: express.Response, message: string) { + res.statusMessage = message; + res.status(STATUS.BAD_REQUEST).send("Permission Denied!"); +} diff --git a/src/server/RouteStore.ts b/src/server/RouteStore.ts index 7426ffb39..de2553b2f 100644 --- 
a/src/server/RouteStore.ts +++ b/src/server/RouteStore.ts @@ -7,6 +7,7 @@ export enum RouteStore { corsProxy = "/corsProxy", delete = "/delete", deleteAll = "/deleteAll", + pull = "/pull", // UPLOAD AND STATIC FILE SERVING public = "/public", diff --git a/src/server/apis/google/GoogleApiServerUtils.ts b/src/server/apis/google/GoogleApiServerUtils.ts index 5714c9928..6093197f1 100644 --- a/src/server/apis/google/GoogleApiServerUtils.ts +++ b/src/server/apis/google/GoogleApiServerUtils.ts @@ -9,6 +9,8 @@ import request = require('request-promise'); import * as qs from 'query-string'; import Photos = require('googlephotos'); import { Database } from "../../database"; +const path = require("path"); + /** * Server side authentication for Google Api queries. */ @@ -76,22 +78,24 @@ export namespace GoogleApiServerUtils { }); }; - const RetrieveOAuthClient = async (information: CredentialInformation) => { - return new Promise((resolve, reject) => { - readFile(information.credentialsPath, async (err, credentials) => { + let AuthorizationManager: OAuth2Client; + + export const LoadOAuthClient = async () => { + return new Promise((resolve, reject) => { + readFile(path.join(__dirname, "../../credentials/google_docs_credentials.json"), async (err, credentials) => { if (err) { reject(err); return console.log('Error loading client secret file:', err); } const { client_secret, client_id, redirect_uris } = parseBuffer(credentials).installed; - resolve(new google.auth.OAuth2(client_id, client_secret, redirect_uris[0])); + AuthorizationManager = new google.auth.OAuth2(client_id, client_secret, redirect_uris[0]); + resolve(); }); }); }; export const GenerateAuthenticationUrl = async (information: CredentialInformation) => { - const client = await RetrieveOAuthClient(information); - return client.generateAuthUrl({ + return AuthorizationManager.generateAuthUrl({ access_type: 'offline', scope: SCOPES.map(relative => prefix + relative), }); @@ -103,14 +107,13 @@ export namespace GoogleApiServerUtils { name: string; } export const ProcessClientSideCode = async (information: CredentialInformation, authenticationCode: string): Promise => { - const oAuth2Client = await RetrieveOAuthClient(information); return new Promise((resolve, reject) => { - oAuth2Client.getToken(authenticationCode, async (err, token) => { + AuthorizationManager.getToken(authenticationCode, async (err, token) => { if (err || !token) { reject(err); return console.error('Error retrieving access token', err); } - oAuth2Client.setCredentials(token); + AuthorizationManager.setCredentials(token); const enriched = injectUserInfo(token); await Database.Auxiliary.GoogleAuthenticationToken.Write(information.userId, enriched); const { given_name, picture } = enriched.userInfo; diff --git a/src/server/database.ts b/src/server/database.ts index db86b472d..25e1e67e0 100644 --- a/src/server/database.ts +++ b/src/server/database.ts @@ -8,16 +8,19 @@ import { GoogleApiServerUtils } from './apis/google/GoogleApiServerUtils'; export namespace Database { + const schema = 'Dash'; + const port = 27017; + export const url = `mongodb://localhost:${port}/${schema}`; + class Database { public static DocumentsCollection = 'documents'; private MongoClient = mongodb.MongoClient; - private url = 'mongodb://localhost:27017/Dash'; private currentWrites: { [id: string]: Promise } = {}; private db?: mongodb.Db; private onConnect: (() => void)[] = []; constructor() { - this.MongoClient.connect(this.url, (err, client) => { + this.MongoClient.connect(url, (_err, client) => { this.db 
= client.db(); this.onConnect.forEach(fn => fn()); }); diff --git a/src/server/index.ts b/src/server/index.ts index 2203ae2e1..ad18857b6 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -1,1255 +1,1072 @@ require('dotenv').config(); -import * as bodyParser from 'body-parser'; -import { exec, ExecOptions } from 'child_process'; -import * as cookieParser from 'cookie-parser'; -import * as express from 'express'; -import * as session from 'express-session'; -import * as expressValidator from 'express-validator'; +import { exec } from 'child_process'; import * as formidable from 'formidable'; import * as fs from 'fs'; import * as sharp from 'sharp'; import * as Pdfjs from 'pdfjs-dist'; const imageDataUri = require('image-data-uri'); import * as mobileDetect from 'mobile-detect'; -import * as passport from 'passport'; import * as path from 'path'; import * as request from 'request'; -import * as io from 'socket.io'; +import io from 'socket.io'; import { Socket } from 'socket.io'; -import * as webpack from 'webpack'; -import * as wdm from 'webpack-dev-middleware'; -import * as whm from 'webpack-hot-middleware'; import { Utils } from '../Utils'; import { getForgot, getLogin, getLogout, getReset, getSignup, postForgot, postLogin, postReset, postSignup } from './authentication/controllers/user_controller'; -import { DashUserModel } from './authentication/models/user_model'; import { Client } from './Client'; import { Database } from './database'; import { MessageStore, Transferable, Types, Diff, YoutubeQueryTypes as YoutubeQueryType, YoutubeQueryInput } from "./Message"; import { RouteStore } from './RouteStore'; import v4 = require('uuid/v4'); -const app = express(); -const config = require('../../webpack.config'); import { createCanvas } from "canvas"; -const compiler = webpack(config); -const port = 1050; // default port to listen const serverPort = 4321; -import expressFlash = require('express-flash'); -import flash = require('connect-flash'); import { Search } from './Search'; import * as Archiver from 'archiver'; var AdmZip = require('adm-zip'); import * as YoutubeApi from "./apis/youtube/youtubeApiSample"; import { Response } from 'express-serve-static-core'; import { GoogleApiServerUtils } from "./apis/google/GoogleApiServerUtils"; -const MongoStore = require('connect-mongo')(session); -const mongoose = require('mongoose'); const probe = require("probe-image-size"); const pdf = require('pdf-parse'); var findInFiles = require('find-in-files'); import { GooglePhotosUploadUtils } from './apis/google/GooglePhotosUploadUtils'; -import * as qs from 'query-string'; import { Opt } from '../new_fields/Doc'; import { DashUploadUtils } from './DashUploadUtils'; import { BatchedArray, TimeUnit } from 'array-batcher'; import { ParsedPDF } from "./PdfTypes"; import { reject } from 'bluebird'; -import { ExifData } from 'exif'; -import { Result } from '../client/northstar/model/idea/idea'; import RouteSubscriber from './RouteSubscriber'; +import InitializeServer from './Initialization'; +import { Method, _success, _permission_denied, _error, _invalid } from './RouteManager'; +import { command_line, read_text_file } from './ActionUtilities'; -const download = (url: string, dest: fs.PathLike) => request.get(url).pipe(fs.createWriteStream(dest)); let youtubeApiKey: string; -YoutubeApi.readApiKey((apiKey: string) => youtubeApiKey = apiKey); -const release = process.env.RELEASE === "true"; -if (process.env.RELEASE === "true") { - console.log("Running server in release mode"); -} else { - 
console.log("Running server in debug mode"); -} -console.log(process.env.PWD); -let clientUtils = fs.readFileSync("./src/client/util/ClientUtils.ts.temp", "utf8"); -clientUtils = `//AUTO-GENERATED FILE: DO NOT EDIT\n${clientUtils.replace('"mode"', String(release))}`; -fs.writeFileSync("./src/client/util/ClientUtils.ts", clientUtils, "utf8"); - -const mongoUrl = 'mongodb://localhost:27017/Dash'; -mongoose.connection.readyState === 0 && mongoose.connect(mongoUrl); -mongoose.connection.on('connected', () => console.log("connected")); - -// SESSION MANAGEMENT AND AUTHENTICATION MIDDLEWARE -// ORDER OF IMPORTS MATTERS - -app.use(cookieParser()); -app.use(session({ - secret: "64d6866242d3b5a5503c675b32c9605e4e90478e9b77bcf2bc", - resave: true, - cookie: { maxAge: 7 * 24 * 60 * 60 * 1000 }, - saveUninitialized: true, - store: new MongoStore({ url: 'mongodb://localhost:27017/Dash' }) -})); - -app.use(flash()); -app.use(expressFlash()); -app.use(bodyParser.json({ limit: "10mb" })); -app.use(bodyParser.urlencoded({ extended: true })); -app.use(expressValidator()); -app.use(passport.initialize()); -app.use(passport.session()); -app.use((req, res, next) => { - res.locals.user = req.user; - next(); -}); - -app.get("/hello", (req, res) => res.send("
Hello
")); - -enum Method { - GET, - POST +export type Hierarchy = { [id: string]: string | Hierarchy }; +export type ZipMutator = (file: Archiver.Archiver) => void | Promise; + +export interface NewMediaItem { + description: string; + simpleMediaItem: { + uploadToken: string; + }; } -export type ValidationHandler = (user: DashUserModel, req: express.Request, res: express.Response) => any | Promise; -export type RejectionHandler = (req: express.Request, res: express.Response) => any | Promise; -export type ErrorHandler = (req: express.Request, res: express.Response, error: any) => any | Promise; +(async () => { + YoutubeApi.readApiKey((apiKey: string) => youtubeApiKey = apiKey); + await GoogleApiServerUtils.LoadOAuthClient(); + + await InitializeServer({ + listenAtPort: 1050, + routeSetter: router => { + + router.addSupervisedRoute({ + method: Method.GET, + subscription: "/pull", + onValidation: (_user, _req, res) => { + exec('"C:\\Program Files\\Git\\git-bash.exe" -c "git pull"', err => { + if (err) { + res.send(err.message); + return; + } + res.redirect("/"); + }); + } + }); -const LoginRedirect: RejectionHandler = (_req, res) => res.redirect(RouteStore.login); + router.addSupervisedRoute({ + method: Method.GET, + subscription: "/buxton", + onValidation: (_user, _req, res) => { + let cwd = '../scraping/buxton'; -export interface RouteInitializer { - method: Method; - subscribers: string | RouteSubscriber | (string | RouteSubscriber)[]; - onValidation: ValidationHandler; - onRejection?: RejectionHandler; - onError?: ErrorHandler; -} + let onResolved = (stdout: string) => { console.log(stdout); res.redirect("/"); }; + let onRejected = (err: any) => { console.error(err.message); res.send(err); }; + let tryPython3 = () => command_line('python3 scraper.py', cwd).then(onResolved, onRejected); -const isSharedDocAccess = (target: string) => { - const shared = qs.parse(qs.extract(target), { sort: false }).sharing === "true"; - const docAccess = target.startsWith("/doc/"); - return shared && docAccess; -}; - -/** - * Please invoke this function when adding a new route to Dash's server. - * It ensures that any requests leading to or containing user-sensitive information - * does not execute unless Passport authentication detects a user logged in. 
- * @param method whether or not the request is a GET or a POST - * @param handler the action to invoke, recieving a DashUserModel and, as expected, the Express.Request and Express.Response - * @param onRejection an optional callback invoked on return if no user is found to be logged in - * @param subscribers the forward slash prepended path names (reference and add to RouteStore.ts) that will all invoke the given @param handler - */ -function addSecureRoute(initializer: RouteInitializer) { - const { method, subscribers, onValidation, onRejection, onError } = initializer; - let abstracted = async (req: express.Request, res: express.Response) => { - const { user, originalUrl: target } = req; - if (user || isSharedDocAccess(target)) { - try { - await onValidation(user, req, res); - } catch (e) { - if (onError) { - onError(req, res, e); - } else { - _error(res, `The server encountered an internal error handling ${target}.`, e); - } - } - } else { - req.session!.target = target; - try { - await (onRejection || LoginRedirect)(req, res); - } catch (e) { - if (onError) { - onError(req, res, e); - } else { - _error(res, `The server encountered an internal error when rejecting ${target}.`, e); + command_line('python scraper.py', cwd).then(onResolved, tryPython3); + }, + }); + + router.addSupervisedRoute({ + method: Method.GET, + subscription: "/version", + onValidation: (_user, _req, res) => { + exec('"C:\\Program Files\\Git\\bin\\git.exe" rev-parse HEAD', (err, stdout) => { + if (err) { + res.send(err.message); + return; + } + res.send(stdout); + }); } - } - } - }; - const subscribe = (subscriber: RouteSubscriber | string) => { - let route: string; - if (typeof subscriber === "string") { - route = subscriber; - } else { - route = subscriber.build; - } - switch (method) { - case Method.GET: - app.get(route, abstracted); - break; - case Method.POST: - app.post(route, abstracted); - break; - } - }; - if (Array.isArray(subscribers)) { - subscribers.forEach(subscribe); - } else { - subscribe(subscribers); - } -} + }); -// STATIC FILE SERVING -app.use(express.static(__dirname + RouteStore.public)); -app.use(RouteStore.images, express.static(__dirname + RouteStore.public)); + // SEARCH + const solrURL = "http://localhost:8983/solr/#/dash"; -app.get("/pull", (req, res) => - exec('"C:\\Program Files\\Git\\git-bash.exe" -c "git pull"', (err, stdout, stderr) => { - if (err) { - res.send(err.message); - return; - } - res.redirect("/"); - })); - -app.get("/buxton", (req, res) => { - let cwd = '../scraping/buxton'; - - let onResolved = (stdout: string) => { console.log(stdout); res.redirect("/"); }; - let onRejected = (err: any) => { console.error(err.message); res.send(err); }; - let tryPython3 = () => command_line('python3 scraper.py', cwd).then(onResolved, onRejected); - - command_line('python scraper.py', cwd).then(onResolved, tryPython3); -}); - -const STATUS = { - OK: 200, - BAD_REQUEST: 400, - EXECUTION_ERROR: 500, - PERMISSION_DENIED: 403 -}; - -const command_line = (command: string, fromDirectory?: string) => { - return new Promise((resolve, reject) => { - let options: ExecOptions = {}; - if (fromDirectory) { - options.cwd = path.join(__dirname, fromDirectory); - } - exec(command, options, (err, stdout) => err ? reject(err) : resolve(stdout)); - }); -}; + // GETTERS -const read_text_file = (relativePath: string) => { - let target = path.join(__dirname, relativePath); - return new Promise((resolve, reject) => { - fs.readFile(target, (err, data) => err ? 
reject(err) : resolve(data.toString())); - }); -}; + DashServer.get("/textsearch", async (req, res) => { + let q = req.query.q; + if (q === undefined) { + res.send([]); + return; + } + let results = await findInFiles.find({ 'term': q, 'flags': 'ig' }, uploadDirectory + "text", ".txt$"); + let resObj: { ids: string[], numFound: number, lines: string[] } = { ids: [], numFound: 0, lines: [] }; + for (var result in results) { + resObj.ids.push(path.basename(result, ".txt").replace(/upload_/, "")); + resObj.lines.push(results[result].line); + resObj.numFound++; + } + res.send(resObj); + }); -const write_text_file = (relativePath: string, contents: any) => { - let target = path.join(__dirname, relativePath); - return new Promise((resolve, reject) => { - fs.writeFile(target, contents, (err) => err ? reject(err) : resolve()); - }); -}; + DashServer.get("/search", async (req, res) => { + const solrQuery: any = {}; + ["q", "fq", "start", "rows", "hl", "hl.fl"].forEach(key => solrQuery[key] = req.query[key]); + if (solrQuery.q === undefined) { + res.send([]); + return; + } + let results = await Search.Instance.search(solrQuery); + res.send(results); + }); -app.get("/version", (req, res) => { - exec('"C:\\Program Files\\Git\\bin\\git.exe" rev-parse HEAD', (err, stdout, stderr) => { - if (err) { - res.send(err.message); - return; - } - res.send(stdout); - }); -}); - -// SEARCH -const solrURL = "http://localhost:8983/solr/#/dash"; - -// GETTERS - -app.get("/textsearch", async (req, res) => { - let q = req.query.q; - if (q === undefined) { - res.send([]); - return; - } - let results = await findInFiles.find({ 'term': q, 'flags': 'ig' }, uploadDirectory + "text", ".txt$"); - let resObj: { ids: string[], numFound: number, lines: string[] } = { ids: [], numFound: 0, lines: [] }; - for (var result in results) { - resObj.ids.push(path.basename(result, ".txt").replace(/upload_/, "")); - resObj.lines.push(results[result].line); - resObj.numFound++; - } - res.send(resObj); -}); - -app.get("/search", async (req, res) => { - const solrQuery: any = {}; - ["q", "fq", "start", "rows", "hl", "hl.fl"].forEach(key => solrQuery[key] = req.query[key]); - if (solrQuery.q === undefined) { - res.send([]); - return; - } - let results = await Search.Instance.search(solrQuery); - res.send(results); -}); - -function msToTime(duration: number) { - let milliseconds = Math.floor((duration % 1000) / 100), - seconds = Math.floor((duration / 1000) % 60), - minutes = Math.floor((duration / (1000 * 60)) % 60), - hours = Math.floor((duration / (1000 * 60 * 60)) % 24); - - let hoursS = (hours < 10) ? "0" + hours : hours; - let minutesS = (minutes < 10) ? "0" + minutes : minutes; - let secondsS = (seconds < 10) ? "0" + seconds : seconds; - - return hoursS + ":" + minutesS + ":" + secondsS + "." + milliseconds; -} + function msToTime(duration: number) { + let milliseconds = Math.floor((duration % 1000) / 100), + seconds = Math.floor((duration / 1000) % 60), + minutes = Math.floor((duration / (1000 * 60)) % 60), + hours = Math.floor((duration / (1000 * 60 * 60)) % 24); -async function getDocs(id: string) { - const files = new Set(); - const docs: { [id: string]: any } = {}; - const fn = (doc: any): string[] => { - const id = doc.id; - if (typeof id === "string" && id.endsWith("Proto")) { - //Skip protos - return []; - } - const ids: string[] = []; - for (const key in doc.fields) { - if (!doc.fields.hasOwnProperty(key)) { - continue; + let hoursS = (hours < 10) ? "0" + hours : hours; + let minutesS = (minutes < 10) ? 
"0" + minutes : minutes; + let secondsS = (seconds < 10) ? "0" + seconds : seconds; + + return hoursS + ":" + minutesS + ":" + secondsS + "." + milliseconds; } - const field = doc.fields[key]; - if (field === undefined || field === null) { - continue; + + async function getDocs(id: string) { + const files = new Set(); + const docs: { [id: string]: any } = {}; + const fn = (doc: any): string[] => { + const id = doc.id; + if (typeof id === "string" && id.endsWith("Proto")) { + //Skip protos + return []; + } + const ids: string[] = []; + for (const key in doc.fields) { + if (!doc.fields.hasOwnProperty(key)) { + continue; + } + const field = doc.fields[key]; + if (field === undefined || field === null) { + continue; + } + + if (field.__type === "proxy" || field.__type === "prefetch_proxy") { + ids.push(field.fieldId); + } else if (field.__type === "script" || field.__type === "computed") { + if (field.captures) { + ids.push(field.captures.fieldId); + } + } else if (field.__type === "list") { + ids.push(...fn(field)); + } else if (typeof field === "string") { + const re = /"(?:dataD|d)ocumentId"\s*:\s*"([\w\-]*)"/g; + let match: string[] | null; + while ((match = re.exec(field)) !== null) { + ids.push(match[1]); + } + } else if (field.__type === "RichTextField") { + const re = /"href"\s*:\s*"(.*?)"/g; + let match: string[] | null; + while ((match = re.exec(field.Data)) !== null) { + const urlString = match[1]; + const split = new URL(urlString).pathname.split("doc/"); + if (split.length > 1) { + ids.push(split[split.length - 1]); + } + } + const re2 = /"src"\s*:\s*"(.*?)"/g; + while ((match = re2.exec(field.Data)) !== null) { + const urlString = match[1]; + const pathname = new URL(urlString).pathname; + files.add(pathname); + } + } else if (["audio", "image", "video", "pdf", "web"].includes(field.__type)) { + const url = new URL(field.url); + const pathname = url.pathname; + files.add(pathname); + } + } + + if (doc.id) { + docs[doc.id] = doc; + } + return ids; + }; + await Database.Instance.visit([id], fn); + return { id, docs, files }; } + DashServer.get("/serializeDoc/:docId", async (req, res) => { + const { docs, files } = await getDocs(req.params.docId); + res.send({ docs, files: Array.from(files) }); + }); - if (field.__type === "proxy" || field.__type === "prefetch_proxy") { - ids.push(field.fieldId); - } else if (field.__type === "script" || field.__type === "computed") { - if (field.captures) { - ids.push(field.captures.fieldId); + router.addSupervisedRoute({ + method: Method.GET, + subscription: new RouteSubscriber(RouteStore.imageHierarchyExport).add('docId'), + onValidation: async (_user, req, res) => { + const id = req.params.docId; + const hierarchy: Hierarchy = {}; + await targetedVisitorRecursive(id, hierarchy); + BuildAndDispatchZip(res, async zip => { + await hierarchyTraverserRecursive(zip, hierarchy); + }); } - } else if (field.__type === "list") { - ids.push(...fn(field)); - } else if (typeof field === "string") { - const re = /"(?:dataD|d)ocumentId"\s*:\s*"([\w\-]*)"/g; - let match: string[] | null; - while ((match = re.exec(field)) !== null) { - ids.push(match[1]); + }); + + const BuildAndDispatchZip = async (res: Response, mutator: ZipMutator): Promise => { + const zip = Archiver('zip'); + zip.pipe(res); + await mutator(zip); + return zip.finalize(); + }; + + const targetedVisitorRecursive = async (seedId: string, hierarchy: Hierarchy): Promise => { + const local: Hierarchy = {}; + const { title, data } = await getData(seedId); + const label = `${title} (${seedId})`; + 
if (Array.isArray(data)) { + hierarchy[label] = local; + await Promise.all(data.map(proxy => targetedVisitorRecursive(proxy.fieldId, local))); + } else { + hierarchy[label + path.extname(data)] = data; } - } else if (field.__type === "RichTextField") { - const re = /"href"\s*:\s*"(.*?)"/g; - let match: string[] | null; - while ((match = re.exec(field.Data)) !== null) { - const urlString = match[1]; - const split = new URL(urlString).pathname.split("doc/"); - if (split.length > 1) { - ids.push(split[split.length - 1]); + }; + + const getData = async (seedId: string): Promise<{ data: string | any[], title: string }> => { + return new Promise<{ data: string | any[], title: string }>((resolve, reject) => { + Database.Instance.getDocument(seedId, async (result: any) => { + const { data, proto, title } = result.fields; + if (data) { + if (data.url) { + resolve({ data: data.url, title }); + } else if (data.fields) { + resolve({ data: data.fields, title }); + } else { + reject(); + } + } + if (proto) { + getData(proto.fieldId).then(resolve, reject); + } + }); + }); + }; + + const hierarchyTraverserRecursive = async (file: Archiver.Archiver, hierarchy: Hierarchy, prefix = "Dash Export"): Promise => { + for (const key of Object.keys(hierarchy)) { + const result = hierarchy[key]; + if (typeof result === "string") { + let path: string; + let matches: RegExpExecArray | null; + if ((matches = /\:1050\/files\/(upload\_[\da-z]{32}.*)/g.exec(result)) !== null) { + path = `${__dirname}/public/files/${matches[1]}`; + } else { + const information = await DashUploadUtils.UploadImage(result); + path = information.mediaPaths[0]; + } + file.file(path, { name: key, prefix }); + } else { + await hierarchyTraverserRecursive(file, result, `${prefix}/${key}`); } } - const re2 = /"src"\s*:\s*"(.*?)"/g; - while ((match = re2.exec(field.Data)) !== null) { - const urlString = match[1]; - const pathname = new URL(urlString).pathname; - files.add(pathname); + }; + + DashServer.get("/downloadId/:docId", async (req, res) => { + res.set('Content-disposition', `attachment;`); + res.set('Content-Type', "application/zip"); + const { id, docs, files } = await getDocs(req.params.docId); + const docString = JSON.stringify({ id, docs }); + const zip = Archiver('zip'); + zip.pipe(res); + zip.append(docString, { name: "doc.json" }); + files.forEach(val => { + zip.file(__dirname + RouteStore.public + val, { name: val.substring(1) }); + }); + zip.finalize(); + }); + + DashServer.post("/uploadDoc", (req, res) => { + let form = new formidable.IncomingForm(); + form.keepExtensions = true; + // let path = req.body.path; + const ids: { [id: string]: string } = {}; + let remap = true; + const getId = (id: string): string => { + if (!remap) return id; + if (id.endsWith("Proto")) return id; + if (id in ids) { + return ids[id]; + } else { + return ids[id] = v4(); + } + }; + const mapFn = (doc: any) => { + if (doc.id) { + doc.id = getId(doc.id); + } + for (const key in doc.fields) { + if (!doc.fields.hasOwnProperty(key)) { + continue; + } + const field = doc.fields[key]; + if (field === undefined || field === null) { + continue; + } + + if (field.__type === "proxy" || field.__type === "prefetch_proxy") { + field.fieldId = getId(field.fieldId); + } else if (field.__type === "script" || field.__type === "computed") { + if (field.captures) { + field.captures.fieldId = getId(field.captures.fieldId); + } + } else if (field.__type === "list") { + mapFn(field); + } else if (typeof field === "string") { + const re = 
/("(?:dataD|d)ocumentId"\s*:\s*")([\w\-]*)"/g; + doc.fields[key] = (field as any).replace(re, (match: any, p1: string, p2: string) => { + return `${p1}${getId(p2)}"`; + }); + } else if (field.__type === "RichTextField") { + const re = /("href"\s*:\s*")(.*?)"/g; + field.Data = field.Data.replace(re, (match: any, p1: string, p2: string) => { + return `${p1}${getId(p2)}"`; + }); + } + } + }; + form.parse(req, async (err, fields, files) => { + remap = fields.remap !== "false"; + let id: string = ""; + try { + for (const name in files) { + const path_2 = files[name].path; + const zip = new AdmZip(path_2); + zip.getEntries().forEach((entry: any) => { + if (!entry.entryName.startsWith("files/")) return; + let dirname = path.dirname(entry.entryName) + "/"; + let extname = path.extname(entry.entryName); + let basename = path.basename(entry.entryName).split(".")[0]; + // zip.extractEntryTo(dirname + basename + "_o" + extname, __dirname + RouteStore.public, true, false); + // zip.extractEntryTo(dirname + basename + "_s" + extname, __dirname + RouteStore.public, true, false); + // zip.extractEntryTo(dirname + basename + "_m" + extname, __dirname + RouteStore.public, true, false); + // zip.extractEntryTo(dirname + basename + "_l" + extname, __dirname + RouteStore.public, true, false); + try { + zip.extractEntryTo(entry.entryName, __dirname + RouteStore.public, true, false); + dirname = "/" + dirname; + + fs.createReadStream(__dirname + RouteStore.public + dirname + basename + extname).pipe(fs.createWriteStream(__dirname + RouteStore.public + dirname + basename + "_o" + extname)); + fs.createReadStream(__dirname + RouteStore.public + dirname + basename + extname).pipe(fs.createWriteStream(__dirname + RouteStore.public + dirname + basename + "_s" + extname)); + fs.createReadStream(__dirname + RouteStore.public + dirname + basename + extname).pipe(fs.createWriteStream(__dirname + RouteStore.public + dirname + basename + "_m" + extname)); + fs.createReadStream(__dirname + RouteStore.public + dirname + basename + extname).pipe(fs.createWriteStream(__dirname + RouteStore.public + dirname + basename + "_l" + extname)); + } catch (e) { + console.log(e); + } + }); + const json = zip.getEntry("doc.json"); + let docs: any; + try { + let data = JSON.parse(json.getData().toString("utf8")); + docs = data.docs; + id = data.id; + docs = Object.keys(docs).map(key => docs[key]); + docs.forEach(mapFn); + await Promise.all(docs.map((doc: any) => new Promise(res => Database.Instance.replace(doc.id, doc, (err, r) => { + err && console.log(err); + res(); + }, true, "newDocuments")))); + } catch (e) { console.log(e); } + fs.unlink(path_2, () => { }); + } + if (id) { + res.send(JSON.stringify(getId(id))); + } else { + res.send(JSON.stringify("error")); + } + } catch (e) { console.log(e); } + }); + }); + + DashServer.get("/whosOnline", (req, res) => { + let users: any = { active: {}, inactive: {} }; + const now = Date.now(); + + for (const user in timeMap) { + const time = timeMap[user]; + const key = ((now - time) / 1000) < (60 * 5) ? 
"active" : "inactive"; + users[key][user] = `Last active ${msToTime(now - time)} ago`; } - } else if (["audio", "image", "video", "pdf", "web"].includes(field.__type)) { - const url = new URL(field.url); - const pathname = url.pathname; - files.add(pathname); + + res.send(users); + }); + DashServer.get("/thumbnail/:filename", (req, res) => { + let filename = req.params.filename; + let noExt = filename.substring(0, filename.length - ".png".length); + let pagenumber = parseInt(noExt.split('-')[1]); + fs.exists(uploadDirectory + filename, (exists: boolean) => { + console.log(`${uploadDirectory + filename} ${exists ? "exists" : "does not exist"}`); + if (exists) { + let input = fs.createReadStream(uploadDirectory + filename); + probe(input, (err: any, result: any) => { + if (err) { + console.log(err); + console.log(`error on ${filename}`); + return; + } + res.send({ path: "/files/" + filename, width: result.width, height: result.height }); + }); + } + else { + LoadPage(uploadDirectory + filename.substring(0, filename.length - noExt.split('-')[1].length - ".PNG".length - 1) + ".pdf", pagenumber, res); + } + }); + }); + + function LoadPage(file: string, pageNumber: number, res: Response) { + console.log(file); + Pdfjs.getDocument(file).promise + .then((pdf: Pdfjs.PDFDocumentProxy) => { + let factory = new NodeCanvasFactory(); + console.log(pageNumber); + pdf.getPage(pageNumber).then((page: Pdfjs.PDFPageProxy) => { + console.log("reading " + page); + let viewport = page.getViewport(1 as any); + let canvasAndContext = factory.create(viewport.width, viewport.height); + let renderContext = { + canvasContext: canvasAndContext.context, + viewport: viewport, + canvasFactory: factory + }; + console.log("read " + pageNumber); + + page.render(renderContext).promise + .then(() => { + console.log("saving " + pageNumber); + let stream = canvasAndContext.canvas.createPNGStream(); + let pngFile = `${file.substring(0, file.length - ".pdf".length)}-${pageNumber}.PNG`; + let out = fs.createWriteStream(pngFile); + stream.pipe(out); + out.on("finish", () => { + console.log(`Success! Saved to ${pngFile}`); + let name = path.basename(pngFile); + res.send({ path: "/files/" + name, width: viewport.width, height: viewport.height }); + }); + }, (reason: string) => { + console.error(reason + ` ${pageNumber}`); + }); + }); + }); } - } - if (doc.id) { - docs[doc.id] = doc; - } - return ids; - }; - await Database.Instance.visit([id], fn); - return { id, docs, files }; -} -app.get("/serializeDoc/:docId", async (req, res) => { - const { docs, files } = await getDocs(req.params.docId); - res.send({ docs, files: Array.from(files) }); -}); + /** + * Anyone attempting to navigate to localhost at this port will + * first have to log in. 
+ */ + router.addSupervisedRoute({ + method: Method.GET, + subscription: RouteStore.root, + onValidation: (_user, _req, res) => res.redirect(RouteStore.home) + }); -export type Hierarchy = { [id: string]: string | Hierarchy }; -export type ZipMutator = (file: Archiver.Archiver) => void | Promise; + router.addSupervisedRoute({ + method: Method.GET, + subscription: RouteStore.getUsers, + onValidation: async (_user, _req, res) => { + const cursor = await Database.Instance.query({}, { email: 1, userDocumentId: 1 }, "users"); + const results = await cursor.toArray(); + res.send(results.map(user => ({ email: user.email, userDocumentId: user.userDocumentId }))); + }, + }); -addSecureRoute({ - method: Method.GET, - subscribers: new RouteSubscriber(RouteStore.imageHierarchyExport).add('docId'), - onValidation: async (_user, req, res) => { - const id = req.params.docId; - const hierarchy: Hierarchy = {}; - await targetedVisitorRecursive(id, hierarchy); - BuildAndDispatchZip(res, async zip => { - await hierarchyTraverserRecursive(zip, hierarchy); - }); - } -}); - -const BuildAndDispatchZip = async (res: Response, mutator: ZipMutator): Promise => { - const zip = Archiver('zip'); - zip.pipe(res); - await mutator(zip); - return zip.finalize(); -}; - -const targetedVisitorRecursive = async (seedId: string, hierarchy: Hierarchy): Promise => { - const local: Hierarchy = {}; - const { title, data } = await getData(seedId); - const label = `${title} (${seedId})`; - if (Array.isArray(data)) { - hierarchy[label] = local; - await Promise.all(data.map(proxy => targetedVisitorRecursive(proxy.fieldId, local))); - } else { - hierarchy[label + path.extname(data)] = data; - } -}; - -const getData = async (seedId: string): Promise<{ data: string | any[], title: string }> => { - return new Promise<{ data: string | any[], title: string }>((resolve, reject) => { - Database.Instance.getDocument(seedId, async (result: any) => { - const { data, proto, title } = result.fields; - if (data) { - if (data.url) { - resolve({ data: data.url, title }); - } else if (data.fields) { - resolve({ data: data.fields, title }); - } else { - reject(); + router.addSupervisedRoute({ + method: Method.GET, + subscription: [RouteStore.home, RouteStore.openDocumentWithId], + onValidation: (_user, req, res) => { + let detector = new mobileDetect(req.headers['user-agent'] || ""); + let filename = detector.mobile() !== null ? 
'mobile/image.html' : 'index.html'; + res.sendFile(path.join(__dirname, '../../deploy/' + filename)); + }, + }); + + router.addSupervisedRoute({ + method: Method.GET, + subscription: RouteStore.getUserDocumentId, + onValidation: (user, _req, res) => res.send(user.userDocumentId), + onRejection: (_req, res) => res.send(undefined) + }); + + router.addSupervisedRoute({ + method: Method.GET, + subscription: RouteStore.getCurrUser, + onValidation: (user, _req, res) => { res.send(JSON.stringify(user)); }, + onRejection: (_req, res) => res.send(JSON.stringify({ id: "__guest__", email: "" })) + }); + + const ServicesApiKeyMap = new Map([ + ["face", process.env.FACE], + ["vision", process.env.VISION], + ["handwriting", process.env.HANDWRITING] + ]); + + router.addSupervisedRoute({ + method: Method.GET, + subscription: new RouteSubscriber(RouteStore.cognitiveServices).add('requestedservice'), + onValidation: (_user, req, res) => { + let service = req.params.requestedservice; + res.send(ServicesApiKeyMap.get(service)); + } + }); + + class NodeCanvasFactory { + create = (width: number, height: number) => { + var canvas = createCanvas(width, height); + var context = canvas.getContext('2d'); + return { + canvas: canvas, + context: context, + }; + } + + reset = (canvasAndContext: any, width: number, height: number) => { + canvasAndContext.canvas.width = width; + canvasAndContext.canvas.height = height; + } + + destroy = (canvasAndContext: any) => { + canvasAndContext.canvas.width = 0; + canvasAndContext.canvas.height = 0; + canvasAndContext.canvas = null; + canvasAndContext.context = null; } } - if (proto) { - getData(proto.fieldId).then(resolve, reject); - } - }); - }); -}; - -const hierarchyTraverserRecursive = async (file: Archiver.Archiver, hierarchy: Hierarchy, prefix = "Dash Export"): Promise => { - for (const key of Object.keys(hierarchy)) { - const result = hierarchy[key]; - if (typeof result === "string") { - let path: string; - let matches: RegExpExecArray | null; - if ((matches = /\:1050\/files\/(upload\_[\da-z]{32}.*)/g.exec(result)) !== null) { - path = `${__dirname}/public/files/${matches[1]}`; - } else { - const information = await DashUploadUtils.UploadImage(result); - path = information.mediaPaths[0]; - } - file.file(path, { name: key, prefix }); - } else { - await hierarchyTraverserRecursive(file, result, `${prefix}/${key}`); - } - } -}; - -app.get("/downloadId/:docId", async (req, res) => { - res.set('Content-disposition', `attachment;`); - res.set('Content-Type', "application/zip"); - const { id, docs, files } = await getDocs(req.params.docId); - const docString = JSON.stringify({ id, docs }); - const zip = Archiver('zip'); - zip.pipe(res); - zip.append(docString, { name: "doc.json" }); - files.forEach(val => { - zip.file(__dirname + RouteStore.public + val, { name: val.substring(1) }); - }); - zip.finalize(); -}); - -app.post("/uploadDoc", (req, res) => { - let form = new formidable.IncomingForm(); - form.keepExtensions = true; - // let path = req.body.path; - const ids: { [id: string]: string } = {}; - let remap = true; - const getId = (id: string): string => { - if (!remap) return id; - if (id.endsWith("Proto")) return id; - if (id in ids) { - return ids[id]; - } else { - return ids[id] = v4(); - } - }; - const mapFn = (doc: any) => { - if (doc.id) { - doc.id = getId(doc.id); - } - for (const key in doc.fields) { - if (!doc.fields.hasOwnProperty(key)) { - continue; - } - const field = doc.fields[key]; - if (field === undefined || field === null) { - continue; + + const pngTypes = 
[".png", ".PNG"]; + const jpgTypes = [".jpg", ".JPG", ".jpeg", ".JPEG"]; + const uploadDirectory = __dirname + "/public/files/"; + const pdfDirectory = uploadDirectory + "text"; + DashUploadUtils.createIfNotExists(pdfDirectory); + + interface ImageFileResponse { + name: string; + path: string; + type: string; + exif: Opt; } - if (field.__type === "proxy" || field.__type === "prefetch_proxy") { - field.fieldId = getId(field.fieldId); - } else if (field.__type === "script" || field.__type === "computed") { - if (field.captures) { - field.captures.fieldId = getId(field.captures.fieldId); + router.addSupervisedRoute({ + method: Method.POST, + subscription: RouteStore.upload, + onValidation: (_user, req, res) => { + let form = new formidable.IncomingForm(); + form.uploadDir = uploadDirectory; + form.keepExtensions = true; + form.parse(req, async (_err, _fields, files) => { + let results: ImageFileResponse[] = []; + for (const key in files) { + const { type, path: location, name } = files[key]; + const filename = path.basename(location); + let uploadInformation: Opt; + if (filename.endsWith(".pdf")) { + let dataBuffer = fs.readFileSync(uploadDirectory + filename); + const result: ParsedPDF = await pdf(dataBuffer); + await new Promise(resolve => { + const path = pdfDirectory + "/" + filename.substring(0, filename.length - ".pdf".length) + ".txt"; + fs.createWriteStream(path).write(result.text, error => { + if (!error) { + resolve(); + } else { + reject(error); + } + }); + }); + } else { + uploadInformation = await DashUploadUtils.UploadImage(uploadDirectory + filename, filename); + } + const exif = uploadInformation ? uploadInformation.exifData : undefined; + results.push({ name, type, path: `/files/${filename}`, exif }); + + } + _success(res, results); + }); } - } else if (field.__type === "list") { - mapFn(field); - } else if (typeof field === "string") { - const re = /("(?:dataD|d)ocumentId"\s*:\s*")([\w\-]*)"/g; - doc.fields[key] = (field as any).replace(re, (match: any, p1: string, p2: string) => { - return `${p1}${getId(p2)}"`; - }); - } else if (field.__type === "RichTextField") { - const re = /("href"\s*:\s*")(.*?)"/g; - field.Data = field.Data.replace(re, (match: any, p1: string, p2: string) => { - return `${p1}${getId(p2)}"`; - }); - } - } - }; - form.parse(req, async (err, fields, files) => { - remap = fields.remap !== "false"; - let id: string = ""; - try { - for (const name in files) { - const path_2 = files[name].path; - const zip = new AdmZip(path_2); - zip.getEntries().forEach((entry: any) => { - if (!entry.entryName.startsWith("files/")) return; - let dirname = path.dirname(entry.entryName) + "/"; - let extname = path.extname(entry.entryName); - let basename = path.basename(entry.entryName).split(".")[0]; - // zip.extractEntryTo(dirname + basename + "_o" + extname, __dirname + RouteStore.public, true, false); - // zip.extractEntryTo(dirname + basename + "_s" + extname, __dirname + RouteStore.public, true, false); - // zip.extractEntryTo(dirname + basename + "_m" + extname, __dirname + RouteStore.public, true, false); - // zip.extractEntryTo(dirname + basename + "_l" + extname, __dirname + RouteStore.public, true, false); - try { - zip.extractEntryTo(entry.entryName, __dirname + RouteStore.public, true, false); - dirname = "/" + dirname; - - fs.createReadStream(__dirname + RouteStore.public + dirname + basename + extname).pipe(fs.createWriteStream(__dirname + RouteStore.public + dirname + basename + "_o" + extname)); - fs.createReadStream(__dirname + RouteStore.public + dirname + 
basename + extname).pipe(fs.createWriteStream(__dirname + RouteStore.public + dirname + basename + "_s" + extname)); - fs.createReadStream(__dirname + RouteStore.public + dirname + basename + extname).pipe(fs.createWriteStream(__dirname + RouteStore.public + dirname + basename + "_m" + extname)); - fs.createReadStream(__dirname + RouteStore.public + dirname + basename + extname).pipe(fs.createWriteStream(__dirname + RouteStore.public + dirname + basename + "_l" + extname)); - } catch (e) { - console.log(e); + }); + + router.addSupervisedRoute({ + method: Method.POST, + subscription: RouteStore.inspectImage, + onValidation: async (_user, req, res) => { + const { source } = req.body; + if (typeof source === "string") { + const uploadInformation = await DashUploadUtils.UploadImage(source); + return res.send(await DashUploadUtils.InspectImage(uploadInformation.mediaPaths[0])); } - }); - const json = zip.getEntry("doc.json"); - let docs: any; - try { - let data = JSON.parse(json.getData().toString("utf8")); - docs = data.docs; - id = data.id; - docs = Object.keys(docs).map(key => docs[key]); - docs.forEach(mapFn); - await Promise.all(docs.map((doc: any) => new Promise(res => Database.Instance.replace(doc.id, doc, (err, r) => { - err && console.log(err); - res(); - }, true, "newDocuments")))); - } catch (e) { console.log(e); } - fs.unlink(path_2, () => { }); - } - if (id) { - res.send(JSON.stringify(getId(id))); - } else { - res.send(JSON.stringify("error")); - } - } catch (e) { console.log(e); } - }); -}); - -app.get("/whosOnline", (req, res) => { - let users: any = { active: {}, inactive: {} }; - const now = Date.now(); - - for (const user in timeMap) { - const time = timeMap[user]; - const key = ((now - time) / 1000) < (60 * 5) ? "active" : "inactive"; - users[key][user] = `Last active ${msToTime(now - time)} ago`; - } - - res.send(users); -}); -app.get("/thumbnail/:filename", (req, res) => { - let filename = req.params.filename; - let noExt = filename.substring(0, filename.length - ".png".length); - let pagenumber = parseInt(noExt.split('-')[1]); - fs.exists(uploadDirectory + filename, (exists: boolean) => { - console.log(`${uploadDirectory + filename} ${exists ? 
"exists" : "does not exist"}`); - if (exists) { - let input = fs.createReadStream(uploadDirectory + filename); - probe(input, (err: any, result: any) => { - if (err) { - console.log(err); - console.log(`error on ${filename}`); - return; + res.send({}); } - res.send({ path: "/files/" + filename, width: result.width, height: result.height }); }); - } - else { - LoadPage(uploadDirectory + filename.substring(0, filename.length - noExt.split('-')[1].length - ".PNG".length - 1) + ".pdf", pagenumber, res); - } - }); -}); - -function LoadPage(file: string, pageNumber: number, res: Response) { - console.log(file); - Pdfjs.getDocument(file).promise - .then((pdf: Pdfjs.PDFDocumentProxy) => { - let factory = new NodeCanvasFactory(); - console.log(pageNumber); - pdf.getPage(pageNumber).then((page: Pdfjs.PDFPageProxy) => { - console.log("reading " + page); - let viewport = page.getViewport(1 as any); - let canvasAndContext = factory.create(viewport.width, viewport.height); - let renderContext = { - canvasContext: canvasAndContext.context, - viewport: viewport, - canvasFactory: factory - }; - console.log("read " + pageNumber); - - page.render(renderContext).promise - .then(() => { - console.log("saving " + pageNumber); - let stream = canvasAndContext.canvas.createPNGStream(); - let pngFile = `${file.substring(0, file.length - ".pdf".length)}-${pageNumber}.PNG`; - let out = fs.createWriteStream(pngFile); - stream.pipe(out); - out.on("finish", () => { - console.log(`Success! Saved to ${pngFile}`); - let name = path.basename(pngFile); - res.send({ path: "/files/" + name, width: viewport.width, height: viewport.height }); - }); - }, (reason: string) => { - console.error(reason + ` ${pageNumber}`); + + router.addSupervisedRoute({ + method: Method.POST, + subscription: RouteStore.dataUriToImage, + onValidation: (_user, req, res) => { + const uri = req.body.uri; + const filename = req.body.name; + if (!uri || !filename) { + res.status(401).send("incorrect parameters specified"); + return; + } + imageDataUri.outputFile(uri, uploadDirectory + filename).then((savedName: string) => { + const ext = path.extname(savedName); + let resizers = [ + { resizer: sharp().resize(100, undefined, { withoutEnlargement: true }), suffix: "_s" }, + { resizer: sharp().resize(400, undefined, { withoutEnlargement: true }), suffix: "_m" }, + { resizer: sharp().resize(900, undefined, { withoutEnlargement: true }), suffix: "_l" }, + ]; + let isImage = false; + if (pngTypes.includes(ext)) { + resizers.forEach(element => { + element.resizer = element.resizer.png(); + }); + isImage = true; + } else if (jpgTypes.includes(ext)) { + resizers.forEach(element => { + element.resizer = element.resizer.jpeg(); + }); + isImage = true; + } + if (isImage) { + resizers.forEach(resizer => { + fs.createReadStream(savedName).pipe(resizer.resizer).pipe(fs.createWriteStream(uploadDirectory + filename + resizer.suffix + ext)); + }); + } + res.send("/files/" + filename + ext); }); + } }); - }); -} -/** - * Anyone attempting to navigate to localhost at this port will - * first have to log in. 
- */ -addSecureRoute({ - method: Method.GET, - subscribers: RouteStore.root, - onValidation: (_user, _req, res) => res.redirect(RouteStore.home) -}); - -addSecureRoute({ - method: Method.GET, - subscribers: RouteStore.getUsers, - onValidation: async (_user, _req, res) => { - const cursor = await Database.Instance.query({}, { email: 1, userDocumentId: 1 }, "users"); - const results = await cursor.toArray(); - res.send(results.map(user => ({ email: user.email, userDocumentId: user.userDocumentId }))); - }, -}); - -addSecureRoute({ - method: Method.GET, - subscribers: [RouteStore.home, RouteStore.openDocumentWithId], - onValidation: (_user, req, res) => { - let detector = new mobileDetect(req.headers['user-agent'] || ""); - let filename = detector.mobile() !== null ? 'mobile/image.html' : 'index.html'; - res.sendFile(path.join(__dirname, '../../deploy/' + filename)); - }, -}); - -addSecureRoute({ - method: Method.GET, - subscribers: RouteStore.getUserDocumentId, - onValidation: (user, _req, res) => res.send(user.userDocumentId), - onRejection: (_req, res) => res.send(undefined) -}); - -addSecureRoute({ - method: Method.GET, - subscribers: RouteStore.getCurrUser, - onValidation: (user, _req, res) => { res.send(JSON.stringify(user)); }, - onRejection: (_req, res) => res.send(JSON.stringify({ id: "__guest__", email: "" })) -}); - -const ServicesApiKeyMap = new Map([ - ["face", process.env.FACE], - ["vision", process.env.VISION], - ["handwriting", process.env.HANDWRITING] -]); - -addSecureRoute({ - method: Method.GET, - subscribers: new RouteSubscriber(RouteStore.cognitiveServices).add('requestedservice'), - onValidation: (_user, req, res) => { - let service = req.params.requestedservice; - res.send(ServicesApiKeyMap.get(service)); - } -}); - -class NodeCanvasFactory { - create = (width: number, height: number) => { - var canvas = createCanvas(width, height); - var context = canvas.getContext('2d'); - return { - canvas: canvas, - context: context, - }; - } - - reset = (canvasAndContext: any, width: number, height: number) => { - canvasAndContext.canvas.width = width; - canvasAndContext.canvas.height = height; - } - - destroy = (canvasAndContext: any) => { - canvasAndContext.canvas.width = 0; - canvasAndContext.canvas.height = 0; - canvasAndContext.canvas = null; - canvasAndContext.context = null; - } -} - -const pngTypes = [".png", ".PNG"]; -const jpgTypes = [".jpg", ".JPG", ".jpeg", ".JPEG"]; -const uploadDirectory = __dirname + "/public/files/"; -const pdfDirectory = uploadDirectory + "text"; -DashUploadUtils.createIfNotExists(pdfDirectory); - -interface ImageFileResponse { - name: string; - path: string; - type: string; - exif: Opt; -} - -addSecureRoute({ - method: Method.POST, - subscribers: RouteStore.upload, - onValidation: (_user, req, res) => { - let form = new formidable.IncomingForm(); - form.uploadDir = uploadDirectory; - form.keepExtensions = true; - form.parse(req, async (_err, _fields, files) => { - let results: ImageFileResponse[] = []; - for (const key in files) { - const { type, path: location, name } = files[key]; - const filename = path.basename(location); - let uploadInformation: Opt; - if (filename.endsWith(".pdf")) { - let dataBuffer = fs.readFileSync(uploadDirectory + filename); - const result: ParsedPDF = await pdf(dataBuffer); - await new Promise(resolve => { - const path = pdfDirectory + "/" + filename.substring(0, filename.length - ".pdf".length) + ".txt"; - fs.createWriteStream(path).write(result.text, error => { - if (!error) { - resolve(); - } else { - reject(error); 
+ // AUTHENTICATION + + // Sign Up + DashServer.get(RouteStore.signup, getSignup); + DashServer.post(RouteStore.signup, postSignup); + + // Log In + DashServer.get(RouteStore.login, getLogin); + DashServer.post(RouteStore.login, postLogin); + + // Log Out + DashServer.get(RouteStore.logout, getLogout); + + // FORGOT PASSWORD EMAIL HANDLING + DashServer.get(RouteStore.forgot, getForgot); + DashServer.post(RouteStore.forgot, postForgot); + + // RESET PASSWORD EMAIL HANDLING + DashServer.get(RouteStore.reset, getReset); + DashServer.post(RouteStore.reset, postReset); + + const headerCharRegex = /[^\t\x20-\x7e\x80-\xff]/; + DashServer.use(RouteStore.corsProxy, (req, res) => { + req.pipe(request(decodeURIComponent(req.url.substring(1)))).on("response", res => { + const headers = Object.keys(res.headers); + headers.forEach(headerName => { + const header = res.headers[headerName]; + if (Array.isArray(header)) { + res.headers[headerName] = header.filter(h => !headerCharRegex.test(h)); + } else if (header) { + if (headerCharRegex.test(header as any)) { + delete res.headers[headerName]; } - }); + } }); - } else { - uploadInformation = await DashUploadUtils.UploadImage(uploadDirectory + filename, filename); + }).pipe(res); + }); + + router.addSupervisedRoute({ + method: Method.GET, + subscription: RouteStore.delete, + onValidation: (_user, _req, res, isRelease) => { + if (isRelease) { + return _permission_denied(res, deletionPermissionError); + } + deleteFields().then(() => res.redirect(RouteStore.home)); } - const exif = uploadInformation ? uploadInformation.exifData : undefined; - results.push({ name, type, path: `/files/${filename}`, exif }); + }); + router.addSupervisedRoute({ + method: Method.GET, + subscription: RouteStore.deleteAll, + onValidation: (_user, _req, res, isRelease) => { + if (isRelease) { + return _permission_denied(res, deletionPermissionError); + } + deleteAll().then(() => res.redirect(RouteStore.home)); + } + }); + + const server = io(); + interface Map { + [key: string]: Client; } - _success(res, results); - }); - } -}); - -addSecureRoute({ - method: Method.POST, - subscribers: RouteStore.inspectImage, - onValidation: async (_user, req, res) => { - const { source } = req.body; - if (typeof source === "string") { - const uploadInformation = await DashUploadUtils.UploadImage(source); - return res.send(await DashUploadUtils.InspectImage(uploadInformation.mediaPaths[0])); - } - res.send({}); - } -}); - -addSecureRoute({ - method: Method.POST, - subscribers: RouteStore.dataUriToImage, - onValidation: (_user, req, res) => { - const uri = req.body.uri; - const filename = req.body.name; - if (!uri || !filename) { - res.status(401).send("incorrect parameters specified"); - return; - } - imageDataUri.outputFile(uri, uploadDirectory + filename).then((savedName: string) => { - const ext = path.extname(savedName); - let resizers = [ - { resizer: sharp().resize(100, undefined, { withoutEnlargement: true }), suffix: "_s" }, - { resizer: sharp().resize(400, undefined, { withoutEnlargement: true }), suffix: "_m" }, - { resizer: sharp().resize(900, undefined, { withoutEnlargement: true }), suffix: "_l" }, - ]; - let isImage = false; - if (pngTypes.includes(ext)) { - resizers.forEach(element => { - element.resizer = element.resizer.png(); - }); - isImage = true; - } else if (jpgTypes.includes(ext)) { - resizers.forEach(element => { - element.resizer = element.resizer.jpeg(); + let clients: Map = {}; + + let socketMap = new Map(); + let timeMap: { [id: string]: number } = {}; + + 
server.on("connection", function (socket: Socket) { + socket.use((packet, next) => { + let id = socketMap.get(socket); + if (id) { + timeMap[id] = Date.now(); + } + next(); }); - isImage = true; + + Utils.Emit(socket, MessageStore.Foo, "handshooken"); + + Utils.AddServerHandler(socket, MessageStore.Bar, guid => barReceived(socket, guid)); + Utils.AddServerHandler(socket, MessageStore.SetField, (args) => setField(socket, args)); + Utils.AddServerHandlerCallback(socket, MessageStore.GetField, getField); + Utils.AddServerHandlerCallback(socket, MessageStore.GetFields, getFields); + if (!router.release) { + Utils.AddServerHandler(socket, MessageStore.DeleteAll, deleteFields); + } + + Utils.AddServerHandler(socket, MessageStore.CreateField, CreateField); + Utils.AddServerHandlerCallback(socket, MessageStore.YoutubeApiQuery, HandleYoutubeQuery); + Utils.AddServerHandler(socket, MessageStore.UpdateField, diff => UpdateField(socket, diff)); + Utils.AddServerHandler(socket, MessageStore.DeleteField, id => DeleteField(socket, id)); + Utils.AddServerHandler(socket, MessageStore.DeleteFields, ids => DeleteFields(socket, ids)); + Utils.AddServerHandlerCallback(socket, MessageStore.GetRefField, GetRefField); + Utils.AddServerHandlerCallback(socket, MessageStore.GetRefFields, GetRefFields); + }); + + async function deleteFields() { + await Database.Instance.deleteAll(); + await Search.Instance.clear(); + await Database.Instance.deleteAll('newDocuments'); } - if (isImage) { - resizers.forEach(resizer => { - fs.createReadStream(savedName).pipe(resizer.resizer).pipe(fs.createWriteStream(uploadDirectory + filename + resizer.suffix + ext)); - }); + + async function deleteAll() { + await Database.Instance.deleteAll(); + await Database.Instance.deleteAll('newDocuments'); + await Database.Instance.deleteAll('sessions'); + await Database.Instance.deleteAll('users'); + await Search.Instance.clear(); } - res.send("/files/" + filename + ext); - }); - } -}); - -// AUTHENTICATION - -// Sign Up -app.get(RouteStore.signup, getSignup); -app.post(RouteStore.signup, postSignup); - -// Log In -app.get(RouteStore.login, getLogin); -app.post(RouteStore.login, postLogin); - -// Log Out -app.get(RouteStore.logout, getLogout); - -// FORGOT PASSWORD EMAIL HANDLING -app.get(RouteStore.forgot, getForgot); -app.post(RouteStore.forgot, postForgot); - -// RESET PASSWORD EMAIL HANDLING -app.get(RouteStore.reset, getReset); -app.post(RouteStore.reset, postReset); - -const headerCharRegex = /[^\t\x20-\x7e\x80-\xff]/; -app.use(RouteStore.corsProxy, (req, res) => { - req.pipe(request(decodeURIComponent(req.url.substring(1)))).on("response", res => { - const headers = Object.keys(res.headers); - headers.forEach(headerName => { - const header = res.headers[headerName]; - if (Array.isArray(header)) { - res.headers[headerName] = header.filter(h => !headerCharRegex.test(h)); - } else if (header) { - if (headerCharRegex.test(header as any)) { - delete res.headers[headerName]; - } + + function barReceived(socket: SocketIO.Socket, guid: string) { + clients[guid] = new Client(guid.toString()); + console.log(`User ${guid} has connected`); + socketMap.set(socket, guid); } - }); - }).pipe(res); -}); - -addSecureRoute({ - method: Method.GET, - subscribers: RouteStore.delete, - onValidation: (_user, _req, res) => { - if (release) { - return _permission_denied(res, deletionPermissionError); - } - deleteFields().then(() => res.redirect(RouteStore.home)); - } -}); - -addSecureRoute({ - method: Method.GET, - subscribers: RouteStore.deleteAll, - 
onValidation: (_user, _req, res) => { - if (release) { - return _permission_denied(res, deletionPermissionError); - } - deleteAll().then(() => res.redirect(RouteStore.home)); - } -}); -app.use(wdm(compiler, { publicPath: config.output.publicPath })); + function getField([id, callback]: [string, (result?: Transferable) => void]) { + Database.Instance.getDocument(id, (result?: Transferable) => + callback(result ? result : undefined)); + } -app.use(whm(compiler)); + function getFields([ids, callback]: [string[], (result: Transferable[]) => void]) { + Database.Instance.getDocuments(ids, callback); + } -// start the Express server -app.listen(port, () => - console.log(`server started at http://localhost:${port}`)); + function setField(socket: Socket, newValue: Transferable) { + Database.Instance.update(newValue.id, newValue, () => + socket.broadcast.emit(MessageStore.SetField.Message, newValue)); + if (newValue.type === Types.Text) { + Search.Instance.updateDocument({ id: newValue.id, data: (newValue as any).data }); + console.log("set field"); + console.log("checking in"); + } + } -const server = io(); -interface Map { - [key: string]: Client; -} -let clients: Map = {}; + function GetRefField([id, callback]: [string, (result?: Transferable) => void]) { + Database.Instance.getDocument(id, callback, "newDocuments"); + } -let socketMap = new Map(); -let timeMap: { [id: string]: number } = {}; + function GetRefFields([ids, callback]: [string[], (result?: Transferable[]) => void]) { + Database.Instance.getDocuments(ids, callback, "newDocuments"); + } -server.on("connection", function (socket: Socket) { - socket.use((packet, next) => { - let id = socketMap.get(socket); - if (id) { - timeMap[id] = Date.now(); - } - next(); - }); + function HandleYoutubeQuery([query, callback]: [YoutubeQueryInput, (result?: any[]) => void]) { + switch (query.type) { + case YoutubeQueryType.Channels: + YoutubeApi.authorizedGetChannel(youtubeApiKey); + break; + case YoutubeQueryType.SearchVideo: + YoutubeApi.authorizedGetVideos(youtubeApiKey, query.userInput, callback); + case YoutubeQueryType.VideoDetails: + YoutubeApi.authorizedGetVideoDetails(youtubeApiKey, query.videoIds, callback); + } + } - Utils.Emit(socket, MessageStore.Foo, "handshooken"); - - Utils.AddServerHandler(socket, MessageStore.Bar, guid => barReceived(socket, guid)); - Utils.AddServerHandler(socket, MessageStore.SetField, (args) => setField(socket, args)); - Utils.AddServerHandlerCallback(socket, MessageStore.GetField, getField); - Utils.AddServerHandlerCallback(socket, MessageStore.GetFields, getFields); - if (!release) { - Utils.AddServerHandler(socket, MessageStore.DeleteAll, deleteFields); - } - - Utils.AddServerHandler(socket, MessageStore.CreateField, CreateField); - Utils.AddServerHandlerCallback(socket, MessageStore.YoutubeApiQuery, HandleYoutubeQuery); - Utils.AddServerHandler(socket, MessageStore.UpdateField, diff => UpdateField(socket, diff)); - Utils.AddServerHandler(socket, MessageStore.DeleteField, id => DeleteField(socket, id)); - Utils.AddServerHandler(socket, MessageStore.DeleteFields, ids => DeleteFields(socket, ids)); - Utils.AddServerHandlerCallback(socket, MessageStore.GetRefField, GetRefField); - Utils.AddServerHandlerCallback(socket, MessageStore.GetRefFields, GetRefFields); -}); - -async function deleteFields() { - await Database.Instance.deleteAll(); - await Search.Instance.clear(); - await Database.Instance.deleteAll('newDocuments'); -} + const credentialsPath = path.join(__dirname, 
"./credentials/google_docs_credentials.json"); + + const EndpointHandlerMap = new Map([ + ["create", (api, params) => api.create(params)], + ["retrieve", (api, params) => api.get(params)], + ["update", (api, params) => api.batchUpdate(params)], + ]); + + DashServer.post(RouteStore.googleDocs + "/:sector/:action", (req, res) => { + let sector: GoogleApiServerUtils.Service = req.params.sector as GoogleApiServerUtils.Service; + let action: GoogleApiServerUtils.Action = req.params.action as GoogleApiServerUtils.Action; + GoogleApiServerUtils.GetEndpoint(GoogleApiServerUtils.Service[sector], { credentialsPath, userId: req.headers.userId as string }).then(endpoint => { + let handler = EndpointHandlerMap.get(action); + if (endpoint && handler) { + let execute = handler(endpoint, req.body).then( + response => res.send(response.data), + rejection => res.send(rejection) + ); + execute.catch(exception => res.send(exception)); + return; + } + res.send(undefined); + }); + }); -async function deleteAll() { - await Database.Instance.deleteAll(); - await Database.Instance.deleteAll('newDocuments'); - await Database.Instance.deleteAll('sessions'); - await Database.Instance.deleteAll('users'); - await Search.Instance.clear(); -} + router.addSupervisedRoute({ + method: Method.GET, + subscription: RouteStore.readGoogleAccessToken, + onValidation: async (user, _req, res) => { + const userId = user.id; + const token = await Database.Auxiliary.GoogleAuthenticationToken.Fetch(userId); + const information = { credentialsPath, userId }; + if (!token) { + return res.send(await GoogleApiServerUtils.GenerateAuthenticationUrl(information)); + } + GoogleApiServerUtils.RetrieveAccessToken(information).then(token => res.send(token)); + } + }); -function barReceived(socket: SocketIO.Socket, guid: string) { - clients[guid] = new Client(guid.toString()); - console.log(`User ${guid} has connected`); - socketMap.set(socket, guid); -} + router.addSupervisedRoute({ + method: Method.POST, + subscription: RouteStore.writeGoogleAccessToken, + onValidation: async (user, req, res) => { + const userId = user.id; + const information = { credentialsPath, userId }; + res.send(await GoogleApiServerUtils.ProcessClientSideCode(information, req.body.authenticationCode)); + } + }); -function getField([id, callback]: [string, (result?: Transferable) => void]) { - Database.Instance.getDocument(id, (result?: Transferable) => - callback(result ? 
result : undefined)); -} + const tokenError = "Unable to successfully upload bytes for all images!"; + const mediaError = "Unable to convert all uploaded bytes to media items!"; + const userIdError = "Unable to parse the identification of the user!"; + + router.addSupervisedRoute({ + method: Method.POST, + subscription: RouteStore.googlePhotosMediaUpload, + onValidation: async (user, req, res) => { + const { media } = req.body; + const userId = user.id; + if (!userId) { + return _error(res, userIdError); + } -function getFields([ids, callback]: [string[], (result: Transferable[]) => void]) { - Database.Instance.getDocuments(ids, callback); -} + await GooglePhotosUploadUtils.initialize({ credentialsPath, userId }); + + let failed: number[] = []; + + const newMediaItems = await BatchedArray.from(media, { batchSize: 25 }).batchedMapPatientInterval( + { magnitude: 100, unit: TimeUnit.Milliseconds }, + async (batch: GooglePhotosUploadUtils.MediaInput[]) => { + const newMediaItems: NewMediaItem[] = []; + for (let index = 0; index < batch.length; index++) { + const element = batch[index]; + const uploadToken = await GooglePhotosUploadUtils.DispatchGooglePhotosUpload(element.url); + if (!uploadToken) { + failed.push(index); + } else { + newMediaItems.push({ + description: element.description, + simpleMediaItem: { uploadToken } + }); + } + } + return newMediaItems; + } + ); -function setField(socket: Socket, newValue: Transferable) { - Database.Instance.update(newValue.id, newValue, () => - socket.broadcast.emit(MessageStore.SetField.Message, newValue)); - if (newValue.type === Types.Text) { - Search.Instance.updateDocument({ id: newValue.id, data: (newValue as any).data }); - console.log("set field"); - console.log("checking in"); - } -} + const failedCount = failed.length; + if (failedCount) { + console.error(`Unable to upload ${failedCount} image${failedCount === 1 ? 
"" : "s"} to Google's servers`); + } -function GetRefField([id, callback]: [string, (result?: Transferable) => void]) { - Database.Instance.getDocument(id, callback, "newDocuments"); -} + GooglePhotosUploadUtils.CreateMediaItems(newMediaItems, req.body.album).then( + result => _success(res, { results: result.newMediaItemResults, failed }), + error => _error(res, mediaError, error) + ); + } + }); -function GetRefFields([ids, callback]: [string[], (result?: Transferable[]) => void]) { - Database.Instance.getDocuments(ids, callback, "newDocuments"); -} + interface MediaItem { + baseUrl: string; + filename: string; + } + const prefix = "google_photos_"; -function HandleYoutubeQuery([query, callback]: [YoutubeQueryInput, (result?: any[]) => void]) { - switch (query.type) { - case YoutubeQueryType.Channels: - YoutubeApi.authorizedGetChannel(youtubeApiKey); - break; - case YoutubeQueryType.SearchVideo: - YoutubeApi.authorizedGetVideos(youtubeApiKey, query.userInput, callback); - case YoutubeQueryType.VideoDetails: - YoutubeApi.authorizedGetVideoDetails(youtubeApiKey, query.videoIds, callback); - } -} + const downloadError = "Encountered an error while executing downloads."; + const requestError = "Unable to execute download: the body's media items were malformed."; + const deletionPermissionError = "Cannot perform specialized delete outside of the development environment!"; -const credentialsPath = path.join(__dirname, "./credentials/google_docs_credentials.json"); - -const EndpointHandlerMap = new Map([ - ["create", (api, params) => api.create(params)], - ["retrieve", (api, params) => api.get(params)], - ["update", (api, params) => api.batchUpdate(params)], -]); - -app.post(RouteStore.googleDocs + "/:sector/:action", (req, res) => { - let sector: GoogleApiServerUtils.Service = req.params.sector as GoogleApiServerUtils.Service; - let action: GoogleApiServerUtils.Action = req.params.action as GoogleApiServerUtils.Action; - GoogleApiServerUtils.GetEndpoint(GoogleApiServerUtils.Service[sector], { credentialsPath, userId: req.headers.userId as string }).then(endpoint => { - let handler = EndpointHandlerMap.get(action); - if (endpoint && handler) { - let execute = handler(endpoint, req.body).then( - response => res.send(response.data), - rejection => res.send(rejection) - ); - execute.catch(exception => res.send(exception)); - return; - } - res.send(undefined); - }); -}); - -addSecureRoute({ - method: Method.GET, - subscribers: RouteStore.readGoogleAccessToken, - onValidation: async (user, _req, res) => { - const userId = user.id; - const token = await Database.Auxiliary.GoogleAuthenticationToken.Fetch(userId); - const information = { credentialsPath, userId }; - if (!token) { - return res.send(await GoogleApiServerUtils.GenerateAuthenticationUrl(information)); - } - GoogleApiServerUtils.RetrieveAccessToken(information).then(token => res.send(token)); - } -}); - -addSecureRoute({ - method: Method.POST, - subscribers: RouteStore.writeGoogleAccessToken, - onValidation: async (user, req, res) => { - const userId = user.id; - const information = { credentialsPath, userId }; - res.send(await GoogleApiServerUtils.ProcessClientSideCode(information, req.body.authenticationCode)); - } -}); - -const tokenError = "Unable to successfully upload bytes for all images!"; -const mediaError = "Unable to convert all uploaded bytes to media items!"; -const userIdError = "Unable to parse the identification of the user!"; + DashServer.get("/deleteWithAux", async (_req, res) => { + if (release) { + return 
_permission_denied(res, deletionPermissionError); + } + await Database.Auxiliary.DeleteAll(); + res.redirect(RouteStore.delete); + }); -export interface NewMediaItem { - description: string; - simpleMediaItem: { - uploadToken: string; - }; -} + DashServer.get("/deleteWithGoogleCredentials", async (req, res) => { + if (release) { + return _permission_denied(res, deletionPermissionError); + } + await Database.Auxiliary.GoogleAuthenticationToken.DeleteAll(); + res.redirect(RouteStore.delete); + }); -addSecureRoute({ - method: Method.POST, - subscribers: RouteStore.googlePhotosMediaUpload, - onValidation: async (user, req, res) => { - const { media } = req.body; - const userId = user.id; - if (!userId) { - return _error(res, userIdError); - } + const UploadError = (count: number) => `Unable to upload ${count} images to Dash's server`; + DashServer.post(RouteStore.googlePhotosMediaDownload, async (req, res) => { + const contents: { mediaItems: MediaItem[] } = req.body; + let failed = 0; + if (contents) { + const completed: Opt[] = []; + for (let item of contents.mediaItems) { + const { contentSize, ...attributes } = await DashUploadUtils.InspectImage(item.baseUrl); + const found: Opt = await Database.Auxiliary.QueryUploadHistory(contentSize!); + if (!found) { + const upload = await DashUploadUtils.UploadInspectedImage({ contentSize, ...attributes }, item.filename, prefix).catch(error => _error(res, downloadError, error)); + if (upload) { + completed.push(upload); + await Database.Auxiliary.LogUpload(upload); + } else { + failed++; + } + } else { + completed.push(found); + } + } + if (failed) { + return _error(res, UploadError(failed)); + } + return _success(res, completed); + } + _invalid(res, requestError); + }); - await GooglePhotosUploadUtils.initialize({ credentialsPath, userId }); + const suffixMap: { [type: string]: (string | [string, string | ((json: any) => any)]) } = { + "number": "_n", + "string": "_t", + "boolean": "_b", + "image": ["_t", "url"], + "video": ["_t", "url"], + "pdf": ["_t", "url"], + "audio": ["_t", "url"], + "web": ["_t", "url"], + "date": ["_d", value => new Date(value.date).toISOString()], + "proxy": ["_i", "fieldId"], + "list": ["_l", list => { + const results = []; + for (const value of list.fields) { + const term = ToSearchTerm(value); + if (term) { + results.push(term.value); + } + } + return results.length ? 
results : null; + }] + }; - let failed: number[] = []; + function ToSearchTerm(val: any): { suffix: string, value: any } | undefined { + if (val === null || val === undefined) { + return; + } + const type = val.__type || typeof val; + let suffix = suffixMap[type]; + if (!suffix) { + return; + } - const newMediaItems = await BatchedArray.from(media, { batchSize: 25 }).batchedMapPatientInterval( - { magnitude: 100, unit: TimeUnit.Milliseconds }, - async (batch: GooglePhotosUploadUtils.MediaInput[]) => { - const newMediaItems: NewMediaItem[] = []; - for (let index = 0; index < batch.length; index++) { - const element = batch[index]; - const uploadToken = await GooglePhotosUploadUtils.DispatchGooglePhotosUpload(element.url); - if (!uploadToken) { - failed.push(index); + if (Array.isArray(suffix)) { + const accessor = suffix[1]; + if (typeof accessor === "function") { + val = accessor(val); } else { - newMediaItems.push({ - description: element.description, - simpleMediaItem: { uploadToken } - }); + val = val[accessor]; } + suffix = suffix[0]; } - return newMediaItems; - } - ); - const failedCount = failed.length; - if (failedCount) { - console.error(`Unable to upload ${failedCount} image${failedCount === 1 ? "" : "s"} to Google's servers`); - } + return { suffix, value: val }; + } - GooglePhotosUploadUtils.CreateMediaItems(newMediaItems, req.body.album).then( - result => _success(res, { results: result.newMediaItemResults, failed }), - error => _error(res, mediaError, error) - ); - } -}); + function getSuffix(value: string | [string, any]): string { + return typeof value === "string" ? value : value[0]; + } -interface MediaItem { - baseUrl: string; - filename: string; -} -const prefix = "google_photos_"; - -const downloadError = "Encountered an error while executing downloads."; -const requestError = "Unable to execute download: the body's media items were malformed."; -const deletionPermissionError = "Cannot perform specialized delete outside of the development environment!"; - -app.get("/deleteWithAux", async (_req, res) => { - if (release) { - return _permission_denied(res, deletionPermissionError); - } - await Database.Auxiliary.DeleteAll(); - res.redirect(RouteStore.delete); -}); - -app.get("/deleteWithGoogleCredentials", async (req, res) => { - if (release) { - return _permission_denied(res, deletionPermissionError); - } - await Database.Auxiliary.GoogleAuthenticationToken.DeleteAll(); - res.redirect(RouteStore.delete); -}); - -const UploadError = (count: number) => `Unable to upload ${count} images to Dash's server`; -app.post(RouteStore.googlePhotosMediaDownload, async (req, res) => { - const contents: { mediaItems: MediaItem[] } = req.body; - let failed = 0; - if (contents) { - const completed: Opt[] = []; - for (let item of contents.mediaItems) { - const { contentSize, ...attributes } = await DashUploadUtils.InspectImage(item.baseUrl); - const found: Opt = await Database.Auxiliary.QueryUploadHistory(contentSize!); - if (!found) { - const upload = await DashUploadUtils.UploadInspectedImage({ contentSize, ...attributes }, item.filename, prefix).catch(error => _error(res, downloadError, error)); - if (upload) { - completed.push(upload); - await Database.Auxiliary.LogUpload(upload); - } else { - failed++; + function UpdateField(socket: Socket, diff: Diff) { + Database.Instance.update(diff.id, diff.diff, + () => socket.broadcast.emit(MessageStore.UpdateField.Message, diff), false, "newDocuments"); + const docfield = diff.diff.$set; + if (!docfield) { + return; + } + const update: any = { 
id: diff.id }; + let dynfield = false; + for (let key in docfield) { + if (!key.startsWith("fields.")) continue; + dynfield = true; + let val = docfield[key]; + key = key.substring(7); + Object.values(suffixMap).forEach(suf => update[key + getSuffix(suf)] = { set: null }); + let term = ToSearchTerm(val); + if (term !== undefined) { + let { suffix, value } = term; + update[key + suffix] = { set: value }; + } + } + if (dynfield) { + Search.Instance.updateDocument(update); } - } else { - completed.push(found); - } - } - if (failed) { - return _error(res, UploadError(failed)); - } - return _success(res, completed); - } - _invalid(res, requestError); -}); - -const _error = (res: Response, message: string, error?: any) => { - res.statusMessage = message; - res.status(STATUS.EXECUTION_ERROR).send(error); -}; - -const _success = (res: Response, body: any) => { - res.status(STATUS.OK).send(body); -}; - -const _invalid = (res: Response, message: string) => { - res.statusMessage = message; - res.status(STATUS.BAD_REQUEST).send(); -}; - -const _permission_denied = (res: Response, message: string) => { - res.statusMessage = message; - res.status(STATUS.BAD_REQUEST).send("Permission Denied!"); -}; - -const suffixMap: { [type: string]: (string | [string, string | ((json: any) => any)]) } = { - "number": "_n", - "string": "_t", - "boolean": "_b", - "image": ["_t", "url"], - "video": ["_t", "url"], - "pdf": ["_t", "url"], - "audio": ["_t", "url"], - "web": ["_t", "url"], - "date": ["_d", value => new Date(value.date).toISOString()], - "proxy": ["_i", "fieldId"], - "list": ["_l", list => { - const results = []; - for (const value of list.fields) { - const term = ToSearchTerm(value); - if (term) { - results.push(term.value); } - } - return results.length ? results : null; - }] -}; - -function ToSearchTerm(val: any): { suffix: string, value: any } | undefined { - if (val === null || val === undefined) { - return; - } - const type = val.__type || typeof val; - let suffix = suffixMap[type]; - if (!suffix) { - return; - } - - if (Array.isArray(suffix)) { - const accessor = suffix[1]; - if (typeof accessor === "function") { - val = accessor(val); - } else { - val = val[accessor]; - } - suffix = suffix[0]; - } - - return { suffix, value: val }; -} -function getSuffix(value: string | [string, any]): string { - return typeof value === "string" ? 
value : value[0]; -} - -function UpdateField(socket: Socket, diff: Diff) { - Database.Instance.update(diff.id, diff.diff, - () => socket.broadcast.emit(MessageStore.UpdateField.Message, diff), false, "newDocuments"); - const docfield = diff.diff.$set; - if (!docfield) { - return; - } - const update: any = { id: diff.id }; - let dynfield = false; - for (let key in docfield) { - if (!key.startsWith("fields.")) continue; - dynfield = true; - let val = docfield[key]; - key = key.substring(7); - Object.values(suffixMap).forEach(suf => update[key + getSuffix(suf)] = { set: null }); - let term = ToSearchTerm(val); - if (term !== undefined) { - let { suffix, value } = term; - update[key + suffix] = { set: value }; - } - } - if (dynfield) { - Search.Instance.updateDocument(update); - } -} - -function DeleteField(socket: Socket, id: string) { - Database.Instance.delete({ _id: id }, "newDocuments").then(() => { - socket.broadcast.emit(MessageStore.DeleteField.Message, id); - }); + function DeleteField(socket: Socket, id: string) { + Database.Instance.delete({ _id: id }, "newDocuments").then(() => { + socket.broadcast.emit(MessageStore.DeleteField.Message, id); + }); - Search.Instance.deleteDocuments([id]); -} + Search.Instance.deleteDocuments([id]); + } -function DeleteFields(socket: Socket, ids: string[]) { - Database.Instance.delete({ _id: { $in: ids } }, "newDocuments").then(() => { - socket.broadcast.emit(MessageStore.DeleteFields.Message, ids); - }); + function DeleteFields(socket: Socket, ids: string[]) { + Database.Instance.delete({ _id: { $in: ids } }, "newDocuments").then(() => { + socket.broadcast.emit(MessageStore.DeleteFields.Message, ids); + }); - Search.Instance.deleteDocuments(ids); + Search.Instance.deleteDocuments(ids); -} + } -function CreateField(newValue: any) { - Database.Instance.insert(newValue, "newDocuments"); -} + function CreateField(newValue: any) { + Database.Instance.insert(newValue, "newDocuments"); + } -server.listen(serverPort); -console.log(`listening on port ${serverPort}`); + server.listen(serverPort); + console.log(`listening on port ${serverPort}`); + } + }); +})(); \ No newline at end of file -- cgit v1.2.3-70-g09d2 From 7b43e349d31c911ab43763a4ff7179b3778a2d96 Mon Sep 17 00:00:00 2001 From: Sam Wilkins <35748010+samwilkins333@users.noreply.github.com> Date: Wed, 16 Oct 2019 18:38:31 -0400 Subject: database separation and preliminary functions --- src/server/Initialization.ts | 81 +++++++++++++---------------- src/server/database.ts | 27 ++++++++++ src/server/index.ts | 118 ++++++++++++++++++++----------------------- 3 files changed, 117 insertions(+), 109 deletions(-) (limited to 'src') diff --git a/src/server/Initialization.ts b/src/server/Initialization.ts index e371a3edb..2c343ae90 100644 --- a/src/server/Initialization.ts +++ b/src/server/Initialization.ts @@ -7,8 +7,8 @@ import * as cookieParser from 'cookie-parser'; import expressFlash = require('express-flash'); import flash = require('connect-flash'); import { Database } from './database'; +import { getForgot, getLogin, getLogout, getReset, getSignup, postForgot, postLogin, postReset, postSignup } from './authentication/controllers/user_controller'; const MongoStore = require('connect-mongo')(session); -import mongoose, { ConnectionStates } from 'mongoose'; import { RouteStore } from './RouteStore'; import RouteManager from './RouteManager'; import * as webpack from 'webpack'; @@ -25,47 +25,26 @@ export interface InitializationOptions { export default async function InitializeServer(options: 
InitializationOptions) {
     const { listenAtPort, routeSetter } = options;
-    const server = injectMiddleware(express());
-    const { url } = Database;
-    try {
-        await connectToDatabase(url);
-    } catch (e) {
-        console.error(`Mongoose FAILED to establish default connection at ${url}`);
-        console.error(e);
-        console.log('Since a valid database connection is required to use Dash, killing the server process.\nPlease try again later.');
-        process.exit(1);
-    }
-
-    // static file serving
-    server.use(express.static(__dirname + RouteStore.public));
-    server.use(RouteStore.images, express.static(__dirname + RouteStore.public));
+    const server = buildWithMiddleware(express());

     routeSetter(new RouteManager(server, determineEnvironment()));

+    server.use(express.static(__dirname + RouteStore.public));
+    server.use(RouteStore.images, express.static(__dirname + RouteStore.public));
+
     server.use(wdm(compiler, { publicPath: config.output.publicPath }));
     server.use(whm(compiler));
     server.listen(listenAtPort, () => console.log(`server started at http://localhost:${listenAtPort}`));

-    return server;
-}
-
-function determineEnvironment() {
-    const isRelease = process.env.RELEASE === "true";
-
-    console.log(`running server in ${isRelease ? 'release' : 'debug'} mode`);
-    console.log(process.env.PWD);
+    registerAuthenticationRoutes(server);

-    let clientUtils = fs.readFileSync("./src/client/util/ClientUtils.ts.temp", "utf8");
-    clientUtils = `//AUTO-GENERATED FILE: DO NOT EDIT\n${clientUtils.replace('"mode"', String(isRelease))}`;
-    fs.writeFileSync("./src/client/util/ClientUtils.ts", clientUtils, "utf8");
-
-    return isRelease;
+    return server;
 }

 const week = 7 * 24 * 60 * 60 * 1000;
 const secret = "64d6866242d3b5a5503c675b32c9605e4e90478e9b77bcf2bc";

-function injectMiddleware(server: express.Express) {
+function buildWithMiddleware(server: express.Express) {
     [
         cookieParser(),
         session({
@@ -90,21 +69,31 @@ function injectMiddleware(server: express.Express) {
     return server;
 }

-async function connectToDatabase(url: string) {
-    const { connection } = mongoose;
-    process.on('SIGINT', () => {
-        connection.close(() => {
-            console.log('Mongoose default connection disconnected through app termination');
-            process.exit(0);
-        });
-    });
-    if (connection.readyState === ConnectionStates.disconnected) {
-        return new Promise((resolve, reject) => {
-            connection.on('error', reject);
-            connection.on('connected', () => {
-                console.log(`Mongoose established default connection at ${url}`);
-                resolve();
-            });
-        });
-    }
+function determineEnvironment() {
+    const isRelease = process.env.RELEASE === "true";
+
+    console.log(`running server in ${isRelease ? 'release' : 'debug'} mode`);
+    console.log(process.env.PWD);
+
+    let clientUtils = fs.readFileSync("./src/client/util/ClientUtils.ts.temp", "utf8");
+    clientUtils = `//AUTO-GENERATED FILE: DO NOT EDIT\n${clientUtils.replace('"mode"', String(isRelease))}`;
+    fs.writeFileSync("./src/client/util/ClientUtils.ts", clientUtils, "utf8");
+
+    return isRelease;
+}
+
+function registerAuthenticationRoutes(server: express.Express) {
+    server.get(RouteStore.signup, getSignup);
+    server.post(RouteStore.signup, postSignup);
+
+    server.get(RouteStore.login, getLogin);
+    server.post(RouteStore.login, postLogin);
+
+    server.get(RouteStore.logout, getLogout);
+
+    server.get(RouteStore.forgot, getForgot);
+    server.post(RouteStore.forgot, postForgot);
+
+    server.get(RouteStore.reset, getReset);
+    server.post(RouteStore.reset, postReset);
 }
\ No newline at end of file
diff --git a/src/server/database.ts b/src/server/database.ts
index 25e1e67e0..4f93d1ee6 100644
--- a/src/server/database.ts
+++ b/src/server/database.ts
@@ -5,6 +5,7 @@ import { Utils, emptyFunction } from '../Utils';
 import { DashUploadUtils } from './DashUploadUtils';
 import { Credentials } from 'google-auth-library';
 import { GoogleApiServerUtils } from './apis/google/GoogleApiServerUtils';
+import mongoose, { ConnectionStates } from 'mongoose';

 export namespace Database {

@@ -12,6 +13,32 @@ export namespace Database {
     const port = 27017;
     export const url = `mongodb://localhost:${port}/${schema}`;

+    export async function tryInitializeConnection() {
+        try {
+            const { connection } = mongoose;
+            process.on('SIGINT', () => {
+                connection.close(() => {
+                    console.log('Mongoose default connection disconnected through app termination');
+                    process.exit(0);
+                });
+            });
+            if (connection.readyState === ConnectionStates.disconnected) {
+                await new Promise((resolve, reject) => {
+                    connection.on('error', reject);
+                    connection.on('connected', () => {
+                        console.log(`Mongoose established default connection at ${url}`);
+                        resolve();
+                    });
+                });
+            }
+        } catch (e) {
+            console.error(`Mongoose FAILED to establish default connection at ${url} with the following error:`);
+            console.error(e);
+            console.log('Since a valid database connection is required to use Dash, the server process will now exit.\nPlease try again later.');
+            process.exit(1);
+        }
+    }
+
     class Database {
         public static DocumentsCollection = 'documents';
         private MongoClient = mongodb.MongoClient;
diff --git a/src/server/index.ts b/src/server/index.ts
index ad18857b6..ef618472b 100644
--- a/src/server/index.ts
+++ b/src/server/index.ts
@@ -11,7 +11,6 @@ import * as request from 'request';
 import io from 'socket.io';
 import { Socket } from 'socket.io';
 import { Utils } from '../Utils';
-import { getForgot, getLogin, getLogout, getReset, getSignup, postForgot, postLogin, postReset, postSignup } from './authentication/controllers/user_controller';
 import { Client } from './Client';
 import { Database } from './database';
 import { MessageStore, Transferable, Types, Diff, YoutubeQueryTypes as YoutubeQueryType, YoutubeQueryInput } from "./Message";
@@ -27,7 +26,6 @@ import { Response } from 'express-serve-static-core';
 import { GoogleApiServerUtils } from "./apis/google/GoogleApiServerUtils";
 const probe = require("probe-image-size");
 const pdf = require('pdf-parse');
-var findInFiles = require('find-in-files');
 import { GooglePhotosUploadUtils } from './apis/google/GooglePhotosUploadUtils';
 import { Opt } from '../new_fields/Doc';
 import { DashUploadUtils } from './DashUploadUtils';
@@ -38,6 +36,7 @@ import
RouteSubscriber from './RouteSubscriber'; import InitializeServer from './Initialization'; import { Method, _success, _permission_denied, _error, _invalid } from './RouteManager'; import { command_line, read_text_file } from './ActionUtilities'; +var findInFiles = require('find-in-files'); let youtubeApiKey: string; @@ -51,10 +50,22 @@ export interface NewMediaItem { }; } -(async () => { - YoutubeApi.readApiKey((apiKey: string) => youtubeApiKey = apiKey); +const pngTypes = [".png", ".PNG"]; +const jpgTypes = [".jpg", ".JPG", ".jpeg", ".JPEG"]; +const uploadDirectory = __dirname + "/public/files/"; +const pdfDirectory = uploadDirectory + "text"; +const solrURL = "http://localhost:8983/solr/#/dash"; + +YoutubeApi.readApiKey((apiKey: string) => youtubeApiKey = apiKey); + +async function PreliminaryFunctions() { await GoogleApiServerUtils.LoadOAuthClient(); + await DashUploadUtils.createIfNotExists(pdfDirectory); + await Database.tryInitializeConnection(); +} +(async () => { + await PreliminaryFunctions(); await InitializeServer({ listenAtPort: 1050, routeSetter: router => { @@ -73,6 +84,26 @@ export interface NewMediaItem { } }); + router.addSupervisedRoute({ + method: Method.GET, + subscription: "/textsearch", + onValidation: async (_user, req, res) => { + let q = req.query.q; + if (q === undefined) { + res.send([]); + return; + } + let results = await findInFiles.find({ 'term': q, 'flags': 'ig' }, uploadDirectory + "text", ".txt$"); + let resObj: { ids: string[], numFound: number, lines: string[] } = { ids: [], numFound: 0, lines: [] }; + for (var result in results) { + resObj.ids.push(path.basename(result, ".txt").replace(/upload_/, "")); + resObj.lines.push(results[result].line); + resObj.numFound++; + } + res.send(resObj); + } + }); + router.addSupervisedRoute({ method: Method.GET, subscription: "/buxton", @@ -101,36 +132,19 @@ export interface NewMediaItem { } }); - // SEARCH - const solrURL = "http://localhost:8983/solr/#/dash"; - - // GETTERS - - DashServer.get("/textsearch", async (req, res) => { - let q = req.query.q; - if (q === undefined) { - res.send([]); - return; - } - let results = await findInFiles.find({ 'term': q, 'flags': 'ig' }, uploadDirectory + "text", ".txt$"); - let resObj: { ids: string[], numFound: number, lines: string[] } = { ids: [], numFound: 0, lines: [] }; - for (var result in results) { - resObj.ids.push(path.basename(result, ".txt").replace(/upload_/, "")); - resObj.lines.push(results[result].line); - resObj.numFound++; - } - res.send(resObj); - }); - - DashServer.get("/search", async (req, res) => { - const solrQuery: any = {}; - ["q", "fq", "start", "rows", "hl", "hl.fl"].forEach(key => solrQuery[key] = req.query[key]); - if (solrQuery.q === undefined) { - res.send([]); - return; + router.addSupervisedRoute({ + method: Method.GET, + subscription: "/search", + onValidation: async (_user, req, res) => { + const solrQuery: any = {}; + ["q", "fq", "start", "rows", "hl", "hl.fl"].forEach(key => solrQuery[key] = req.query[key]); + if (solrQuery.q === undefined) { + res.send([]); + return; + } + let results = await Search.Instance.search(solrQuery); + res.send(results); } - let results = await Search.Instance.search(solrQuery); - res.send(results); }); function msToTime(duration: number) { @@ -210,9 +224,14 @@ export interface NewMediaItem { await Database.Instance.visit([id], fn); return { id, docs, files }; } - DashServer.get("/serializeDoc/:docId", async (req, res) => { - const { docs, files } = await getDocs(req.params.docId); - res.send({ docs, files: 
Array.from(files) }); + + router.addSupervisedRoute({ + method: Method.GET, + subscription: new RouteSubscriber("/serializeDoc").add("docId"), + onValidation: async (_user, req, res) => { + const { docs, files } = await getDocs(req.params.docId); + res.send({ docs, files: Array.from(files) }); + } }); router.addSupervisedRoute({ @@ -554,12 +573,6 @@ export interface NewMediaItem { } } - const pngTypes = [".png", ".PNG"]; - const jpgTypes = [".jpg", ".JPG", ".jpeg", ".JPEG"]; - const uploadDirectory = __dirname + "/public/files/"; - const pdfDirectory = uploadDirectory + "text"; - DashUploadUtils.createIfNotExists(pdfDirectory); - interface ImageFileResponse { name: string; path: string; @@ -657,27 +670,6 @@ export interface NewMediaItem { } }); - // AUTHENTICATION - - // Sign Up - DashServer.get(RouteStore.signup, getSignup); - DashServer.post(RouteStore.signup, postSignup); - - // Log In - DashServer.get(RouteStore.login, getLogin); - DashServer.post(RouteStore.login, postLogin); - - // Log Out - DashServer.get(RouteStore.logout, getLogout); - - // FORGOT PASSWORD EMAIL HANDLING - DashServer.get(RouteStore.forgot, getForgot); - DashServer.post(RouteStore.forgot, postForgot); - - // RESET PASSWORD EMAIL HANDLING - DashServer.get(RouteStore.reset, getReset); - DashServer.post(RouteStore.reset, postReset); - const headerCharRegex = /[^\t\x20-\x7e\x80-\xff]/; DashServer.use(RouteStore.corsProxy, (req, res) => { req.pipe(request(decodeURIComponent(req.url.substring(1)))).on("response", res => { -- cgit v1.2.3-70-g09d2 From 91868727ea6e6443a916cf720d477b1136601b2f Mon Sep 17 00:00:00 2001 From: Sam Wilkins <35748010+samwilkins333@users.noreply.github.com> Date: Thu, 17 Oct 2019 02:53:34 -0400 Subject: refactored handlers --- src/client/util/SelectionManager.ts | 5 +- src/server/Initialization.ts | 28 +- src/server/RouteManager.ts | 60 ++- src/server/authentication/models/user_model.ts | 14 +- src/server/database.ts | 14 +- src/server/index.ts | 495 +++++++++++++------------ 6 files changed, 343 insertions(+), 273 deletions(-) (limited to 'src') diff --git a/src/client/util/SelectionManager.ts b/src/client/util/SelectionManager.ts index df1b46b33..398c90ddb 100644 --- a/src/client/util/SelectionManager.ts +++ b/src/client/util/SelectionManager.ts @@ -54,7 +54,10 @@ export namespace SelectionManager { let stored = StrCast(targetDoc.backgroundColor); stored.length > 0 && (targetColor = stored); } - InkingControl.Instance.updateSelectedColor(targetColor); + const { Instance } = InkingControl; + if (Instance) { + Instance.updateSelectedColor(targetColor); + } }, { fireImmediately: true }); export function DeselectDoc(docView: DocumentView): void { diff --git a/src/server/Initialization.ts b/src/server/Initialization.ts index 2c343ae90..9646dc195 100644 --- a/src/server/Initialization.ts +++ b/src/server/Initialization.ts @@ -17,6 +17,7 @@ const compiler = webpack(config); import * as wdm from 'webpack-dev-middleware'; import * as whm from 'webpack-hot-middleware'; import * as fs from 'fs'; +import * as request from 'request'; export interface InitializationOptions { listenAtPort: number; @@ -27,18 +28,18 @@ export default async function InitializeServer(options: InitializationOptions) { const { listenAtPort, routeSetter } = options; const server = buildWithMiddleware(express()); - routeSetter(new RouteManager(server, determineEnvironment())); - server.use(express.static(__dirname + RouteStore.public)); server.use(RouteStore.images, express.static(__dirname + RouteStore.public)); 
     server.use(wdm(compiler, { publicPath: config.output.publicPath }));
     server.use(whm(compiler));
-    server.listen(listenAtPort, () => console.log(`server started at http://localhost:${listenAtPort}`));

     registerAuthenticationRoutes(server);
+    registerCorsProxy(server);

-    return server;
+    routeSetter(new RouteManager(server, determineEnvironment()));
+    server.listen(listenAtPort, () => console.log(`server started at http://localhost:${listenAtPort}`));
 }

 const week = 7 * 24 * 60 * 60 * 1000;
@@ -96,4 +97,23 @@ function registerAuthenticationRoutes(server: express.Express) {

     server.get(RouteStore.reset, getReset);
     server.post(RouteStore.reset, postReset);
+}
+
+function registerCorsProxy(server: express.Express) {
+    const headerCharRegex = /[^\t\x20-\x7e\x80-\xff]/;
+    server.use(RouteStore.corsProxy, (req, res) => {
+        req.pipe(request(decodeURIComponent(req.url.substring(1)))).on("response", res => {
+            const headers = Object.keys(res.headers);
+            headers.forEach(headerName => {
+                const header = res.headers[headerName];
+                if (Array.isArray(header)) {
+                    res.headers[headerName] = header.filter(h => !headerCharRegex.test(h));
+                } else if (header) {
+                    if (headerCharRegex.test(header as any)) {
+                        delete res.headers[headerName];
+                    }
+                }
+            });
+        }).pipe(res);
+    });
 }
\ No newline at end of file
diff --git a/src/server/RouteManager.ts b/src/server/RouteManager.ts
index cf15e45c9..626014d1a 100644
--- a/src/server/RouteManager.ts
+++ b/src/server/RouteManager.ts
@@ -3,6 +3,7 @@ import { RouteStore } from "./RouteStore";
 import { DashUserModel } from "./authentication/models/user_model";
 import * as express from 'express';
 import * as qs from 'query-string';
+import { Opt } from "../new_fields/Doc";

 export default class RouteManager {
     private server: express.Express;
@@ -27,29 +28,34 @@ export default class RouteManager {
      * @param subscribers the forward slash prepended path names (reference and add to RouteStore.ts) that will all invoke the given @param handler
      */
     addSupervisedRoute(initializer: RouteInitializer) {
-        const { method, subscription, onValidation, onRejection, onError } = initializer;
-        const release = this._isRelease;
+        const { method, subscription, onValidation, onRejection, onError, onGuestAccess } = initializer;
+        const isRelease = this._isRelease;
         let abstracted = async (req: express.Request, res: express.Response) => {
             const { user, originalUrl: target } = req;
-            if (user || isSharedDocAccess(target)) {
+            const core = { req, res, isRelease: isRelease };
+            if (user) {
                 try {
-                    await onValidation(user, req, res, release);
+                    await onValidation({ ...core, user: user as any });
                 } catch (e) {
                     if (onError) {
-                        onError(req, res, e, release);
+                        onError({ ...core, error: e });
                     } else {
                         _error(res, `The server encountered an internal error handling ${target}.`, e);
                     }
                 }
             } else {
-                req.session!.target = target;
-                try {
-                    await (onRejection || LoginRedirect)(req, res, release);
-                } catch (e) {
-                    if (onError) {
-                        onError(req, res, e, this._isRelease);
-                    } else {
-                        _error(res, `The server encountered an internal error when rejecting ${target}.`, e);
+                if (isGuestAccess(req) && onGuestAccess) {
+                    await onGuestAccess(core);
+                } else {
+                    req.session!.target = target;
+                    try {
+                        await (onRejection || LoginRedirect)(core);
+                    } catch (e) {
+                        if (onError) {
+                            onError({ ...core, error: e });
+                        } else {
+                            _error(res, `The server encountered an internal error when rejecting ${target}.`, e);
+                        }
                     }
                 }
             }
@@ -84,18 +90,25 @@ export enum Method {
     POST
 }

-export type ValidationHandler = (user: DashUserModel, req: express.Request, res: express.Response, isRelease: boolean) => any | Promise;
-export type RejectionHandler = (req: express.Request, res: express.Response, isRelease: boolean) => any | Promise;
-export type ErrorHandler = (req: express.Request, res: express.Response, error: any, isRelease: boolean) => any | Promise;
+export interface CoreArguments {
+    req: express.Request,
+    res: express.Response,
+    isRelease: boolean;
+}

-const LoginRedirect: RejectionHandler = (_req, res) => res.redirect(RouteStore.login);
+export type OnValidation = (core: CoreArguments & { user: DashUserModel }) => any | Promise;
+export type OnUnauthenticated = (core: CoreArguments) => any | Promise;
+export type OnError = (core: CoreArguments & { error: any }) => any | Promise;
+
+const LoginRedirect: OnUnauthenticated = ({ res }) => res.redirect(RouteStore.login);

 export interface RouteInitializer {
     method: Method;
     subscription: string | RouteSubscriber | (string | RouteSubscriber)[];
-    onValidation: ValidationHandler;
-    onRejection?: RejectionHandler;
-    onError?: ErrorHandler;
+    onValidation: OnValidation;
+    onRejection?: OnUnauthenticated;
+    onGuestAccess?: OnUnauthenticated;
+    onError?: OnError;
 }

 const isSharedDocAccess = (target: string) => {
@@ -104,6 +117,13 @@ const isSharedDocAccess = (target: string) => {
     return shared && docAccess;
 };

+const isGuestAccess = (req: express.Request) => {
+    if (isSharedDocAccess(req.originalUrl)) {
+        return true;
+    }
+    return false;
+}
+
 export const STATUS = {
     OK: 200,
     BAD_REQUEST: 400,
diff --git a/src/server/authentication/models/user_model.ts b/src/server/authentication/models/user_model.ts
index 45fbf23b1..cc670a03a 100644
--- a/src/server/authentication/models/user_model.ts
+++ b/src/server/authentication/models/user_model.ts
@@ -1,20 +1,8 @@
 //@ts-ignore
 import * as bcrypt from "bcrypt-nodejs";
 //@ts-ignore
-import * as mongoose from "mongoose";
-var url = 'mongodb://localhost:27017/Dash';
+import * as mongoose from 'mongoose';

-mongoose.connect(url, { useNewUrlParser: true });
-
-mongoose.connection.on('connected', function () {
-    console.log('Stablished connection on ' + url);
-});
-mongoose.connection.on('error', function (error) {
-    console.log('Something wrong happened: ' + error);
-});
-mongoose.connection.on('disconnected', function () {
-    console.log('connection closed');
-});
 export type DashUserModel = mongoose.Document & {
     email: String,
     password: string,
diff --git a/src/server/database.ts b/src/server/database.ts
index 4f93d1ee6..44c49d03e 100644
--- a/src/server/database.ts
+++ b/src/server/database.ts
@@ -5,7 +5,7 @@ import { Utils, emptyFunction } from '../Utils';
 import { DashUploadUtils } from './DashUploadUtils';
 import { Credentials } from 'google-auth-library';
 import { GoogleApiServerUtils } from './apis/google/GoogleApiServerUtils';
-import mongoose, { ConnectionStates } from 'mongoose';
+import * as mongoose from 'mongoose';

 export namespace Database {

@@ -13,6 +13,14 @@ export namespace Database {
     const port = 27017;
     export const url = `mongodb://localhost:${port}/${schema}`;

+    enum ConnectionStates {
+        disconnected = 0,
+        connected = 1,
+        connecting = 2,
+        disconnecting = 3,
+        uninitialized = 99,
+    }
+
     export async function tryInitializeConnection() {
         try {
             const { connection } = mongoose;
@@ -25,10 +33,14 @@ export namespace Database {
             if (connection.readyState === ConnectionStates.disconnected) {
                 await new Promise((resolve, reject) => {
                     connection.on('error', reject);
+                    connection.on('disconnected', () => {
+                        console.log(`Mongoose connection
at ${url} now closed`); + }); connection.on('connected', () => { console.log(`Mongoose established default connection at ${url}`); resolve(); }); + mongoose.connect(url, { useNewUrlParser: true }); }); } } catch (e) { diff --git a/src/server/index.ts b/src/server/index.ts index ef618472b..bba8fc292 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -7,8 +7,7 @@ import * as Pdfjs from 'pdfjs-dist'; const imageDataUri = require('image-data-uri'); import * as mobileDetect from 'mobile-detect'; import * as path from 'path'; -import * as request from 'request'; -import io from 'socket.io'; +import * as io from 'socket.io'; import { Socket } from 'socket.io'; import { Utils } from '../Utils'; import { Client } from './Client'; @@ -34,8 +33,8 @@ import { ParsedPDF } from "./PdfTypes"; import { reject } from 'bluebird'; import RouteSubscriber from './RouteSubscriber'; import InitializeServer from './Initialization'; -import { Method, _success, _permission_denied, _error, _invalid } from './RouteManager'; -import { command_line, read_text_file } from './ActionUtilities'; +import { Method, _success, _permission_denied, _error, _invalid, OnUnauthenticated } from './RouteManager'; +import { command_line } from './ActionUtilities'; var findInFiles = require('find-in-files'); let youtubeApiKey: string; @@ -73,7 +72,7 @@ async function PreliminaryFunctions() { router.addSupervisedRoute({ method: Method.GET, subscription: "/pull", - onValidation: (_user, _req, res) => { + onValidation: ({ res }) => { exec('"C:\\Program Files\\Git\\git-bash.exe" -c "git pull"', err => { if (err) { res.send(err.message); @@ -87,7 +86,7 @@ async function PreliminaryFunctions() { router.addSupervisedRoute({ method: Method.GET, subscription: "/textsearch", - onValidation: async (_user, req, res) => { + onValidation: async ({ req, res }) => { let q = req.query.q; if (q === undefined) { res.send([]); @@ -107,7 +106,7 @@ async function PreliminaryFunctions() { router.addSupervisedRoute({ method: Method.GET, subscription: "/buxton", - onValidation: (_user, _req, res) => { + onValidation: ({ res }) => { let cwd = '../scraping/buxton'; let onResolved = (stdout: string) => { console.log(stdout); res.redirect("/"); }; @@ -121,7 +120,7 @@ async function PreliminaryFunctions() { router.addSupervisedRoute({ method: Method.GET, subscription: "/version", - onValidation: (_user, _req, res) => { + onValidation: ({ res }) => { exec('"C:\\Program Files\\Git\\bin\\git.exe" rev-parse HEAD', (err, stdout) => { if (err) { res.send(err.message); @@ -135,7 +134,7 @@ async function PreliminaryFunctions() { router.addSupervisedRoute({ method: Method.GET, subscription: "/search", - onValidation: async (_user, req, res) => { + onValidation: async ({ req, res }) => { const solrQuery: any = {}; ["q", "fq", "start", "rows", "hl", "hl.fl"].forEach(key => solrQuery[key] = req.query[key]); if (solrQuery.q === undefined) { @@ -228,7 +227,7 @@ async function PreliminaryFunctions() { router.addSupervisedRoute({ method: Method.GET, subscription: new RouteSubscriber("/serializeDoc").add("docId"), - onValidation: async (_user, req, res) => { + onValidation: async ({ req, res }) => { const { docs, files } = await getDocs(req.params.docId); res.send({ docs, files: Array.from(files) }); } @@ -237,7 +236,7 @@ async function PreliminaryFunctions() { router.addSupervisedRoute({ method: Method.GET, subscription: new RouteSubscriber(RouteStore.imageHierarchyExport).add('docId'), - onValidation: async (_user, req, res) => { + onValidation: async ({ req, res }) => 
{ const id = req.params.docId; const hierarchy: Hierarchy = {}; await targetedVisitorRecursive(id, hierarchy); @@ -305,154 +304,171 @@ async function PreliminaryFunctions() { } }; - DashServer.get("/downloadId/:docId", async (req, res) => { - res.set('Content-disposition', `attachment;`); - res.set('Content-Type', "application/zip"); - const { id, docs, files } = await getDocs(req.params.docId); - const docString = JSON.stringify({ id, docs }); - const zip = Archiver('zip'); - zip.pipe(res); - zip.append(docString, { name: "doc.json" }); - files.forEach(val => { - zip.file(__dirname + RouteStore.public + val, { name: val.substring(1) }); - }); - zip.finalize(); - }); + router.addSupervisedRoute({ + method: Method.GET, + subscription: new RouteSubscriber("/downloadId").add("docId"), + onValidation: async ({ req, res }) => { + res.set('Content-disposition', `attachment;`); + res.set('Content-Type', "application/zip"); + const { id, docs, files } = await getDocs(req.params.docId); + const docString = JSON.stringify({ id, docs }); + const zip = Archiver('zip'); + zip.pipe(res); + zip.append(docString, { name: "doc.json" }); + files.forEach(val => { + zip.file(__dirname + RouteStore.public + val, { name: val.substring(1) }); + }); + zip.finalize(); + } + }) - DashServer.post("/uploadDoc", (req, res) => { - let form = new formidable.IncomingForm(); - form.keepExtensions = true; - // let path = req.body.path; - const ids: { [id: string]: string } = {}; - let remap = true; - const getId = (id: string): string => { - if (!remap) return id; - if (id.endsWith("Proto")) return id; - if (id in ids) { - return ids[id]; - } else { - return ids[id] = v4(); - } - }; - const mapFn = (doc: any) => { - if (doc.id) { - doc.id = getId(doc.id); - } - for (const key in doc.fields) { - if (!doc.fields.hasOwnProperty(key)) { - continue; + router.addSupervisedRoute({ + method: Method.POST, + subscription: "/uploadDoc", + onValidation: ({ req, res }) => { + let form = new formidable.IncomingForm(); + form.keepExtensions = true; + // let path = req.body.path; + const ids: { [id: string]: string } = {}; + let remap = true; + const getId = (id: string): string => { + if (!remap) return id; + if (id.endsWith("Proto")) return id; + if (id in ids) { + return ids[id]; + } else { + return ids[id] = v4(); } - const field = doc.fields[key]; - if (field === undefined || field === null) { - continue; + }; + const mapFn = (doc: any) => { + if (doc.id) { + doc.id = getId(doc.id); } + for (const key in doc.fields) { + if (!doc.fields.hasOwnProperty(key)) { + continue; + } + const field = doc.fields[key]; + if (field === undefined || field === null) { + continue; + } - if (field.__type === "proxy" || field.__type === "prefetch_proxy") { - field.fieldId = getId(field.fieldId); - } else if (field.__type === "script" || field.__type === "computed") { - if (field.captures) { - field.captures.fieldId = getId(field.captures.fieldId); + if (field.__type === "proxy" || field.__type === "prefetch_proxy") { + field.fieldId = getId(field.fieldId); + } else if (field.__type === "script" || field.__type === "computed") { + if (field.captures) { + field.captures.fieldId = getId(field.captures.fieldId); + } + } else if (field.__type === "list") { + mapFn(field); + } else if (typeof field === "string") { + const re = /("(?:dataD|d)ocumentId"\s*:\s*")([\w\-]*)"/g; + doc.fields[key] = (field as any).replace(re, (match: any, p1: string, p2: string) => { + return `${p1}${getId(p2)}"`; + }); + } else if (field.__type === "RichTextField") { + const re = 
/("href"\s*:\s*")(.*?)"/g; + field.Data = field.Data.replace(re, (match: any, p1: string, p2: string) => { + return `${p1}${getId(p2)}"`; + }); } - } else if (field.__type === "list") { - mapFn(field); - } else if (typeof field === "string") { - const re = /("(?:dataD|d)ocumentId"\s*:\s*")([\w\-]*)"/g; - doc.fields[key] = (field as any).replace(re, (match: any, p1: string, p2: string) => { - return `${p1}${getId(p2)}"`; - }); - } else if (field.__type === "RichTextField") { - const re = /("href"\s*:\s*")(.*?)"/g; - field.Data = field.Data.replace(re, (match: any, p1: string, p2: string) => { - return `${p1}${getId(p2)}"`; - }); } - } - }; - form.parse(req, async (err, fields, files) => { - remap = fields.remap !== "false"; - let id: string = ""; - try { - for (const name in files) { - const path_2 = files[name].path; - const zip = new AdmZip(path_2); - zip.getEntries().forEach((entry: any) => { - if (!entry.entryName.startsWith("files/")) return; - let dirname = path.dirname(entry.entryName) + "/"; - let extname = path.extname(entry.entryName); - let basename = path.basename(entry.entryName).split(".")[0]; - // zip.extractEntryTo(dirname + basename + "_o" + extname, __dirname + RouteStore.public, true, false); - // zip.extractEntryTo(dirname + basename + "_s" + extname, __dirname + RouteStore.public, true, false); - // zip.extractEntryTo(dirname + basename + "_m" + extname, __dirname + RouteStore.public, true, false); - // zip.extractEntryTo(dirname + basename + "_l" + extname, __dirname + RouteStore.public, true, false); + }; + form.parse(req, async (err, fields, files) => { + remap = fields.remap !== "false"; + let id: string = ""; + try { + for (const name in files) { + const path_2 = files[name].path; + const zip = new AdmZip(path_2); + zip.getEntries().forEach((entry: any) => { + if (!entry.entryName.startsWith("files/")) return; + let dirname = path.dirname(entry.entryName) + "/"; + let extname = path.extname(entry.entryName); + let basename = path.basename(entry.entryName).split(".")[0]; + // zip.extractEntryTo(dirname + basename + "_o" + extname, __dirname + RouteStore.public, true, false); + // zip.extractEntryTo(dirname + basename + "_s" + extname, __dirname + RouteStore.public, true, false); + // zip.extractEntryTo(dirname + basename + "_m" + extname, __dirname + RouteStore.public, true, false); + // zip.extractEntryTo(dirname + basename + "_l" + extname, __dirname + RouteStore.public, true, false); + try { + zip.extractEntryTo(entry.entryName, __dirname + RouteStore.public, true, false); + dirname = "/" + dirname; + + fs.createReadStream(__dirname + RouteStore.public + dirname + basename + extname).pipe(fs.createWriteStream(__dirname + RouteStore.public + dirname + basename + "_o" + extname)); + fs.createReadStream(__dirname + RouteStore.public + dirname + basename + extname).pipe(fs.createWriteStream(__dirname + RouteStore.public + dirname + basename + "_s" + extname)); + fs.createReadStream(__dirname + RouteStore.public + dirname + basename + extname).pipe(fs.createWriteStream(__dirname + RouteStore.public + dirname + basename + "_m" + extname)); + fs.createReadStream(__dirname + RouteStore.public + dirname + basename + extname).pipe(fs.createWriteStream(__dirname + RouteStore.public + dirname + basename + "_l" + extname)); + } catch (e) { + console.log(e); + } + }); + const json = zip.getEntry("doc.json"); + let docs: any; try { - zip.extractEntryTo(entry.entryName, __dirname + RouteStore.public, true, false); - dirname = "/" + dirname; - - fs.createReadStream(__dirname + 
RouteStore.public + dirname + basename + extname).pipe(fs.createWriteStream(__dirname + RouteStore.public + dirname + basename + "_o" + extname)); - fs.createReadStream(__dirname + RouteStore.public + dirname + basename + extname).pipe(fs.createWriteStream(__dirname + RouteStore.public + dirname + basename + "_s" + extname)); - fs.createReadStream(__dirname + RouteStore.public + dirname + basename + extname).pipe(fs.createWriteStream(__dirname + RouteStore.public + dirname + basename + "_m" + extname)); - fs.createReadStream(__dirname + RouteStore.public + dirname + basename + extname).pipe(fs.createWriteStream(__dirname + RouteStore.public + dirname + basename + "_l" + extname)); - } catch (e) { - console.log(e); + let data = JSON.parse(json.getData().toString("utf8")); + docs = data.docs; + id = data.id; + docs = Object.keys(docs).map(key => docs[key]); + docs.forEach(mapFn); + await Promise.all(docs.map((doc: any) => new Promise(res => Database.Instance.replace(doc.id, doc, (err, r) => { + err && console.log(err); + res(); + }, true, "newDocuments")))); + } catch (e) { console.log(e); } + fs.unlink(path_2, () => { }); + } + if (id) { + res.send(JSON.stringify(getId(id))); + } else { + res.send(JSON.stringify("error")); + } + } catch (e) { console.log(e); } + }); + } + }) + + router.addSupervisedRoute({ + method: Method.GET, + subscription: "/whosOnline", + onValidation: ({ res }) => { + let users: any = { active: {}, inactive: {} }; + const now = Date.now(); + + for (const user in timeMap) { + const time = timeMap[user]; + const key = ((now - time) / 1000) < (60 * 5) ? "active" : "inactive"; + users[key][user] = `Last active ${msToTime(now - time)} ago`; + } + + res.send(users); + } + }); + + router.addSupervisedRoute({ + method: Method.GET, + subscription: new RouteSubscriber("/thumbnail").add("filename"), + onValidation: ({ req, res }) => { + let filename = req.params.filename; + let noExt = filename.substring(0, filename.length - ".png".length); + let pagenumber = parseInt(noExt.split('-')[1]); + fs.exists(uploadDirectory + filename, (exists: boolean) => { + console.log(`${uploadDirectory + filename} ${exists ? "exists" : "does not exist"}`); + if (exists) { + let input = fs.createReadStream(uploadDirectory + filename); + probe(input, (err: any, result: any) => { + if (err) { + console.log(err); + console.log(`error on ${filename}`); + return; } + res.send({ path: "/files/" + filename, width: result.width, height: result.height }); }); - const json = zip.getEntry("doc.json"); - let docs: any; - try { - let data = JSON.parse(json.getData().toString("utf8")); - docs = data.docs; - id = data.id; - docs = Object.keys(docs).map(key => docs[key]); - docs.forEach(mapFn); - await Promise.all(docs.map((doc: any) => new Promise(res => Database.Instance.replace(doc.id, doc, (err, r) => { - err && console.log(err); - res(); - }, true, "newDocuments")))); - } catch (e) { console.log(e); } - fs.unlink(path_2, () => { }); } - if (id) { - res.send(JSON.stringify(getId(id))); - } else { - res.send(JSON.stringify("error")); + else { + LoadPage(uploadDirectory + filename.substring(0, filename.length - noExt.split('-')[1].length - ".PNG".length - 1) + ".pdf", pagenumber, res); } - } catch (e) { console.log(e); } - }); - }); - - DashServer.get("/whosOnline", (req, res) => { - let users: any = { active: {}, inactive: {} }; - const now = Date.now(); - - for (const user in timeMap) { - const time = timeMap[user]; - const key = ((now - time) / 1000) < (60 * 5) ? 
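// editor's note: a small self-contained sketch of the classification performed by the
// /whosOnline route above: a user counts as "active" if their last recorded packet arrived
// within the past five minutes. The helper name is illustrative; timeMap and the labels
// come from the route itself.
const ACTIVE_WINDOW_MS = 5 * 60 * 1000;

function classifyActivity(lastSeen: number, now = Date.now()): "active" | "inactive" {
    return now - lastSeen < ACTIVE_WINDOW_MS ? "active" : "inactive";
}

console.assert(classifyActivity(Date.now() - 60 * 1000) === "active");        // one minute ago
console.assert(classifyActivity(Date.now() - 10 * 60 * 1000) === "inactive"); // ten minutes ago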
"active" : "inactive"; - users[key][user] = `Last active ${msToTime(now - time)} ago`; + }); } - - res.send(users); - }); - DashServer.get("/thumbnail/:filename", (req, res) => { - let filename = req.params.filename; - let noExt = filename.substring(0, filename.length - ".png".length); - let pagenumber = parseInt(noExt.split('-')[1]); - fs.exists(uploadDirectory + filename, (exists: boolean) => { - console.log(`${uploadDirectory + filename} ${exists ? "exists" : "does not exist"}`); - if (exists) { - let input = fs.createReadStream(uploadDirectory + filename); - probe(input, (err: any, result: any) => { - if (err) { - console.log(err); - console.log(`error on ${filename}`); - return; - } - res.send({ path: "/files/" + filename, width: result.width, height: result.height }); - }); - } - else { - LoadPage(uploadDirectory + filename.substring(0, filename.length - noExt.split('-')[1].length - ".PNG".length - 1) + ".pdf", pagenumber, res); - } - }); }); function LoadPage(file: string, pageNumber: number, res: Response) { @@ -498,41 +514,44 @@ async function PreliminaryFunctions() { router.addSupervisedRoute({ method: Method.GET, subscription: RouteStore.root, - onValidation: (_user, _req, res) => res.redirect(RouteStore.home) + onValidation: ({ res }) => res.redirect(RouteStore.home) }); router.addSupervisedRoute({ method: Method.GET, subscription: RouteStore.getUsers, - onValidation: async (_user, _req, res) => { + onValidation: async ({ res }) => { const cursor = await Database.Instance.query({}, { email: 1, userDocumentId: 1 }, "users"); const results = await cursor.toArray(); res.send(results.map(user => ({ email: user.email, userDocumentId: user.userDocumentId }))); - }, + } }); + const serve: OnUnauthenticated = ({ req, res }) => { + let detector = new mobileDetect(req.headers['user-agent'] || ""); + let filename = detector.mobile() !== null ? 'mobile/image.html' : 'index.html'; + res.sendFile(path.join(__dirname, '../../deploy/' + filename)); + } + router.addSupervisedRoute({ method: Method.GET, - subscription: [RouteStore.home, RouteStore.openDocumentWithId], - onValidation: (_user, req, res) => { - let detector = new mobileDetect(req.headers['user-agent'] || ""); - let filename = detector.mobile() !== null ? 
'mobile/image.html' : 'index.html'; - res.sendFile(path.join(__dirname, '../../deploy/' + filename)); - }, + subscription: [RouteStore.home, new RouteSubscriber("/doc").add("docId")], + onValidation: serve, + onGuestAccess: serve }); router.addSupervisedRoute({ method: Method.GET, subscription: RouteStore.getUserDocumentId, - onValidation: (user, _req, res) => res.send(user.userDocumentId), - onRejection: (_req, res) => res.send(undefined) + onValidation: ({ res, user }) => res.send(user.userDocumentId), + onRejection: ({ res }) => res.send(undefined) }); router.addSupervisedRoute({ method: Method.GET, subscription: RouteStore.getCurrUser, - onValidation: (user, _req, res) => { res.send(JSON.stringify(user)); }, - onRejection: (_req, res) => res.send(JSON.stringify({ id: "__guest__", email: "" })) + onValidation: ({ res, user }) => { res.send(JSON.stringify(user)); }, + onRejection: ({ res }) => res.send(JSON.stringify({ id: "__guest__", email: "" })) }); const ServicesApiKeyMap = new Map([ @@ -544,7 +563,7 @@ async function PreliminaryFunctions() { router.addSupervisedRoute({ method: Method.GET, subscription: new RouteSubscriber(RouteStore.cognitiveServices).add('requestedservice'), - onValidation: (_user, req, res) => { + onValidation: ({ req, res }) => { let service = req.params.requestedservice; res.send(ServicesApiKeyMap.get(service)); } @@ -583,7 +602,7 @@ async function PreliminaryFunctions() { router.addSupervisedRoute({ method: Method.POST, subscription: RouteStore.upload, - onValidation: (_user, req, res) => { + onValidation: ({ req, res }) => { let form = new formidable.IncomingForm(); form.uploadDir = uploadDirectory; form.keepExtensions = true; @@ -621,7 +640,7 @@ async function PreliminaryFunctions() { router.addSupervisedRoute({ method: Method.POST, subscription: RouteStore.inspectImage, - onValidation: async (_user, req, res) => { + onValidation: async ({ req, res }) => { const { source } = req.body; if (typeof source === "string") { const uploadInformation = await DashUploadUtils.UploadImage(source); @@ -634,7 +653,7 @@ async function PreliminaryFunctions() { router.addSupervisedRoute({ method: Method.POST, subscription: RouteStore.dataUriToImage, - onValidation: (_user, req, res) => { + onValidation: ({ req, res }) => { const uri = req.body.uri; const filename = req.body.name; if (!uri || !filename) { @@ -670,27 +689,10 @@ async function PreliminaryFunctions() { } }); - const headerCharRegex = /[^\t\x20-\x7e\x80-\xff]/; - DashServer.use(RouteStore.corsProxy, (req, res) => { - req.pipe(request(decodeURIComponent(req.url.substring(1)))).on("response", res => { - const headers = Object.keys(res.headers); - headers.forEach(headerName => { - const header = res.headers[headerName]; - if (Array.isArray(header)) { - res.headers[headerName] = header.filter(h => !headerCharRegex.test(h)); - } else if (header) { - if (headerCharRegex.test(header as any)) { - delete res.headers[headerName]; - } - } - }); - }).pipe(res); - }); - router.addSupervisedRoute({ method: Method.GET, subscription: RouteStore.delete, - onValidation: (_user, _req, res, isRelease) => { + onValidation: ({ res, isRelease }) => { if (isRelease) { return _permission_denied(res, deletionPermissionError); } @@ -701,7 +703,7 @@ async function PreliminaryFunctions() { router.addSupervisedRoute({ method: Method.GET, subscription: RouteStore.deleteAll, - onValidation: (_user, _req, res, isRelease) => { + onValidation: ({ res, isRelease }) => { if (isRelease) { return _permission_denied(res, deletionPermissionError); } @@ 
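// editor's note: the recurring change in this commit is the addSupervisedRoute handler
// signature: positional (user, req, res, isRelease) arguments become one destructurable
// object, so handlers pull out only what they need. A hedged sketch (the route is illustrative):
router.addSupervisedRoute({
    method: Method.GET,
    subscription: "/example",
    onValidation: ({ req, res, user, isRelease }) => {
        res.send({ url: req.originalUrl, release: isRelease, viewer: user.email });
    }
});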
-813,27 +815,34 @@ async function PreliminaryFunctions() { ["update", (api, params) => api.batchUpdate(params)], ]); - DashServer.post(RouteStore.googleDocs + "/:sector/:action", (req, res) => { - let sector: GoogleApiServerUtils.Service = req.params.sector as GoogleApiServerUtils.Service; - let action: GoogleApiServerUtils.Action = req.params.action as GoogleApiServerUtils.Action; - GoogleApiServerUtils.GetEndpoint(GoogleApiServerUtils.Service[sector], { credentialsPath, userId: req.headers.userId as string }).then(endpoint => { - let handler = EndpointHandlerMap.get(action); - if (endpoint && handler) { - let execute = handler(endpoint, req.body).then( - response => res.send(response.data), - rejection => res.send(rejection) - ); - execute.catch(exception => res.send(exception)); - return; - } - res.send(undefined); - }); + router.addSupervisedRoute({ + method: Method.POST, + subscription: new RouteSubscriber(RouteStore.googleDocs).add("sector", "action"), + onValidation: ({ req, res }) => { + let sector: GoogleApiServerUtils.Service = req.params.sector as GoogleApiServerUtils.Service; + let action: GoogleApiServerUtils.Action = req.params.action as GoogleApiServerUtils.Action; + GoogleApiServerUtils.GetEndpoint(GoogleApiServerUtils.Service[sector], { credentialsPath, userId: req.headers.userId as string }).then(endpoint => { + let handler = EndpointHandlerMap.get(action); + if (endpoint && handler) { + let execute = handler(endpoint, req.body).then( + response => res.send(response.data), + rejection => res.send(rejection) + ); + execute.catch(exception => res.send(exception)); + return; + } + res.send(undefined); + }); + } }); router.addSupervisedRoute({ method: Method.GET, subscription: RouteStore.readGoogleAccessToken, - onValidation: async (user, _req, res) => { + onValidation: async ({ user, res }) => { + if (!user) { + return res.send(undefined); + } const userId = user.id; const token = await Database.Auxiliary.GoogleAuthenticationToken.Fetch(userId); const information = { credentialsPath, userId }; @@ -847,7 +856,10 @@ async function PreliminaryFunctions() { router.addSupervisedRoute({ method: Method.POST, subscription: RouteStore.writeGoogleAccessToken, - onValidation: async (user, req, res) => { + onValidation: async ({ user, req, res }) => { + if (!user) { + return res.send(undefined); + } const userId = user.id; const information = { credentialsPath, userId }; res.send(await GoogleApiServerUtils.ProcessClientSideCode(information, req.body.authenticationCode)); @@ -861,8 +873,11 @@ async function PreliminaryFunctions() { router.addSupervisedRoute({ method: Method.POST, subscription: RouteStore.googlePhotosMediaUpload, - onValidation: async (user, req, res) => { + onValidation: async ({ user, req, res }) => { const { media } = req.body; + if (!user) { + return res.send(undefined); + } const userId = user.id; if (!userId) { return _error(res, userIdError); @@ -914,50 +929,62 @@ async function PreliminaryFunctions() { const requestError = "Unable to execute download: the body's media items were malformed."; const deletionPermissionError = "Cannot perform specialized delete outside of the development environment!"; - DashServer.get("/deleteWithAux", async (_req, res) => { - if (release) { - return _permission_denied(res, deletionPermissionError); + router.addSupervisedRoute({ + method: Method.GET, + subscription: "/deleteWithAux", + onValidation: async ({ res, isRelease }) => { + if (isRelease) { + return _permission_denied(res, deletionPermissionError); + } + await 
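// editor's note: the googleDocs route above resolves :sector to a Google API endpoint and
// :action to a handler in EndpointHandlerMap, so supporting a new operation means adding one
// map entry. A minimal sketch of that dispatch shape; EndpointLike and dispatchAction are
// stand-ins, and only the "update"/batchUpdate pairing appears in this excerpt.
type EndpointLike = { batchUpdate(params: any): Promise<{ data: any }> };
type ActionHandler = (api: EndpointLike, params: any) => Promise<{ data: any }>;

const handlerMap = new Map<string, ActionHandler>([
    ["update", (api, params) => api.batchUpdate(params)],
]);

async function dispatchAction(action: string, api: EndpointLike, params: any) {
    const handler = handlerMap.get(action);
    return handler ? (await handler(api, params)).data : undefined;
}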
Database.Auxiliary.DeleteAll(); + res.redirect(RouteStore.delete); } - await Database.Auxiliary.DeleteAll(); - res.redirect(RouteStore.delete); - }); + }) - DashServer.get("/deleteWithGoogleCredentials", async (req, res) => { - if (release) { - return _permission_denied(res, deletionPermissionError); + router.addSupervisedRoute({ + method: Method.GET, + subscription: "/deleteWithGoogleCredentials", + onValidation: async ({ res, isRelease }) => { + if (isRelease) { + return _permission_denied(res, deletionPermissionError); + } + await Database.Auxiliary.GoogleAuthenticationToken.DeleteAll(); + res.redirect(RouteStore.delete); } - await Database.Auxiliary.GoogleAuthenticationToken.DeleteAll(); - res.redirect(RouteStore.delete); }); const UploadError = (count: number) => `Unable to upload ${count} images to Dash's server`; - DashServer.post(RouteStore.googlePhotosMediaDownload, async (req, res) => { - const contents: { mediaItems: MediaItem[] } = req.body; - let failed = 0; - if (contents) { - const completed: Opt[] = []; - for (let item of contents.mediaItems) { - const { contentSize, ...attributes } = await DashUploadUtils.InspectImage(item.baseUrl); - const found: Opt = await Database.Auxiliary.QueryUploadHistory(contentSize!); - if (!found) { - const upload = await DashUploadUtils.UploadInspectedImage({ contentSize, ...attributes }, item.filename, prefix).catch(error => _error(res, downloadError, error)); - if (upload) { - completed.push(upload); - await Database.Auxiliary.LogUpload(upload); + router.addSupervisedRoute({ + method: Method.POST, + subscription: RouteStore.googlePhotosMediaDownload, + onValidation: async ({ req, res }) => { + const contents: { mediaItems: MediaItem[] } = req.body; + let failed = 0; + if (contents) { + const completed: Opt[] = []; + for (let item of contents.mediaItems) { + const { contentSize, ...attributes } = await DashUploadUtils.InspectImage(item.baseUrl); + const found: Opt = await Database.Auxiliary.QueryUploadHistory(contentSize!); + if (!found) { + const upload = await DashUploadUtils.UploadInspectedImage({ contentSize, ...attributes }, item.filename, prefix).catch(error => _error(res, downloadError, error)); + if (upload) { + completed.push(upload); + await Database.Auxiliary.LogUpload(upload); + } else { + failed++; + } } else { - failed++; + completed.push(found); } - } else { - completed.push(found); } + if (failed) { + return _error(res, UploadError(failed)); + } + return _success(res, completed); } - if (failed) { - return _error(res, UploadError(failed)); - } - return _success(res, completed); + _invalid(res, requestError); } - _invalid(res, requestError); - }); + }) const suffixMap: { [type: string]: (string | [string, string | ((json: any) => any)]) } = { "number": "_n", -- cgit v1.2.3-70-g09d2 From e20756093c7f3e15795af6b71a4fae3092926452 Mon Sep 17 00:00:00 2001 From: Sam Wilkins <35748010+samwilkins333@users.noreply.github.com> Date: Thu, 17 Oct 2019 03:04:09 -0400 Subject: try execute --- src/server/RouteManager.ts | 28 +++++++++++----------------- 1 file changed, 11 insertions(+), 17 deletions(-) (limited to 'src') diff --git a/src/server/RouteManager.ts b/src/server/RouteManager.ts index 626014d1a..1e6717348 100644 --- a/src/server/RouteManager.ts +++ b/src/server/RouteManager.ts @@ -3,7 +3,6 @@ import { RouteStore } from "./RouteStore"; import { DashUserModel } from "./authentication/models/user_model"; import * as express from 'express'; import * as qs from 'query-string'; -import { Opt } from "../new_fields/Doc"; export default 
class RouteManager { private server: express.Express; @@ -30,33 +29,28 @@ export default class RouteManager { addSupervisedRoute(initializer: RouteInitializer) { const { method, subscription, onValidation, onRejection, onError, onGuestAccess } = initializer; const isRelease = this._isRelease; - let abstracted = async (req: express.Request, res: express.Response) => { + let supervised = async (req: express.Request, res: express.Response) => { const { user, originalUrl: target } = req; const core = { req, res, isRelease: isRelease }; - if (user) { + const tryExecute = async (target: any, args: any) => { try { - await onValidation({ ...core, user: user as any }); + await target(args); } catch (e) { if (onError) { onError({ ...core, error: e }); } else { - _error(res, `The server encountered an internal error handling ${target}.`, e); + _error(res, `The server encountered an internal error when serving ${target}.`, e); } } + } + if (user) { + await tryExecute(onValidation, { ...core, user: user as any }); } else { if (isGuestAccess(req) && onGuestAccess) { - await onGuestAccess(core); + await tryExecute(onGuestAccess, core); } else { req.session!.target = target; - try { - await (onRejection || LoginRedirect)(core); - } catch (e) { - if (onError) { - onError({ ...core, error: e }); - } else { - _error(res, `The server encountered an internal error when rejecting ${target}.`, e); - } - } + await tryExecute(onRejection || LoginRedirect, core); } } }; @@ -69,10 +63,10 @@ export default class RouteManager { } switch (method) { case Method.GET: - this.server.get(route, abstracted); + this.server.get(route, supervised); break; case Method.POST: - this.server.post(route, abstracted); + this.server.post(route, supervised); break; } }; -- cgit v1.2.3-70-g09d2 From a432dd429540f5e2b5e1efe7cb766ee96d0f857d Mon Sep 17 00:00:00 2001 From: Sam Wilkins <35748010+samwilkins333@users.noreply.github.com> Date: Thu, 17 Oct 2019 03:05:41 -0400 Subject: logical short ciruit --- src/server/RouteManager.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'src') diff --git a/src/server/RouteManager.ts b/src/server/RouteManager.ts index 1e6717348..879b115ac 100644 --- a/src/server/RouteManager.ts +++ b/src/server/RouteManager.ts @@ -46,7 +46,7 @@ export default class RouteManager { if (user) { await tryExecute(onValidation, { ...core, user: user as any }); } else { - if (isGuestAccess(req) && onGuestAccess) { + if (onGuestAccess && isGuestAccess(req)) { await tryExecute(onGuestAccess, core); } else { req.session!.target = target; -- cgit v1.2.3-70-g09d2 From 19ebd515630155e95318dc3a8801727d54f2db6e Mon Sep 17 00:00:00 2001 From: Sam Wilkins <35748010+samwilkins333@users.noreply.github.com> Date: Thu, 17 Oct 2019 03:09:16 -0400 Subject: reorder --- src/server/RouteManager.ts | 32 ++++++++++++++++---------------- 1 file changed, 16 insertions(+), 16 deletions(-) (limited to 'src') diff --git a/src/server/RouteManager.ts b/src/server/RouteManager.ts index 879b115ac..5755c1f7e 100644 --- a/src/server/RouteManager.ts +++ b/src/server/RouteManager.ts @@ -4,6 +4,21 @@ import { DashUserModel } from "./authentication/models/user_model"; import * as express from 'express'; import * as qs from 'query-string'; +export enum Method { + GET, + POST +} + +export interface CoreArguments { + req: express.Request, + res: express.Response, + isRelease: boolean; +} + +export type OnValidation = (core: CoreArguments & { user: DashUserModel }) => any | Promise; +export type OnUnauthenticated = (core: CoreArguments) => any | 
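// editor's note: the short-circuit commit above flips the guard to
// `onGuestAccess && isGuestAccess(req)` so the query-string parse inside isGuestAccess only
// runs when a guest handler is actually registered. A tiny standalone illustration:
let urlParses = 0;
const isGuestAccessSketch = (url: string) => { urlParses++; return url.includes("sharing=true"); };

let onGuestAccessSketch: (() => void) | undefined = undefined;   // a route with no guest handler
const allowGuest = onGuestAccessSketch && isGuestAccessSketch("/doc/abc?sharing=true");
console.assert(urlParses === 0 && !allowGuest);                  // the parse never ran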
Promise; +export type OnError = (core: CoreArguments & { error: any }) => any | Promise; + export default class RouteManager { private server: express.Express; private _isRelease: boolean; @@ -31,7 +46,7 @@ export default class RouteManager { const isRelease = this._isRelease; let supervised = async (req: express.Request, res: express.Response) => { const { user, originalUrl: target } = req; - const core = { req, res, isRelease: isRelease }; + const core = { req, res, isRelease }; const tryExecute = async (target: any, args: any) => { try { await target(args); @@ -79,21 +94,6 @@ export default class RouteManager { } -export enum Method { - GET, - POST -} - -export interface CoreArguments { - req: express.Request, - res: express.Response, - isRelease: boolean; -} - -export type OnValidation = (core: CoreArguments & { user: DashUserModel }) => any | Promise; -export type OnUnauthenticated = (core: CoreArguments) => any | Promise; -export type OnError = (core: CoreArguments & { error: any }) => any | Promise; - const LoginRedirect: OnUnauthenticated = ({ res }) => res.redirect(RouteStore.login); export interface RouteInitializer { -- cgit v1.2.3-70-g09d2 From e385c9b0285a1d015917eca49dfc190d9810c8d9 Mon Sep 17 00:00:00 2001 From: Sam Wilkins <35748010+samwilkins333@users.noreply.github.com> Date: Thu, 17 Oct 2019 03:19:36 -0400 Subject: sig int log --- src/server/database.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) (limited to 'src') diff --git a/src/server/database.ts b/src/server/database.ts index 44c49d03e..12626e594 100644 --- a/src/server/database.ts +++ b/src/server/database.ts @@ -26,7 +26,7 @@ export namespace Database { const { connection } = mongoose; process.on('SIGINT', () => { connection.close(() => { - console.log('Mongoose default connection disconnected through app termination'); + console.log(`SIGINT closed mongoose connection at ${url}`); process.exit(0); }); }); @@ -34,10 +34,10 @@ export namespace Database { await new Promise((resolve, reject) => { connection.on('error', reject); connection.on('disconnected', () => { - console.log(`Mongoose connection at ${url} now closed`); + console.log(`disconnecting mongoose connection at ${url}`); }); connection.on('connected', () => { - console.log(`Mongoose established default connection at ${url}`); + console.log(`mongoose established default connection at ${url}`); resolve(); }); mongoose.connect(url, { useNewUrlParser: true }); -- cgit v1.2.3-70-g09d2 From df7ed1e41472909e802116adaa285281ec7588ee Mon Sep 17 00:00:00 2001 From: Sam Wilkins <35748010+samwilkins333@users.noreply.github.com> Date: Thu, 17 Oct 2019 04:01:15 -0400 Subject: streamlined --- src/server/RouteManager.ts | 59 ++++++++++++++++++++-------------------------- src/server/index.ts | 18 +++++++++++--- 2 files changed, 40 insertions(+), 37 deletions(-) (limited to 'src') diff --git a/src/server/RouteManager.ts b/src/server/RouteManager.ts index 5755c1f7e..54f9cc460 100644 --- a/src/server/RouteManager.ts +++ b/src/server/RouteManager.ts @@ -2,7 +2,7 @@ import RouteSubscriber from "./RouteSubscriber"; import { RouteStore } from "./RouteStore"; import { DashUserModel } from "./authentication/models/user_model"; import * as express from 'express'; -import * as qs from 'query-string'; +import { Opt } from "../new_fields/Doc"; export enum Method { GET, @@ -19,6 +19,14 @@ export type OnValidation = (core: CoreArguments & { user: DashUserModel }) => an export type OnUnauthenticated = (core: CoreArguments) => any | Promise; export type OnError = (core: 
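// editor's note: the "sig int log" commit above only rewords the connection-lifecycle logs;
// the underlying pattern is a SIGINT hook that closes the shared mongoose connection before
// the process exits. Minimal sketch (url stands for the same connection string Database uses):
import mongoose from "mongoose";

function closeMongooseOnSigint(url: string) {
    process.on("SIGINT", () => {
        mongoose.connection.close(() => {
            console.log(`SIGINT closed mongoose connection at ${url}`);
            process.exit(0);
        });
    });
}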
CoreArguments & { error: any }) => any | Promise; +export interface RouteInitializer { + method: Method; + subscription: string | RouteSubscriber | (string | RouteSubscriber)[]; + onValidation: OnValidation; + onUnauthenticated?: OnUnauthenticated; + onError?: OnError; +} + export default class RouteManager { private server: express.Express; private _isRelease: boolean; @@ -42,14 +50,15 @@ export default class RouteManager { * @param subscribers the forward slash prepended path names (reference and add to RouteStore.ts) that will all invoke the given @param handler */ addSupervisedRoute(initializer: RouteInitializer) { - const { method, subscription, onValidation, onRejection, onError, onGuestAccess } = initializer; + const { method, subscription, onValidation, onUnauthenticated, onError } = initializer; const isRelease = this._isRelease; let supervised = async (req: express.Request, res: express.Response) => { const { user, originalUrl: target } = req; const core = { req, res, isRelease }; - const tryExecute = async (target: any, args: any) => { + const tryExecute = async (target: (args: any) => T | Promise, args: any) => { try { - await target(args); + const result = await target(args); + return result; } catch (e) { if (onError) { onError({ ...core, error: e }); @@ -61,13 +70,17 @@ export default class RouteManager { if (user) { await tryExecute(onValidation, { ...core, user: user as any }); } else { - if (onGuestAccess && isGuestAccess(req)) { - await tryExecute(onGuestAccess, core); + req.session!.target = target; + if (!onUnauthenticated) { + res.redirect(RouteStore.login); } else { - req.session!.target = target; - await tryExecute(onRejection || LoginRedirect, core); + await tryExecute(onUnauthenticated, core); } } + const warning = `request to ${target} fell through - this is a fallback response`; + if (!res.headersSent) { + res.send({ warning }); + } }; const subscribe = (subscriber: RouteSubscriber | string) => { let route: string; @@ -94,30 +107,6 @@ export default class RouteManager { } -const LoginRedirect: OnUnauthenticated = ({ res }) => res.redirect(RouteStore.login); - -export interface RouteInitializer { - method: Method; - subscription: string | RouteSubscriber | (string | RouteSubscriber)[]; - onValidation: OnValidation; - onRejection?: OnUnauthenticated; - onGuestAccess?: OnUnauthenticated; - onError?: OnError; -} - -const isSharedDocAccess = (target: string) => { - const shared = qs.parse(qs.extract(target), { sort: false }).sharing === "true"; - const docAccess = target.startsWith("/doc/"); - return shared && docAccess; -}; - -const isGuestAccess = (req: express.Request) => { - if (isSharedDocAccess(req.originalUrl)) { - return true; - } - return false; -} - export const STATUS = { OK: 200, BAD_REQUEST: 400, @@ -139,7 +128,9 @@ export function _invalid(res: express.Response, message: string) { res.status(STATUS.BAD_REQUEST).send(); } -export function _permission_denied(res: express.Response, message: string) { - res.statusMessage = message; +export function _permission_denied(res: express.Response, message?: string) { + if (message) { + res.statusMessage = message; + } res.status(STATUS.BAD_REQUEST).send("Permission Denied!"); } diff --git a/src/server/index.ts b/src/server/index.ts index bba8fc292..81e236894 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -36,6 +36,8 @@ import InitializeServer from './Initialization'; import { Method, _success, _permission_denied, _error, _invalid, OnUnauthenticated } from './RouteManager'; import { command_line } 
from './ActionUtilities'; var findInFiles = require('find-in-files'); +import * as qs from 'query-string'; + let youtubeApiKey: string; @@ -537,21 +539,31 @@ async function PreliminaryFunctions() { method: Method.GET, subscription: [RouteStore.home, new RouteSubscriber("/doc").add("docId")], onValidation: serve, - onGuestAccess: serve + onUnauthenticated: ({ req, ...remaining }) => { + const { originalUrl: target } = req; + const sharing = qs.parse(qs.extract(req.originalUrl), { sort: false }).sharing === "true"; + const docAccess = target.startsWith("/doc/"); + if (sharing && docAccess) { + serve({ req, ...remaining }); + } + } }); router.addSupervisedRoute({ method: Method.GET, subscription: RouteStore.getUserDocumentId, onValidation: ({ res, user }) => res.send(user.userDocumentId), - onRejection: ({ res }) => res.send(undefined) + onUnauthenticated: ({ res }) => _permission_denied(res) }); router.addSupervisedRoute({ method: Method.GET, subscription: RouteStore.getCurrUser, onValidation: ({ res, user }) => { res.send(JSON.stringify(user)); }, - onRejection: ({ res }) => res.send(JSON.stringify({ id: "__guest__", email: "" })) + onUnauthenticated: ({ res }) => { + res.send(JSON.stringify({ id: "__guest__", email: "" })) + return true; + } }); const ServicesApiKeyMap = new Map([ -- cgit v1.2.3-70-g09d2 From 8884e5cf68c3ad34e23a539201fddda169d70262 Mon Sep 17 00:00:00 2001 From: Sam Wilkins <35748010+samwilkins333@users.noreply.github.com> Date: Thu, 17 Oct 2019 04:19:02 -0400 Subject: tweak timeout for fallback response --- src/server/RouteManager.ts | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) (limited to 'src') diff --git a/src/server/RouteManager.ts b/src/server/RouteManager.ts index 54f9cc460..37eaded0d 100644 --- a/src/server/RouteManager.ts +++ b/src/server/RouteManager.ts @@ -55,10 +55,9 @@ export default class RouteManager { let supervised = async (req: express.Request, res: express.Response) => { const { user, originalUrl: target } = req; const core = { req, res, isRelease }; - const tryExecute = async (target: (args: any) => T | Promise, args: any) => { + const tryExecute = async (target: (args: any) => any | Promise, args: any) => { try { - const result = await target(args); - return result; + await target(args); } catch (e) { if (onError) { onError({ ...core, error: e }); @@ -71,16 +70,18 @@ export default class RouteManager { await tryExecute(onValidation, { ...core, user: user as any }); } else { req.session!.target = target; - if (!onUnauthenticated) { - res.redirect(RouteStore.login); - } else { + if (onUnauthenticated) { await tryExecute(onUnauthenticated, core); + } else { + res.redirect(RouteStore.login); } } - const warning = `request to ${target} fell through - this is a fallback response`; - if (!res.headersSent) { - res.send({ warning }); - } + setTimeout(() => { + if (!res.headersSent) { + const warning = `request to ${target} fell through - this is a fallback response`; + res.send({ warning }); + } + }, 1000); }; const subscribe = (subscriber: RouteSubscriber | string) => { let route: string; -- cgit v1.2.3-70-g09d2 From 37fa239403f58de77a5c860ff53909dc624beae0 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Thu, 17 Oct 2019 14:11:15 -0400 Subject: linter errors, small fixes --- src/server/RouteManager.ts | 8 ++++---- src/server/index.ts | 17 +++++++---------- 2 files changed, 11 insertions(+), 14 deletions(-) (limited to 'src') diff --git a/src/server/RouteManager.ts b/src/server/RouteManager.ts index 37eaded0d..a3841249b 100644 
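// editor's note: with onGuestAccess gone, guest viewing of shared documents moves inline into
// the /doc route's onUnauthenticated above: the request is served only when the path starts
// with /doc/ and the query string carries sharing=true. A sketch of that predicate using the
// same query-string package (the function name is illustrative):
import * as qs from "query-string";

function isSharedDocRequest(originalUrl: string): boolean {
    const sharing = qs.parse(qs.extract(originalUrl), { sort: false }).sharing === "true";
    return sharing && originalUrl.startsWith("/doc/");
}

// isSharedDocRequest("/doc/abc123?sharing=true") === true
// isSharedDocRequest("/doc/abc123")              === false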
--- a/src/server/RouteManager.ts +++ b/src/server/RouteManager.ts @@ -10,8 +10,8 @@ export enum Method { } export interface CoreArguments { - req: express.Request, - res: express.Response, + req: express.Request; + res: express.Response; isRelease: boolean; } @@ -65,9 +65,9 @@ export default class RouteManager { _error(res, `The server encountered an internal error when serving ${target}.`, e); } } - } + }; if (user) { - await tryExecute(onValidation, { ...core, user: user as any }); + await tryExecute(onValidation, { ...core, user }); } else { req.session!.target = target; if (onUnauthenticated) { diff --git a/src/server/index.ts b/src/server/index.ts index 81e236894..70add4ab2 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -322,7 +322,7 @@ async function PreliminaryFunctions() { }); zip.finalize(); } - }) + }); router.addSupervisedRoute({ method: Method.POST, @@ -427,7 +427,7 @@ async function PreliminaryFunctions() { } catch (e) { console.log(e); } }); } - }) + }); router.addSupervisedRoute({ method: Method.GET, @@ -533,7 +533,7 @@ async function PreliminaryFunctions() { let detector = new mobileDetect(req.headers['user-agent'] || ""); let filename = detector.mobile() !== null ? 'mobile/image.html' : 'index.html'; res.sendFile(path.join(__dirname, '../../deploy/' + filename)); - } + }; router.addSupervisedRoute({ method: Method.GET, @@ -559,11 +559,8 @@ async function PreliminaryFunctions() { router.addSupervisedRoute({ method: Method.GET, subscription: RouteStore.getCurrUser, - onValidation: ({ res, user }) => { res.send(JSON.stringify(user)); }, - onUnauthenticated: ({ res }) => { - res.send(JSON.stringify({ id: "__guest__", email: "" })) - return true; - } + onValidation: ({ res, user }) => res.send(JSON.stringify(user)), + onUnauthenticated: ({ res }) => res.send(JSON.stringify({ id: "__guest__", email: "" })) }); const ServicesApiKeyMap = new Map([ @@ -951,7 +948,7 @@ async function PreliminaryFunctions() { await Database.Auxiliary.DeleteAll(); res.redirect(RouteStore.delete); } - }) + }); router.addSupervisedRoute({ method: Method.GET, @@ -996,7 +993,7 @@ async function PreliminaryFunctions() { } _invalid(res, requestError); } - }) + }); const suffixMap: { [type: string]: (string | [string, string | ((json: any) => any)]) } = { "number": "_n", -- cgit v1.2.3-70-g09d2 From 369f5d5e5619b5ecddba4c44a6d134f3444ca544 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Thu, 17 Oct 2019 14:29:02 -0400 Subject: cleanup --- src/server/index.ts | 10 ---------- 1 file changed, 10 deletions(-) (limited to 'src') diff --git a/src/server/index.ts b/src/server/index.ts index 70add4ab2..c7eece703 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -553,7 +553,6 @@ async function PreliminaryFunctions() { method: Method.GET, subscription: RouteStore.getUserDocumentId, onValidation: ({ res, user }) => res.send(user.userDocumentId), - onUnauthenticated: ({ res }) => _permission_denied(res) }); router.addSupervisedRoute({ @@ -849,9 +848,6 @@ async function PreliminaryFunctions() { method: Method.GET, subscription: RouteStore.readGoogleAccessToken, onValidation: async ({ user, res }) => { - if (!user) { - return res.send(undefined); - } const userId = user.id; const token = await Database.Auxiliary.GoogleAuthenticationToken.Fetch(userId); const information = { credentialsPath, userId }; @@ -866,9 +862,6 @@ async function PreliminaryFunctions() { method: Method.POST, subscription: RouteStore.writeGoogleAccessToken, onValidation: async ({ user, req, res }) => { - if (!user) { - 
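// editor's note: the `if (!user)` guards removed in this cleanup are redundant by
// construction: addSupervisedRoute only invokes onValidation when req.user exists, and
// OnValidation's argument is typed as CoreArguments & { user: DashUserModel }, so `user`
// is guaranteed inside these handlers.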
return res.send(undefined); - } const userId = user.id; const information = { credentialsPath, userId }; res.send(await GoogleApiServerUtils.ProcessClientSideCode(information, req.body.authenticationCode)); @@ -884,9 +877,6 @@ async function PreliminaryFunctions() { subscription: RouteStore.googlePhotosMediaUpload, onValidation: async ({ user, req, res }) => { const { media } = req.body; - if (!user) { - return res.send(undefined); - } const userId = user.id; if (!userId) { return _error(res, userIdError); -- cgit v1.2.3-70-g09d2 From f42620a33f76f8fdcf7417498f3fb2c1588c064d Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Thu, 17 Oct 2019 14:29:29 -0400 Subject: saved --- src/server/index.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'src') diff --git a/src/server/index.ts b/src/server/index.ts index c7eece703..2f4e65b46 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -552,7 +552,7 @@ async function PreliminaryFunctions() { router.addSupervisedRoute({ method: Method.GET, subscription: RouteStore.getUserDocumentId, - onValidation: ({ res, user }) => res.send(user.userDocumentId), + onValidation: ({ res, user }) => res.send(user.userDocumentId) }); router.addSupervisedRoute({ -- cgit v1.2.3-70-g09d2 From cee55b4a1b13909d55708eee6c364206ae7c0d4f Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Tue, 22 Oct 2019 19:39:19 -0400 Subject: api managers and web socket initial refactoring --- src/server/ApiManagers/ApiManager.ts | 7 + src/server/ApiManagers/SearchManager.ts | 49 +++++ src/server/ApiManagers/UserManager.ts | 40 ++++ src/server/ApiManagers/UtilManager.ts | 54 ++++++ src/server/Initialization.ts | 4 +- src/server/Websocket/Websocket.ts | 213 +++++++++++++++++++++ src/server/index.ts | 327 ++------------------------------ 7 files changed, 378 insertions(+), 316 deletions(-) create mode 100644 src/server/ApiManagers/ApiManager.ts create mode 100644 src/server/ApiManagers/SearchManager.ts create mode 100644 src/server/ApiManagers/UserManager.ts create mode 100644 src/server/ApiManagers/UtilManager.ts create mode 100644 src/server/Websocket/Websocket.ts (limited to 'src') diff --git a/src/server/ApiManagers/ApiManager.ts b/src/server/ApiManagers/ApiManager.ts new file mode 100644 index 000000000..264c78a17 --- /dev/null +++ b/src/server/ApiManagers/ApiManager.ts @@ -0,0 +1,7 @@ +import RouteManager from "../RouteManager"; + +export default abstract class ApiManager { + + public abstract register(router: RouteManager): void; + +} \ No newline at end of file diff --git a/src/server/ApiManagers/SearchManager.ts b/src/server/ApiManagers/SearchManager.ts new file mode 100644 index 000000000..15b87204c --- /dev/null +++ b/src/server/ApiManagers/SearchManager.ts @@ -0,0 +1,49 @@ +import ApiManager from "./ApiManager"; +import RouteManager, { Method } from "../RouteManager"; +import { Search } from "../Search"; +var findInFiles = require('find-in-files'); +import * as path from 'path'; +import { uploadDirectory } from ".."; + +export default class SearchManager extends ApiManager { + + public register(router: RouteManager): void { + + router.addSupervisedRoute({ + method: Method.GET, + subscription: "/textsearch", + onValidation: async ({ req, res }) => { + let q = req.query.q; + if (q === undefined) { + res.send([]); + return; + } + let results = await findInFiles.find({ 'term': q, 'flags': 'ig' }, uploadDirectory + "text", ".txt$"); + let resObj: { ids: string[], numFound: number, lines: string[] } = { ids: [], numFound: 0, lines: [] }; + for (var result in 
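// editor's note: this commit begins splitting the monolithic route setup in index.ts into
// ApiManager subclasses, each registering its own routes against the shared RouteManager.
// A hedged sketch of the pattern with an invented ExampleManager:
import ApiManager from "./ApiManager";
import RouteManager, { Method } from "../RouteManager";

export default class ExampleManager extends ApiManager {
    public register(router: RouteManager): void {
        router.addSupervisedRoute({
            method: Method.GET,
            subscription: "/example",
            onValidation: ({ res }) => res.send("ok")
        });
    }
}
// wired up alongside the real managers inside routeSetter: new ExampleManager().register(router);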
results) { + resObj.ids.push(path.basename(result, ".txt").replace(/upload_/, "")); + resObj.lines.push(results[result].line); + resObj.numFound++; + } + res.send(resObj); + } + }); + + router.addSupervisedRoute({ + method: Method.GET, + subscription: "/search", + onValidation: async ({ req, res }) => { + const solrQuery: any = {}; + ["q", "fq", "start", "rows", "hl", "hl.fl"].forEach(key => solrQuery[key] = req.query[key]); + if (solrQuery.q === undefined) { + res.send([]); + return; + } + let results = await Search.Instance.search(solrQuery); + res.send(results); + } + }); + + } + +} \ No newline at end of file diff --git a/src/server/ApiManagers/UserManager.ts b/src/server/ApiManagers/UserManager.ts new file mode 100644 index 000000000..bb8837dc6 --- /dev/null +++ b/src/server/ApiManagers/UserManager.ts @@ -0,0 +1,40 @@ +import ApiManager from "./ApiManager"; +import RouteManager, { Method } from "../RouteManager"; +import { WebSocket } from "../Websocket/Websocket"; + +export default class UserManager extends ApiManager { + + public register(router: RouteManager): void { + router.addSupervisedRoute({ + method: Method.GET, + subscription: "/whosOnline", + onValidation: ({ res }) => { + let users: any = { active: {}, inactive: {} }; + const now = Date.now(); + + const { timeMap } = WebSocket; + for (const user in timeMap) { + const time = timeMap[user]; + const key = ((now - time) / 1000) < (60 * 5) ? "active" : "inactive"; + users[key][user] = `Last active ${this.msToTime(now - time)} ago`; + } + + res.send(users); + } + }); + } + + private msToTime(duration: number) { + let milliseconds = Math.floor((duration % 1000) / 100), + seconds = Math.floor((duration / 1000) % 60), + minutes = Math.floor((duration / (1000 * 60)) % 60), + hours = Math.floor((duration / (1000 * 60 * 60)) % 24); + + let hoursS = (hours < 10) ? "0" + hours : hours; + let minutesS = (minutes < 10) ? "0" + minutes : minutes; + let secondsS = (seconds < 10) ? "0" + seconds : seconds; + + return hoursS + ":" + minutesS + ":" + secondsS + "." 
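// editor's note: a quick worked example of msToTime, which formats the idle time reported by
// /whosOnline; tenths of a second are appended after the dot:
//   msToTime(83450) === "00:01:23.4"   (1 min, 23 s, 450 ms -> 4 tenths)
//   msToTime(5000)  === "00:00:05.0"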
+ milliseconds; + } + +} \ No newline at end of file diff --git a/src/server/ApiManagers/UtilManager.ts b/src/server/ApiManagers/UtilManager.ts new file mode 100644 index 000000000..79b904e8a --- /dev/null +++ b/src/server/ApiManagers/UtilManager.ts @@ -0,0 +1,54 @@ +import ApiManager from "./ApiManager"; +import RouteManager, { Method } from "../RouteManager"; +import { exec } from 'child_process'; +import { command_line } from "../ActionUtilities"; + +export default class UtilManager extends ApiManager { + + public register(router: RouteManager): void { + + router.addSupervisedRoute({ + method: Method.GET, + subscription: "/pull", + onValidation: ({ res }) => { + exec('"C:\\Program Files\\Git\\git-bash.exe" -c "git pull"', err => { + if (err) { + res.send(err.message); + return; + } + res.redirect("/"); + }); + } + }); + + router.addSupervisedRoute({ + method: Method.GET, + subscription: "/buxton", + onValidation: ({ res }) => { + let cwd = '../scraping/buxton'; + + let onResolved = (stdout: string) => { console.log(stdout); res.redirect("/"); }; + let onRejected = (err: any) => { console.error(err.message); res.send(err); }; + let tryPython3 = () => command_line('python3 scraper.py', cwd).then(onResolved, onRejected); + + command_line('python scraper.py', cwd).then(onResolved, tryPython3); + }, + }); + + router.addSupervisedRoute({ + method: Method.GET, + subscription: "/version", + onValidation: ({ res }) => { + exec('"C:\\Program Files\\Git\\bin\\git.exe" rev-parse HEAD', (err, stdout) => { + if (err) { + res.send(err.message); + return; + } + res.send(stdout); + }); + } + }); + + } + +} \ No newline at end of file diff --git a/src/server/Initialization.ts b/src/server/Initialization.ts index 9646dc195..e4c97cc48 100644 --- a/src/server/Initialization.ts +++ b/src/server/Initialization.ts @@ -37,9 +37,11 @@ export default async function InitializeServer(options: InitializationOptions) { registerAuthenticationRoutes(server); registerCorsProxy(server); - routeSetter(new RouteManager(server, determineEnvironment())); + const isRelease = determineEnvironment(); + routeSetter(new RouteManager(server, isRelease)); server.listen(listenAtPort, () => console.log(`server started at http://localhost:${listenAtPort}`)); + return isRelease; } const week = 7 * 24 * 60 * 60 * 1000; diff --git a/src/server/Websocket/Websocket.ts b/src/server/Websocket/Websocket.ts new file mode 100644 index 000000000..2461dd8d5 --- /dev/null +++ b/src/server/Websocket/Websocket.ts @@ -0,0 +1,213 @@ +import { Utils } from "../../Utils"; +import { MessageStore, Transferable, Types, Diff, YoutubeQueryInput, YoutubeQueryTypes } from "../Message"; +import { Client } from "../Client"; +import { Socket } from "socket.io"; +import { Database } from "../database"; +import { Search } from "../Search"; +import io from 'socket.io'; +import YoutubeApi from "../apis/youtube/youtubeApiSample"; +import { youtubeApiKey } from ".."; + +export namespace WebSocket { + + interface Map { + [key: string]: Client; + } + let clients: Map = {}; + + export const socketMap = new Map(); + export const timeMap: { [id: string]: number } = {}; + + export function initialize(serverPort: number, isRelease: boolean) { + const endpoint = io(); + endpoint.listen(serverPort); + console.log(`listening on port ${serverPort}`); + + endpoint.on("connection", function (socket: Socket) { + socket.use((_packet, next) => { + let id = socketMap.get(socket); + if (id) { + timeMap[id] = Date.now(); + } + next(); + }); + + Utils.Emit(socket, MessageStore.Foo, 
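// editor's note: the socket.use hook registered just above stamps timeMap[id] with Date.now()
// on every inbound packet; /whosOnline (UserManager) reads that same map. Minimal sketch of
// the bookkeeping, reusing the Socket type and the maps declared above:
function touchActivity(socket: Socket, socketMap: Map<Socket, string>, timeMap: { [id: string]: number }) {
    const id = socketMap.get(socket);
    if (id) {
        timeMap[id] = Date.now();
    }
}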
"handshooken"); + + Utils.AddServerHandler(socket, MessageStore.Bar, guid => barReceived(socket, guid)); + Utils.AddServerHandler(socket, MessageStore.SetField, (args) => setField(socket, args)); + Utils.AddServerHandlerCallback(socket, MessageStore.GetField, getField); + Utils.AddServerHandlerCallback(socket, MessageStore.GetFields, getFields); + if (isRelease) { + Utils.AddServerHandler(socket, MessageStore.DeleteAll, deleteFields); + } + + Utils.AddServerHandler(socket, MessageStore.CreateField, CreateField); + Utils.AddServerHandlerCallback(socket, MessageStore.YoutubeApiQuery, HandleYoutubeQuery); + Utils.AddServerHandler(socket, MessageStore.UpdateField, diff => UpdateField(socket, diff)); + Utils.AddServerHandler(socket, MessageStore.DeleteField, id => DeleteField(socket, id)); + Utils.AddServerHandler(socket, MessageStore.DeleteFields, ids => DeleteFields(socket, ids)); + Utils.AddServerHandlerCallback(socket, MessageStore.GetRefField, GetRefField); + Utils.AddServerHandlerCallback(socket, MessageStore.GetRefFields, GetRefFields); + }); + + } + + function HandleYoutubeQuery([query, callback]: [YoutubeQueryInput, (result?: any[]) => void]) { + switch (query.type) { + case YoutubeQueryTypes.Channels: + YoutubeApi.authorizedGetChannel(youtubeApiKey); + break; + case YoutubeQueryTypes.SearchVideo: + YoutubeApi.authorizedGetVideos(youtubeApiKey, query.userInput, callback); + case YoutubeQueryTypes.VideoDetails: + YoutubeApi.authorizedGetVideoDetails(youtubeApiKey, query.videoIds, callback); + } + } + + export async function deleteFields() { + await Database.Instance.deleteAll(); + await Search.Instance.clear(); + await Database.Instance.deleteAll('newDocuments'); + } + + export async function deleteAll() { + await Database.Instance.deleteAll(); + await Database.Instance.deleteAll('newDocuments'); + await Database.Instance.deleteAll('sessions'); + await Database.Instance.deleteAll('users'); + await Search.Instance.clear(); + } + + function barReceived(socket: SocketIO.Socket, guid: string) { + clients[guid] = new Client(guid.toString()); + console.log(`User ${guid} has connected`); + socketMap.set(socket, guid); + } + + function getField([id, callback]: [string, (result?: Transferable) => void]) { + Database.Instance.getDocument(id, (result?: Transferable) => + callback(result ? 
result : undefined)); + } + + function getFields([ids, callback]: [string[], (result: Transferable[]) => void]) { + Database.Instance.getDocuments(ids, callback); + } + + function setField(socket: Socket, newValue: Transferable) { + Database.Instance.update(newValue.id, newValue, () => + socket.broadcast.emit(MessageStore.SetField.Message, newValue)); + if (newValue.type === Types.Text) { + Search.Instance.updateDocument({ id: newValue.id, data: (newValue as any).data }); + console.log("set field"); + console.log("checking in"); + } + } + + function GetRefField([id, callback]: [string, (result?: Transferable) => void]) { + Database.Instance.getDocument(id, callback, "newDocuments"); + } + + function GetRefFields([ids, callback]: [string[], (result?: Transferable[]) => void]) { + Database.Instance.getDocuments(ids, callback, "newDocuments"); + } + + const suffixMap: { [type: string]: (string | [string, string | ((json: any) => any)]) } = { + "number": "_n", + "string": "_t", + "boolean": "_b", + "image": ["_t", "url"], + "video": ["_t", "url"], + "pdf": ["_t", "url"], + "audio": ["_t", "url"], + "web": ["_t", "url"], + "date": ["_d", value => new Date(value.date).toISOString()], + "proxy": ["_i", "fieldId"], + "list": ["_l", list => { + const results = []; + for (const value of list.fields) { + const term = ToSearchTerm(value); + if (term) { + results.push(term.value); + } + } + return results.length ? results : null; + }] + }; + + function ToSearchTerm(val: any): { suffix: string, value: any } | undefined { + if (val === null || val === undefined) { + return; + } + const type = val.__type || typeof val; + let suffix = suffixMap[type]; + if (!suffix) { + return; + } + + if (Array.isArray(suffix)) { + const accessor = suffix[1]; + if (typeof accessor === "function") { + val = accessor(val); + } else { + val = val[accessor]; + } + suffix = suffix[0]; + } + + return { suffix, value: val }; + } + + function getSuffix(value: string | [string, any]): string { + return typeof value === "string" ? 
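// editor's note: suffixMap drives Solr dynamic-field naming. A value's type picks the suffix
// appended to the field key, and an optional accessor extracts the indexable value first.
// Worked examples against the table above (field names are illustrative):
//   { width: 300 }                        -> "width_n": 300
//   { title: "story" }                    -> "title_t": "story"
//   { created: { __type: "date", ... } }  -> "created_d": new Date(value.date).toISOString()
//   { items: { __type: "list", ... } }    -> "items_l": each entry run back through ToSearchTerm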
value : value[0]; + } + + function UpdateField(socket: Socket, diff: Diff) { + Database.Instance.update(diff.id, diff.diff, + () => socket.broadcast.emit(MessageStore.UpdateField.Message, diff), false, "newDocuments"); + const docfield = diff.diff.$set; + if (!docfield) { + return; + } + const update: any = { id: diff.id }; + let dynfield = false; + for (let key in docfield) { + if (!key.startsWith("fields.")) continue; + dynfield = true; + let val = docfield[key]; + key = key.substring(7); + Object.values(suffixMap).forEach(suf => update[key + getSuffix(suf)] = { set: null }); + let term = ToSearchTerm(val); + if (term !== undefined) { + let { suffix, value } = term; + update[key + suffix] = { set: value }; + } + } + if (dynfield) { + Search.Instance.updateDocument(update); + } + } + + function DeleteField(socket: Socket, id: string) { + Database.Instance.delete({ _id: id }, "newDocuments").then(() => { + socket.broadcast.emit(MessageStore.DeleteField.Message, id); + }); + + Search.Instance.deleteDocuments([id]); + } + + function DeleteFields(socket: Socket, ids: string[]) { + Database.Instance.delete({ _id: { $in: ids } }, "newDocuments").then(() => { + socket.broadcast.emit(MessageStore.DeleteFields.Message, ids); + }); + + Search.Instance.deleteDocuments(ids); + + } + + function CreateField(newValue: any) { + Database.Instance.insert(newValue, "newDocuments"); + } + +} + diff --git a/src/server/index.ts b/src/server/index.ts index 2f4e65b46..ae0e79458 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -1,5 +1,4 @@ require('dotenv').config(); -import { exec } from 'child_process'; import * as formidable from 'formidable'; import * as fs from 'fs'; import * as sharp from 'sharp'; @@ -7,10 +6,6 @@ import * as Pdfjs from 'pdfjs-dist'; const imageDataUri = require('image-data-uri'); import * as mobileDetect from 'mobile-detect'; import * as path from 'path'; -import * as io from 'socket.io'; -import { Socket } from 'socket.io'; -import { Utils } from '../Utils'; -import { Client } from './Client'; import { Database } from './database'; import { MessageStore, Transferable, Types, Diff, YoutubeQueryTypes as YoutubeQueryType, YoutubeQueryInput } from "./Message"; import { RouteStore } from './RouteStore'; @@ -34,12 +29,13 @@ import { reject } from 'bluebird'; import RouteSubscriber from './RouteSubscriber'; import InitializeServer from './Initialization'; import { Method, _success, _permission_denied, _error, _invalid, OnUnauthenticated } from './RouteManager'; -import { command_line } from './ActionUtilities'; -var findInFiles = require('find-in-files'); import * as qs from 'query-string'; +import UtilManager from './ApiManagers/UtilManager'; +import SearchManager from './ApiManagers/SearchManager'; +import UserManager from './ApiManagers/UserManager'; +import { WebSocket } from './Websocket/Websocket'; - -let youtubeApiKey: string; +export let youtubeApiKey: string; export type Hierarchy = { [id: string]: string | Hierarchy }; export type ZipMutator = (file: Archiver.Archiver) => void | Promise; @@ -53,7 +49,7 @@ export interface NewMediaItem { const pngTypes = [".png", ".PNG"]; const jpgTypes = [".jpg", ".JPG", ".jpeg", ".JPEG"]; -const uploadDirectory = __dirname + "/public/files/"; +export const uploadDirectory = __dirname + "/public/files/"; const pdfDirectory = uploadDirectory + "text"; const solrURL = "http://localhost:8983/solr/#/dash"; @@ -70,96 +66,11 @@ async function PreliminaryFunctions() { await InitializeServer({ listenAtPort: 1050, routeSetter: router => { + new 
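// editor's note: worked example of the Solr partial update UpdateField builds above, assuming
// a diff of { $set: { "fields.width": 400 } } for document "doc1". Every known suffix variant
// is cleared and only the matching one is set, so a field that changes type leaves no stale
// value in the index:
//   { id: "doc1", width_n: { set: 400 }, width_t: { set: null }, width_b: { set: null }, ... }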
UtilManager().register(router); + new SearchManager().register(router); + new UserManager().register(router); - router.addSupervisedRoute({ - method: Method.GET, - subscription: "/pull", - onValidation: ({ res }) => { - exec('"C:\\Program Files\\Git\\git-bash.exe" -c "git pull"', err => { - if (err) { - res.send(err.message); - return; - } - res.redirect("/"); - }); - } - }); - - router.addSupervisedRoute({ - method: Method.GET, - subscription: "/textsearch", - onValidation: async ({ req, res }) => { - let q = req.query.q; - if (q === undefined) { - res.send([]); - return; - } - let results = await findInFiles.find({ 'term': q, 'flags': 'ig' }, uploadDirectory + "text", ".txt$"); - let resObj: { ids: string[], numFound: number, lines: string[] } = { ids: [], numFound: 0, lines: [] }; - for (var result in results) { - resObj.ids.push(path.basename(result, ".txt").replace(/upload_/, "")); - resObj.lines.push(results[result].line); - resObj.numFound++; - } - res.send(resObj); - } - }); - - router.addSupervisedRoute({ - method: Method.GET, - subscription: "/buxton", - onValidation: ({ res }) => { - let cwd = '../scraping/buxton'; - - let onResolved = (stdout: string) => { console.log(stdout); res.redirect("/"); }; - let onRejected = (err: any) => { console.error(err.message); res.send(err); }; - let tryPython3 = () => command_line('python3 scraper.py', cwd).then(onResolved, onRejected); - - command_line('python scraper.py', cwd).then(onResolved, tryPython3); - }, - }); - - router.addSupervisedRoute({ - method: Method.GET, - subscription: "/version", - onValidation: ({ res }) => { - exec('"C:\\Program Files\\Git\\bin\\git.exe" rev-parse HEAD', (err, stdout) => { - if (err) { - res.send(err.message); - return; - } - res.send(stdout); - }); - } - }); - - router.addSupervisedRoute({ - method: Method.GET, - subscription: "/search", - onValidation: async ({ req, res }) => { - const solrQuery: any = {}; - ["q", "fq", "start", "rows", "hl", "hl.fl"].forEach(key => solrQuery[key] = req.query[key]); - if (solrQuery.q === undefined) { - res.send([]); - return; - } - let results = await Search.Instance.search(solrQuery); - res.send(results); - } - }); - - function msToTime(duration: number) { - let milliseconds = Math.floor((duration % 1000) / 100), - seconds = Math.floor((duration / 1000) % 60), - minutes = Math.floor((duration / (1000 * 60)) % 60), - hours = Math.floor((duration / (1000 * 60 * 60)) % 24); - - let hoursS = (hours < 10) ? "0" + hours : hours; - let minutesS = (minutes < 10) ? "0" + minutes : minutes; - let secondsS = (seconds < 10) ? "0" + seconds : seconds; - - return hoursS + ":" + minutesS + ":" + secondsS + "." + milliseconds; - } + WebSocket.initialize(serverPort, router.isRelease); async function getDocs(id: string) { const files = new Set(); @@ -429,23 +340,6 @@ async function PreliminaryFunctions() { } }); - router.addSupervisedRoute({ - method: Method.GET, - subscription: "/whosOnline", - onValidation: ({ res }) => { - let users: any = { active: {}, inactive: {} }; - const now = Date.now(); - - for (const user in timeMap) { - const time = timeMap[user]; - const key = ((now - time) / 1000) < (60 * 5) ? 
"active" : "inactive"; - users[key][user] = `Last active ${msToTime(now - time)} ago`; - } - - res.send(users); - } - }); - router.addSupervisedRoute({ method: Method.GET, subscription: new RouteSubscriber("/thumbnail").add("filename"), @@ -704,7 +598,7 @@ async function PreliminaryFunctions() { if (isRelease) { return _permission_denied(res, deletionPermissionError); } - deleteFields().then(() => res.redirect(RouteStore.home)); + WebSocket.deleteFields().then(() => res.redirect(RouteStore.home)); } }); @@ -715,106 +609,10 @@ async function PreliminaryFunctions() { if (isRelease) { return _permission_denied(res, deletionPermissionError); } - deleteAll().then(() => res.redirect(RouteStore.home)); - } - }); - - const server = io(); - interface Map { - [key: string]: Client; - } - let clients: Map = {}; - - let socketMap = new Map(); - let timeMap: { [id: string]: number } = {}; - - server.on("connection", function (socket: Socket) { - socket.use((packet, next) => { - let id = socketMap.get(socket); - if (id) { - timeMap[id] = Date.now(); - } - next(); - }); - - Utils.Emit(socket, MessageStore.Foo, "handshooken"); - - Utils.AddServerHandler(socket, MessageStore.Bar, guid => barReceived(socket, guid)); - Utils.AddServerHandler(socket, MessageStore.SetField, (args) => setField(socket, args)); - Utils.AddServerHandlerCallback(socket, MessageStore.GetField, getField); - Utils.AddServerHandlerCallback(socket, MessageStore.GetFields, getFields); - if (!router.release) { - Utils.AddServerHandler(socket, MessageStore.DeleteAll, deleteFields); + WebSocket.deleteAll().then(() => res.redirect(RouteStore.home)); } - - Utils.AddServerHandler(socket, MessageStore.CreateField, CreateField); - Utils.AddServerHandlerCallback(socket, MessageStore.YoutubeApiQuery, HandleYoutubeQuery); - Utils.AddServerHandler(socket, MessageStore.UpdateField, diff => UpdateField(socket, diff)); - Utils.AddServerHandler(socket, MessageStore.DeleteField, id => DeleteField(socket, id)); - Utils.AddServerHandler(socket, MessageStore.DeleteFields, ids => DeleteFields(socket, ids)); - Utils.AddServerHandlerCallback(socket, MessageStore.GetRefField, GetRefField); - Utils.AddServerHandlerCallback(socket, MessageStore.GetRefFields, GetRefFields); }); - async function deleteFields() { - await Database.Instance.deleteAll(); - await Search.Instance.clear(); - await Database.Instance.deleteAll('newDocuments'); - } - - async function deleteAll() { - await Database.Instance.deleteAll(); - await Database.Instance.deleteAll('newDocuments'); - await Database.Instance.deleteAll('sessions'); - await Database.Instance.deleteAll('users'); - await Search.Instance.clear(); - } - - function barReceived(socket: SocketIO.Socket, guid: string) { - clients[guid] = new Client(guid.toString()); - console.log(`User ${guid} has connected`); - socketMap.set(socket, guid); - } - - function getField([id, callback]: [string, (result?: Transferable) => void]) { - Database.Instance.getDocument(id, (result?: Transferable) => - callback(result ? 
result : undefined)); - } - - function getFields([ids, callback]: [string[], (result: Transferable[]) => void]) { - Database.Instance.getDocuments(ids, callback); - } - - function setField(socket: Socket, newValue: Transferable) { - Database.Instance.update(newValue.id, newValue, () => - socket.broadcast.emit(MessageStore.SetField.Message, newValue)); - if (newValue.type === Types.Text) { - Search.Instance.updateDocument({ id: newValue.id, data: (newValue as any).data }); - console.log("set field"); - console.log("checking in"); - } - } - - function GetRefField([id, callback]: [string, (result?: Transferable) => void]) { - Database.Instance.getDocument(id, callback, "newDocuments"); - } - - function GetRefFields([ids, callback]: [string[], (result?: Transferable[]) => void]) { - Database.Instance.getDocuments(ids, callback, "newDocuments"); - } - - function HandleYoutubeQuery([query, callback]: [YoutubeQueryInput, (result?: any[]) => void]) { - switch (query.type) { - case YoutubeQueryType.Channels: - YoutubeApi.authorizedGetChannel(youtubeApiKey); - break; - case YoutubeQueryType.SearchVideo: - YoutubeApi.authorizedGetVideos(youtubeApiKey, query.userInput, callback); - case YoutubeQueryType.VideoDetails: - YoutubeApi.authorizedGetVideoDetails(youtubeApiKey, query.videoIds, callback); - } - } - const credentialsPath = path.join(__dirname, "./credentials/google_docs_credentials.json"); const EndpointHandlerMap = new Map([ @@ -984,107 +782,6 @@ async function PreliminaryFunctions() { _invalid(res, requestError); } }); - - const suffixMap: { [type: string]: (string | [string, string | ((json: any) => any)]) } = { - "number": "_n", - "string": "_t", - "boolean": "_b", - "image": ["_t", "url"], - "video": ["_t", "url"], - "pdf": ["_t", "url"], - "audio": ["_t", "url"], - "web": ["_t", "url"], - "date": ["_d", value => new Date(value.date).toISOString()], - "proxy": ["_i", "fieldId"], - "list": ["_l", list => { - const results = []; - for (const value of list.fields) { - const term = ToSearchTerm(value); - if (term) { - results.push(term.value); - } - } - return results.length ? results : null; - }] - }; - - function ToSearchTerm(val: any): { suffix: string, value: any } | undefined { - if (val === null || val === undefined) { - return; - } - const type = val.__type || typeof val; - let suffix = suffixMap[type]; - if (!suffix) { - return; - } - - if (Array.isArray(suffix)) { - const accessor = suffix[1]; - if (typeof accessor === "function") { - val = accessor(val); - } else { - val = val[accessor]; - } - suffix = suffix[0]; - } - - return { suffix, value: val }; - } - - function getSuffix(value: string | [string, any]): string { - return typeof value === "string" ? 
value : value[0]; - } - - function UpdateField(socket: Socket, diff: Diff) { - Database.Instance.update(diff.id, diff.diff, - () => socket.broadcast.emit(MessageStore.UpdateField.Message, diff), false, "newDocuments"); - const docfield = diff.diff.$set; - if (!docfield) { - return; - } - const update: any = { id: diff.id }; - let dynfield = false; - for (let key in docfield) { - if (!key.startsWith("fields.")) continue; - dynfield = true; - let val = docfield[key]; - key = key.substring(7); - Object.values(suffixMap).forEach(suf => update[key + getSuffix(suf)] = { set: null }); - let term = ToSearchTerm(val); - if (term !== undefined) { - let { suffix, value } = term; - update[key + suffix] = { set: value }; - } - } - if (dynfield) { - Search.Instance.updateDocument(update); - } - } - - function DeleteField(socket: Socket, id: string) { - Database.Instance.delete({ _id: id }, "newDocuments").then(() => { - socket.broadcast.emit(MessageStore.DeleteField.Message, id); - }); - - Search.Instance.deleteDocuments([id]); - } - - function DeleteFields(socket: Socket, ids: string[]) { - Database.Instance.delete({ _id: { $in: ids } }, "newDocuments").then(() => { - socket.broadcast.emit(MessageStore.DeleteFields.Message, ids); - }); - - Search.Instance.deleteDocuments(ids); - - } - - function CreateField(newValue: any) { - Database.Instance.insert(newValue, "newDocuments"); - } - - server.listen(serverPort); - console.log(`listening on port ${serverPort}`); } }); - })(); \ No newline at end of file -- cgit v1.2.3-70-g09d2 From fcf67616b9fd6f98d631f6c8eab31a19a2a2e86d Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Tue, 22 Oct 2019 19:45:33 -0400 Subject: start --- src/server/Initialization.ts | 3 +- src/server/RouteManager.ts | 2 +- src/server/index.ts | 1312 +++++++++++++++++++++--------------------- 3 files changed, 661 insertions(+), 656 deletions(-) (limited to 'src') diff --git a/src/server/Initialization.ts b/src/server/Initialization.ts index e4c97cc48..fbb5ae7a6 100644 --- a/src/server/Initialization.ts +++ b/src/server/Initialization.ts @@ -19,9 +19,10 @@ import * as whm from 'webpack-hot-middleware'; import * as fs from 'fs'; import * as request from 'request'; +export type RouteSetter = (server: RouteManager) => void; export interface InitializationOptions { listenAtPort: number; - routeSetter: (server: RouteManager) => void; + routeSetter: RouteSetter; } export default async function InitializeServer(options: InitializationOptions) { diff --git a/src/server/RouteManager.ts b/src/server/RouteManager.ts index a3841249b..b3864e89c 100644 --- a/src/server/RouteManager.ts +++ b/src/server/RouteManager.ts @@ -31,7 +31,7 @@ export default class RouteManager { private server: express.Express; private _isRelease: boolean; - public get release() { + public get isRelease() { return this._isRelease; } diff --git a/src/server/index.ts b/src/server/index.ts index ae0e79458..93f4238bc 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -7,7 +7,6 @@ const imageDataUri = require('image-data-uri'); import * as mobileDetect from 'mobile-detect'; import * as path from 'path'; import { Database } from './database'; -import { MessageStore, Transferable, Types, Diff, YoutubeQueryTypes as YoutubeQueryType, YoutubeQueryInput } from "./Message"; import { RouteStore } from './RouteStore'; import v4 = require('uuid/v4'); import { createCanvas } from "canvas"; @@ -28,7 +27,7 @@ import { ParsedPDF } from "./PdfTypes"; import { reject } from 'bluebird'; import RouteSubscriber from './RouteSubscriber'; 
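The RouteSetter alias added above, together with the release -> isRelease rename in RouteManager, lets index.ts hand InitializeServer a named, free-standing route setter instead of an inline closure. A minimal sketch of that wiring, assuming only the signatures shown in this patch (the "/ping" route and its handler are invented purely for illustration):

    import InitializeServer, { RouteSetter } from './Initialization';
    import RouteManager, { Method } from './RouteManager';

    // The route setter is now an ordinary named function, so it can be
    // defined (and tested) separately from server startup.
    const routeSetter: RouteSetter = (router: RouteManager) => {
        router.addSupervisedRoute({
            method: Method.GET,
            subscription: "/ping",                        // hypothetical route, for illustration only
            onValidation: ({ res }) => res.send("pong"),  // runs only for an authenticated session
        });
        // router.isRelease (formerly router.release) is available here to
        // guard debug-only routes such as the deletion endpoints below.
    };

    async function start() {
        await InitializeServer({ listenAtPort: 1050, routeSetter });
    }

    start();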
import InitializeServer from './Initialization'; -import { Method, _success, _permission_denied, _error, _invalid, OnUnauthenticated } from './RouteManager'; +import RouteManager, { Method, _success, _permission_denied, _error, _invalid, OnUnauthenticated } from './RouteManager'; import * as qs from 'query-string'; import UtilManager from './ApiManagers/UtilManager'; import SearchManager from './ApiManagers/SearchManager'; @@ -53,735 +52,740 @@ export const uploadDirectory = __dirname + "/public/files/"; const pdfDirectory = uploadDirectory + "text"; const solrURL = "http://localhost:8983/solr/#/dash"; -YoutubeApi.readApiKey((apiKey: string) => youtubeApiKey = apiKey); +start(); + +async function start() { + await PreliminaryFunctions(); + await InitializeServer({ listenAtPort: 1050, routeSetter }); +} async function PreliminaryFunctions() { + await new Promise(resolve => { + YoutubeApi.readApiKey((apiKey: string) => { + youtubeApiKey = apiKey; + resolve(); + }); + }); await GoogleApiServerUtils.LoadOAuthClient(); await DashUploadUtils.createIfNotExists(pdfDirectory); await Database.tryInitializeConnection(); } -(async () => { - await PreliminaryFunctions(); - await InitializeServer({ - listenAtPort: 1050, - routeSetter: router => { - new UtilManager().register(router); - new SearchManager().register(router); - new UserManager().register(router); - - WebSocket.initialize(serverPort, router.isRelease); - - async function getDocs(id: string) { - const files = new Set(); - const docs: { [id: string]: any } = {}; - const fn = (doc: any): string[] => { - const id = doc.id; - if (typeof id === "string" && id.endsWith("Proto")) { - //Skip protos - return []; - } - const ids: string[] = []; - for (const key in doc.fields) { - if (!doc.fields.hasOwnProperty(key)) { - continue; - } - const field = doc.fields[key]; - if (field === undefined || field === null) { - continue; - } +function routeSetter(router: RouteManager) { + new UtilManager().register(router); + new SearchManager().register(router); + new UserManager().register(router); + + WebSocket.initialize(serverPort, router.isRelease); + + async function getDocs(id: string) { + const files = new Set(); + const docs: { [id: string]: any } = {}; + const fn = (doc: any): string[] => { + const id = doc.id; + if (typeof id === "string" && id.endsWith("Proto")) { + //Skip protos + return []; + } + const ids: string[] = []; + for (const key in doc.fields) { + if (!doc.fields.hasOwnProperty(key)) { + continue; + } + const field = doc.fields[key]; + if (field === undefined || field === null) { + continue; + } - if (field.__type === "proxy" || field.__type === "prefetch_proxy") { - ids.push(field.fieldId); - } else if (field.__type === "script" || field.__type === "computed") { - if (field.captures) { - ids.push(field.captures.fieldId); - } - } else if (field.__type === "list") { - ids.push(...fn(field)); - } else if (typeof field === "string") { - const re = /"(?:dataD|d)ocumentId"\s*:\s*"([\w\-]*)"/g; - let match: string[] | null; - while ((match = re.exec(field)) !== null) { - ids.push(match[1]); - } - } else if (field.__type === "RichTextField") { - const re = /"href"\s*:\s*"(.*?)"/g; - let match: string[] | null; - while ((match = re.exec(field.Data)) !== null) { - const urlString = match[1]; - const split = new URL(urlString).pathname.split("doc/"); - if (split.length > 1) { - ids.push(split[split.length - 1]); - } - } - const re2 = /"src"\s*:\s*"(.*?)"/g; - while ((match = re2.exec(field.Data)) !== null) { - const urlString = match[1]; - const 
pathname = new URL(urlString).pathname; - files.add(pathname); - } - } else if (["audio", "image", "video", "pdf", "web"].includes(field.__type)) { - const url = new URL(field.url); - const pathname = url.pathname; - files.add(pathname); + if (field.__type === "proxy" || field.__type === "prefetch_proxy") { + ids.push(field.fieldId); + } else if (field.__type === "script" || field.__type === "computed") { + if (field.captures) { + ids.push(field.captures.fieldId); + } + } else if (field.__type === "list") { + ids.push(...fn(field)); + } else if (typeof field === "string") { + const re = /"(?:dataD|d)ocumentId"\s*:\s*"([\w\-]*)"/g; + let match: string[] | null; + while ((match = re.exec(field)) !== null) { + ids.push(match[1]); + } + } else if (field.__type === "RichTextField") { + const re = /"href"\s*:\s*"(.*?)"/g; + let match: string[] | null; + while ((match = re.exec(field.Data)) !== null) { + const urlString = match[1]; + const split = new URL(urlString).pathname.split("doc/"); + if (split.length > 1) { + ids.push(split[split.length - 1]); } } - - if (doc.id) { - docs[doc.id] = doc; + const re2 = /"src"\s*:\s*"(.*?)"/g; + while ((match = re2.exec(field.Data)) !== null) { + const urlString = match[1]; + const pathname = new URL(urlString).pathname; + files.add(pathname); } - return ids; - }; - await Database.Instance.visit([id], fn); - return { id, docs, files }; + } else if (["audio", "image", "video", "pdf", "web"].includes(field.__type)) { + const url = new URL(field.url); + const pathname = url.pathname; + files.add(pathname); + } } - router.addSupervisedRoute({ - method: Method.GET, - subscription: new RouteSubscriber("/serializeDoc").add("docId"), - onValidation: async ({ req, res }) => { - const { docs, files } = await getDocs(req.params.docId); - res.send({ docs, files: Array.from(files) }); - } + if (doc.id) { + docs[doc.id] = doc; + } + return ids; + }; + await Database.Instance.visit([id], fn); + return { id, docs, files }; + } + + router.addSupervisedRoute({ + method: Method.GET, + subscription: new RouteSubscriber("/serializeDoc").add("docId"), + onValidation: async ({ req, res }) => { + const { docs, files } = await getDocs(req.params.docId); + res.send({ docs, files: Array.from(files) }); + } + }); + + router.addSupervisedRoute({ + method: Method.GET, + subscription: new RouteSubscriber(RouteStore.imageHierarchyExport).add('docId'), + onValidation: async ({ req, res }) => { + const id = req.params.docId; + const hierarchy: Hierarchy = {}; + await targetedVisitorRecursive(id, hierarchy); + BuildAndDispatchZip(res, async zip => { + await hierarchyTraverserRecursive(zip, hierarchy); }); + } + }); - router.addSupervisedRoute({ - method: Method.GET, - subscription: new RouteSubscriber(RouteStore.imageHierarchyExport).add('docId'), - onValidation: async ({ req, res }) => { - const id = req.params.docId; - const hierarchy: Hierarchy = {}; - await targetedVisitorRecursive(id, hierarchy); - BuildAndDispatchZip(res, async zip => { - await hierarchyTraverserRecursive(zip, hierarchy); - }); + const BuildAndDispatchZip = async (res: Response, mutator: ZipMutator): Promise => { + const zip = Archiver('zip'); + zip.pipe(res); + await mutator(zip); + return zip.finalize(); + }; + + const targetedVisitorRecursive = async (seedId: string, hierarchy: Hierarchy): Promise => { + const local: Hierarchy = {}; + const { title, data } = await getData(seedId); + const label = `${title} (${seedId})`; + if (Array.isArray(data)) { + hierarchy[label] = local; + await Promise.all(data.map(proxy => 
targetedVisitorRecursive(proxy.fieldId, local))); + } else { + hierarchy[label + path.extname(data)] = data; + } + }; + + const getData = async (seedId: string): Promise<{ data: string | any[], title: string }> => { + return new Promise<{ data: string | any[], title: string }>((resolve, reject) => { + Database.Instance.getDocument(seedId, async (result: any) => { + const { data, proto, title } = result.fields; + if (data) { + if (data.url) { + resolve({ data: data.url, title }); + } else if (data.fields) { + resolve({ data: data.fields, title }); + } else { + reject(); + } + } + if (proto) { + getData(proto.fieldId).then(resolve, reject); } }); + }); + }; - const BuildAndDispatchZip = async (res: Response, mutator: ZipMutator): Promise => { - const zip = Archiver('zip'); - zip.pipe(res); - await mutator(zip); - return zip.finalize(); - }; - - const targetedVisitorRecursive = async (seedId: string, hierarchy: Hierarchy): Promise => { - const local: Hierarchy = {}; - const { title, data } = await getData(seedId); - const label = `${title} (${seedId})`; - if (Array.isArray(data)) { - hierarchy[label] = local; - await Promise.all(data.map(proxy => targetedVisitorRecursive(proxy.fieldId, local))); + const hierarchyTraverserRecursive = async (file: Archiver.Archiver, hierarchy: Hierarchy, prefix = "Dash Export"): Promise => { + for (const key of Object.keys(hierarchy)) { + const result = hierarchy[key]; + if (typeof result === "string") { + let path: string; + let matches: RegExpExecArray | null; + if ((matches = /\:1050\/files\/(upload\_[\da-z]{32}.*)/g.exec(result)) !== null) { + path = `${__dirname}/public/files/${matches[1]}`; } else { - hierarchy[label + path.extname(data)] = data; + const information = await DashUploadUtils.UploadImage(result); + path = information.mediaPaths[0]; } - }; + file.file(path, { name: key, prefix }); + } else { + await hierarchyTraverserRecursive(file, result, `${prefix}/${key}`); + } + } + }; - const getData = async (seedId: string): Promise<{ data: string | any[], title: string }> => { - return new Promise<{ data: string | any[], title: string }>((resolve, reject) => { - Database.Instance.getDocument(seedId, async (result: any) => { - const { data, proto, title } = result.fields; - if (data) { - if (data.url) { - resolve({ data: data.url, title }); - } else if (data.fields) { - resolve({ data: data.fields, title }); - } else { - reject(); - } - } - if (proto) { - getData(proto.fieldId).then(resolve, reject); - } - }); - }); - }; + router.addSupervisedRoute({ + method: Method.GET, + subscription: new RouteSubscriber("/downloadId").add("docId"), + onValidation: async ({ req, res }) => { + res.set('Content-disposition', `attachment;`); + res.set('Content-Type', "application/zip"); + const { id, docs, files } = await getDocs(req.params.docId); + const docString = JSON.stringify({ id, docs }); + const zip = Archiver('zip'); + zip.pipe(res); + zip.append(docString, { name: "doc.json" }); + files.forEach(val => { + zip.file(__dirname + RouteStore.public + val, { name: val.substring(1) }); + }); + zip.finalize(); + } + }); - const hierarchyTraverserRecursive = async (file: Archiver.Archiver, hierarchy: Hierarchy, prefix = "Dash Export"): Promise => { - for (const key of Object.keys(hierarchy)) { - const result = hierarchy[key]; - if (typeof result === "string") { - let path: string; - let matches: RegExpExecArray | null; - if ((matches = /\:1050\/files\/(upload\_[\da-z]{32}.*)/g.exec(result)) !== null) { - path = `${__dirname}/public/files/${matches[1]}`; - } else { - 
const information = await DashUploadUtils.UploadImage(result); - path = information.mediaPaths[0]; - } - file.file(path, { name: key, prefix }); - } else { - await hierarchyTraverserRecursive(file, result, `${prefix}/${key}`); - } + router.addSupervisedRoute({ + method: Method.POST, + subscription: "/uploadDoc", + onValidation: ({ req, res }) => { + let form = new formidable.IncomingForm(); + form.keepExtensions = true; + // let path = req.body.path; + const ids: { [id: string]: string } = {}; + let remap = true; + const getId = (id: string): string => { + if (!remap) return id; + if (id.endsWith("Proto")) return id; + if (id in ids) { + return ids[id]; + } else { + return ids[id] = v4(); } }; - - router.addSupervisedRoute({ - method: Method.GET, - subscription: new RouteSubscriber("/downloadId").add("docId"), - onValidation: async ({ req, res }) => { - res.set('Content-disposition', `attachment;`); - res.set('Content-Type', "application/zip"); - const { id, docs, files } = await getDocs(req.params.docId); - const docString = JSON.stringify({ id, docs }); - const zip = Archiver('zip'); - zip.pipe(res); - zip.append(docString, { name: "doc.json" }); - files.forEach(val => { - zip.file(__dirname + RouteStore.public + val, { name: val.substring(1) }); - }); - zip.finalize(); + const mapFn = (doc: any) => { + if (doc.id) { + doc.id = getId(doc.id); } - }); + for (const key in doc.fields) { + if (!doc.fields.hasOwnProperty(key)) { + continue; + } + const field = doc.fields[key]; + if (field === undefined || field === null) { + continue; + } - router.addSupervisedRoute({ - method: Method.POST, - subscription: "/uploadDoc", - onValidation: ({ req, res }) => { - let form = new formidable.IncomingForm(); - form.keepExtensions = true; - // let path = req.body.path; - const ids: { [id: string]: string } = {}; - let remap = true; - const getId = (id: string): string => { - if (!remap) return id; - if (id.endsWith("Proto")) return id; - if (id in ids) { - return ids[id]; - } else { - return ids[id] = v4(); + if (field.__type === "proxy" || field.__type === "prefetch_proxy") { + field.fieldId = getId(field.fieldId); + } else if (field.__type === "script" || field.__type === "computed") { + if (field.captures) { + field.captures.fieldId = getId(field.captures.fieldId); } - }; - const mapFn = (doc: any) => { - if (doc.id) { - doc.id = getId(doc.id); - } - for (const key in doc.fields) { - if (!doc.fields.hasOwnProperty(key)) { - continue; - } - const field = doc.fields[key]; - if (field === undefined || field === null) { - continue; - } - - if (field.__type === "proxy" || field.__type === "prefetch_proxy") { - field.fieldId = getId(field.fieldId); - } else if (field.__type === "script" || field.__type === "computed") { - if (field.captures) { - field.captures.fieldId = getId(field.captures.fieldId); - } - } else if (field.__type === "list") { - mapFn(field); - } else if (typeof field === "string") { - const re = /("(?:dataD|d)ocumentId"\s*:\s*")([\w\-]*)"/g; - doc.fields[key] = (field as any).replace(re, (match: any, p1: string, p2: string) => { - return `${p1}${getId(p2)}"`; - }); - } else if (field.__type === "RichTextField") { - const re = /("href"\s*:\s*")(.*?)"/g; - field.Data = field.Data.replace(re, (match: any, p1: string, p2: string) => { - return `${p1}${getId(p2)}"`; - }); + } else if (field.__type === "list") { + mapFn(field); + } else if (typeof field === "string") { + const re = /("(?:dataD|d)ocumentId"\s*:\s*")([\w\-]*)"/g; + doc.fields[key] = (field as any).replace(re, (match: any, p1: 
string, p2: string) => { + return `${p1}${getId(p2)}"`; + }); + } else if (field.__type === "RichTextField") { + const re = /("href"\s*:\s*")(.*?)"/g; + field.Data = field.Data.replace(re, (match: any, p1: string, p2: string) => { + return `${p1}${getId(p2)}"`; + }); + } + } + }; + form.parse(req, async (err, fields, files) => { + remap = fields.remap !== "false"; + let id: string = ""; + try { + for (const name in files) { + const path_2 = files[name].path; + const zip = new AdmZip(path_2); + zip.getEntries().forEach((entry: any) => { + if (!entry.entryName.startsWith("files/")) return; + let dirname = path.dirname(entry.entryName) + "/"; + let extname = path.extname(entry.entryName); + let basename = path.basename(entry.entryName).split(".")[0]; + // zip.extractEntryTo(dirname + basename + "_o" + extname, __dirname + RouteStore.public, true, false); + // zip.extractEntryTo(dirname + basename + "_s" + extname, __dirname + RouteStore.public, true, false); + // zip.extractEntryTo(dirname + basename + "_m" + extname, __dirname + RouteStore.public, true, false); + // zip.extractEntryTo(dirname + basename + "_l" + extname, __dirname + RouteStore.public, true, false); + try { + zip.extractEntryTo(entry.entryName, __dirname + RouteStore.public, true, false); + dirname = "/" + dirname; + + fs.createReadStream(__dirname + RouteStore.public + dirname + basename + extname).pipe(fs.createWriteStream(__dirname + RouteStore.public + dirname + basename + "_o" + extname)); + fs.createReadStream(__dirname + RouteStore.public + dirname + basename + extname).pipe(fs.createWriteStream(__dirname + RouteStore.public + dirname + basename + "_s" + extname)); + fs.createReadStream(__dirname + RouteStore.public + dirname + basename + extname).pipe(fs.createWriteStream(__dirname + RouteStore.public + dirname + basename + "_m" + extname)); + fs.createReadStream(__dirname + RouteStore.public + dirname + basename + extname).pipe(fs.createWriteStream(__dirname + RouteStore.public + dirname + basename + "_l" + extname)); + } catch (e) { + console.log(e); } - } - }; - form.parse(req, async (err, fields, files) => { - remap = fields.remap !== "false"; - let id: string = ""; + }); + const json = zip.getEntry("doc.json"); + let docs: any; try { - for (const name in files) { - const path_2 = files[name].path; - const zip = new AdmZip(path_2); - zip.getEntries().forEach((entry: any) => { - if (!entry.entryName.startsWith("files/")) return; - let dirname = path.dirname(entry.entryName) + "/"; - let extname = path.extname(entry.entryName); - let basename = path.basename(entry.entryName).split(".")[0]; - // zip.extractEntryTo(dirname + basename + "_o" + extname, __dirname + RouteStore.public, true, false); - // zip.extractEntryTo(dirname + basename + "_s" + extname, __dirname + RouteStore.public, true, false); - // zip.extractEntryTo(dirname + basename + "_m" + extname, __dirname + RouteStore.public, true, false); - // zip.extractEntryTo(dirname + basename + "_l" + extname, __dirname + RouteStore.public, true, false); - try { - zip.extractEntryTo(entry.entryName, __dirname + RouteStore.public, true, false); - dirname = "/" + dirname; - - fs.createReadStream(__dirname + RouteStore.public + dirname + basename + extname).pipe(fs.createWriteStream(__dirname + RouteStore.public + dirname + basename + "_o" + extname)); - fs.createReadStream(__dirname + RouteStore.public + dirname + basename + extname).pipe(fs.createWriteStream(__dirname + RouteStore.public + dirname + basename + "_s" + extname)); - fs.createReadStream(__dirname + 
RouteStore.public + dirname + basename + extname).pipe(fs.createWriteStream(__dirname + RouteStore.public + dirname + basename + "_m" + extname)); - fs.createReadStream(__dirname + RouteStore.public + dirname + basename + extname).pipe(fs.createWriteStream(__dirname + RouteStore.public + dirname + basename + "_l" + extname)); - } catch (e) { - console.log(e); - } - }); - const json = zip.getEntry("doc.json"); - let docs: any; - try { - let data = JSON.parse(json.getData().toString("utf8")); - docs = data.docs; - id = data.id; - docs = Object.keys(docs).map(key => docs[key]); - docs.forEach(mapFn); - await Promise.all(docs.map((doc: any) => new Promise(res => Database.Instance.replace(doc.id, doc, (err, r) => { - err && console.log(err); - res(); - }, true, "newDocuments")))); - } catch (e) { console.log(e); } - fs.unlink(path_2, () => { }); - } - if (id) { - res.send(JSON.stringify(getId(id))); - } else { - res.send(JSON.stringify("error")); - } + let data = JSON.parse(json.getData().toString("utf8")); + docs = data.docs; + id = data.id; + docs = Object.keys(docs).map(key => docs[key]); + docs.forEach(mapFn); + await Promise.all(docs.map((doc: any) => new Promise(res => Database.Instance.replace(doc.id, doc, (err, r) => { + err && console.log(err); + res(); + }, true, "newDocuments")))); } catch (e) { console.log(e); } - }); - } + fs.unlink(path_2, () => { }); + } + if (id) { + res.send(JSON.stringify(getId(id))); + } else { + res.send(JSON.stringify("error")); + } + } catch (e) { console.log(e); } }); + } + }); - router.addSupervisedRoute({ - method: Method.GET, - subscription: new RouteSubscriber("/thumbnail").add("filename"), - onValidation: ({ req, res }) => { - let filename = req.params.filename; - let noExt = filename.substring(0, filename.length - ".png".length); - let pagenumber = parseInt(noExt.split('-')[1]); - fs.exists(uploadDirectory + filename, (exists: boolean) => { - console.log(`${uploadDirectory + filename} ${exists ? "exists" : "does not exist"}`); - if (exists) { - let input = fs.createReadStream(uploadDirectory + filename); - probe(input, (err: any, result: any) => { - if (err) { - console.log(err); - console.log(`error on ${filename}`); - return; - } - res.send({ path: "/files/" + filename, width: result.width, height: result.height }); - }); - } - else { - LoadPage(uploadDirectory + filename.substring(0, filename.length - noExt.split('-')[1].length - ".PNG".length - 1) + ".pdf", pagenumber, res); + router.addSupervisedRoute({ + method: Method.GET, + subscription: new RouteSubscriber("/thumbnail").add("filename"), + onValidation: ({ req, res }) => { + let filename = req.params.filename; + let noExt = filename.substring(0, filename.length - ".png".length); + let pagenumber = parseInt(noExt.split('-')[1]); + fs.exists(uploadDirectory + filename, (exists: boolean) => { + console.log(`${uploadDirectory + filename} ${exists ? 
"exists" : "does not exist"}`); + if (exists) { + let input = fs.createReadStream(uploadDirectory + filename); + probe(input, (err: any, result: any) => { + if (err) { + console.log(err); + console.log(`error on ${filename}`); + return; } + res.send({ path: "/files/" + filename, width: result.width, height: result.height }); }); } + else { + LoadPage(uploadDirectory + filename.substring(0, filename.length - noExt.split('-')[1].length - ".PNG".length - 1) + ".pdf", pagenumber, res); + } }); + } + }); - function LoadPage(file: string, pageNumber: number, res: Response) { - console.log(file); - Pdfjs.getDocument(file).promise - .then((pdf: Pdfjs.PDFDocumentProxy) => { - let factory = new NodeCanvasFactory(); - console.log(pageNumber); - pdf.getPage(pageNumber).then((page: Pdfjs.PDFPageProxy) => { - console.log("reading " + page); - let viewport = page.getViewport(1 as any); - let canvasAndContext = factory.create(viewport.width, viewport.height); - let renderContext = { - canvasContext: canvasAndContext.context, - viewport: viewport, - canvasFactory: factory - }; - console.log("read " + pageNumber); - - page.render(renderContext).promise - .then(() => { - console.log("saving " + pageNumber); - let stream = canvasAndContext.canvas.createPNGStream(); - let pngFile = `${file.substring(0, file.length - ".pdf".length)}-${pageNumber}.PNG`; - let out = fs.createWriteStream(pngFile); - stream.pipe(out); - out.on("finish", () => { - console.log(`Success! Saved to ${pngFile}`); - let name = path.basename(pngFile); - res.send({ path: "/files/" + name, width: viewport.width, height: viewport.height }); - }); - }, (reason: string) => { - console.error(reason + ` ${pageNumber}`); - }); + function LoadPage(file: string, pageNumber: number, res: Response) { + console.log(file); + Pdfjs.getDocument(file).promise + .then((pdf: Pdfjs.PDFDocumentProxy) => { + let factory = new NodeCanvasFactory(); + console.log(pageNumber); + pdf.getPage(pageNumber).then((page: Pdfjs.PDFPageProxy) => { + console.log("reading " + page); + let viewport = page.getViewport(1 as any); + let canvasAndContext = factory.create(viewport.width, viewport.height); + let renderContext = { + canvasContext: canvasAndContext.context, + viewport: viewport, + canvasFactory: factory + }; + console.log("read " + pageNumber); + + page.render(renderContext).promise + .then(() => { + console.log("saving " + pageNumber); + let stream = canvasAndContext.canvas.createPNGStream(); + let pngFile = `${file.substring(0, file.length - ".pdf".length)}-${pageNumber}.PNG`; + let out = fs.createWriteStream(pngFile); + stream.pipe(out); + out.on("finish", () => { + console.log(`Success! Saved to ${pngFile}`); + let name = path.basename(pngFile); + res.send({ path: "/files/" + name, width: viewport.width, height: viewport.height }); + }); + }, (reason: string) => { + console.error(reason + ` ${pageNumber}`); }); - }); - } - - /** - * Anyone attempting to navigate to localhost at this port will - * first have to log in. - */ - router.addSupervisedRoute({ - method: Method.GET, - subscription: RouteStore.root, - onValidation: ({ res }) => res.redirect(RouteStore.home) + }); }); + } + + /** + * Anyone attempting to navigate to localhost at this port will + * first have to log in. 
+ */ + router.addSupervisedRoute({ + method: Method.GET, + subscription: RouteStore.root, + onValidation: ({ res }) => res.redirect(RouteStore.home) + }); - router.addSupervisedRoute({ - method: Method.GET, - subscription: RouteStore.getUsers, - onValidation: async ({ res }) => { - const cursor = await Database.Instance.query({}, { email: 1, userDocumentId: 1 }, "users"); - const results = await cursor.toArray(); - res.send(results.map(user => ({ email: user.email, userDocumentId: user.userDocumentId }))); - } - }); + router.addSupervisedRoute({ + method: Method.GET, + subscription: RouteStore.getUsers, + onValidation: async ({ res }) => { + const cursor = await Database.Instance.query({}, { email: 1, userDocumentId: 1 }, "users"); + const results = await cursor.toArray(); + res.send(results.map(user => ({ email: user.email, userDocumentId: user.userDocumentId }))); + } + }); - const serve: OnUnauthenticated = ({ req, res }) => { - let detector = new mobileDetect(req.headers['user-agent'] || ""); - let filename = detector.mobile() !== null ? 'mobile/image.html' : 'index.html'; - res.sendFile(path.join(__dirname, '../../deploy/' + filename)); - }; + const serve: OnUnauthenticated = ({ req, res }) => { + let detector = new mobileDetect(req.headers['user-agent'] || ""); + let filename = detector.mobile() !== null ? 'mobile/image.html' : 'index.html'; + res.sendFile(path.join(__dirname, '../../deploy/' + filename)); + }; - router.addSupervisedRoute({ - method: Method.GET, - subscription: [RouteStore.home, new RouteSubscriber("/doc").add("docId")], - onValidation: serve, - onUnauthenticated: ({ req, ...remaining }) => { - const { originalUrl: target } = req; - const sharing = qs.parse(qs.extract(req.originalUrl), { sort: false }).sharing === "true"; - const docAccess = target.startsWith("/doc/"); - if (sharing && docAccess) { - serve({ req, ...remaining }); - } - } - }); + router.addSupervisedRoute({ + method: Method.GET, + subscription: [RouteStore.home, new RouteSubscriber("/doc").add("docId")], + onValidation: serve, + onUnauthenticated: ({ req, ...remaining }) => { + const { originalUrl: target } = req; + const sharing = qs.parse(qs.extract(req.originalUrl), { sort: false }).sharing === "true"; + const docAccess = target.startsWith("/doc/"); + if (sharing && docAccess) { + serve({ req, ...remaining }); + } + } + }); - router.addSupervisedRoute({ - method: Method.GET, - subscription: RouteStore.getUserDocumentId, - onValidation: ({ res, user }) => res.send(user.userDocumentId) - }); + router.addSupervisedRoute({ + method: Method.GET, + subscription: RouteStore.getUserDocumentId, + onValidation: ({ res, user }) => res.send(user.userDocumentId) + }); - router.addSupervisedRoute({ - method: Method.GET, - subscription: RouteStore.getCurrUser, - onValidation: ({ res, user }) => res.send(JSON.stringify(user)), - onUnauthenticated: ({ res }) => res.send(JSON.stringify({ id: "__guest__", email: "" })) - }); + router.addSupervisedRoute({ + method: Method.GET, + subscription: RouteStore.getCurrUser, + onValidation: ({ res, user }) => res.send(JSON.stringify(user)), + onUnauthenticated: ({ res }) => res.send(JSON.stringify({ id: "__guest__", email: "" })) + }); - const ServicesApiKeyMap = new Map([ - ["face", process.env.FACE], - ["vision", process.env.VISION], - ["handwriting", process.env.HANDWRITING] - ]); - - router.addSupervisedRoute({ - method: Method.GET, - subscription: new RouteSubscriber(RouteStore.cognitiveServices).add('requestedservice'), - onValidation: ({ req, res }) => { - let service = 
req.params.requestedservice; - res.send(ServicesApiKeyMap.get(service)); - } - }); + const ServicesApiKeyMap = new Map([ + ["face", process.env.FACE], + ["vision", process.env.VISION], + ["handwriting", process.env.HANDWRITING] + ]); + + router.addSupervisedRoute({ + method: Method.GET, + subscription: new RouteSubscriber(RouteStore.cognitiveServices).add('requestedservice'), + onValidation: ({ req, res }) => { + let service = req.params.requestedservice; + res.send(ServicesApiKeyMap.get(service)); + } + }); - class NodeCanvasFactory { - create = (width: number, height: number) => { - var canvas = createCanvas(width, height); - var context = canvas.getContext('2d'); - return { - canvas: canvas, - context: context, - }; - } + class NodeCanvasFactory { + create = (width: number, height: number) => { + var canvas = createCanvas(width, height); + var context = canvas.getContext('2d'); + return { + canvas: canvas, + context: context, + }; + } - reset = (canvasAndContext: any, width: number, height: number) => { - canvasAndContext.canvas.width = width; - canvasAndContext.canvas.height = height; - } + reset = (canvasAndContext: any, width: number, height: number) => { + canvasAndContext.canvas.width = width; + canvasAndContext.canvas.height = height; + } + + destroy = (canvasAndContext: any) => { + canvasAndContext.canvas.width = 0; + canvasAndContext.canvas.height = 0; + canvasAndContext.canvas = null; + canvasAndContext.context = null; + } + } + + interface ImageFileResponse { + name: string; + path: string; + type: string; + exif: Opt; + } + + router.addSupervisedRoute({ + method: Method.POST, + subscription: RouteStore.upload, + onValidation: ({ req, res }) => { + let form = new formidable.IncomingForm(); + form.uploadDir = uploadDirectory; + form.keepExtensions = true; + form.parse(req, async (_err, _fields, files) => { + let results: ImageFileResponse[] = []; + for (const key in files) { + const { type, path: location, name } = files[key]; + const filename = path.basename(location); + let uploadInformation: Opt; + if (filename.endsWith(".pdf")) { + let dataBuffer = fs.readFileSync(uploadDirectory + filename); + const result: ParsedPDF = await pdf(dataBuffer); + await new Promise(resolve => { + const path = pdfDirectory + "/" + filename.substring(0, filename.length - ".pdf".length) + ".txt"; + fs.createWriteStream(path).write(result.text, error => { + if (!error) { + resolve(); + } else { + reject(error); + } + }); + }); + } else { + uploadInformation = await DashUploadUtils.UploadImage(uploadDirectory + filename, filename); + } + const exif = uploadInformation ? 
uploadInformation.exifData : undefined; + results.push({ name, type, path: `/files/${filename}`, exif }); - destroy = (canvasAndContext: any) => { - canvasAndContext.canvas.width = 0; - canvasAndContext.canvas.height = 0; - canvasAndContext.canvas = null; - canvasAndContext.context = null; } - } + _success(res, results); + }); + } + }); - interface ImageFileResponse { - name: string; - path: string; - type: string; - exif: Opt; + router.addSupervisedRoute({ + method: Method.POST, + subscription: RouteStore.inspectImage, + onValidation: async ({ req, res }) => { + const { source } = req.body; + if (typeof source === "string") { + const uploadInformation = await DashUploadUtils.UploadImage(source); + return res.send(await DashUploadUtils.InspectImage(uploadInformation.mediaPaths[0])); } + res.send({}); + } + }); - router.addSupervisedRoute({ - method: Method.POST, - subscription: RouteStore.upload, - onValidation: ({ req, res }) => { - let form = new formidable.IncomingForm(); - form.uploadDir = uploadDirectory; - form.keepExtensions = true; - form.parse(req, async (_err, _fields, files) => { - let results: ImageFileResponse[] = []; - for (const key in files) { - const { type, path: location, name } = files[key]; - const filename = path.basename(location); - let uploadInformation: Opt; - if (filename.endsWith(".pdf")) { - let dataBuffer = fs.readFileSync(uploadDirectory + filename); - const result: ParsedPDF = await pdf(dataBuffer); - await new Promise(resolve => { - const path = pdfDirectory + "/" + filename.substring(0, filename.length - ".pdf".length) + ".txt"; - fs.createWriteStream(path).write(result.text, error => { - if (!error) { - resolve(); - } else { - reject(error); - } - }); - }); - } else { - uploadInformation = await DashUploadUtils.UploadImage(uploadDirectory + filename, filename); - } - const exif = uploadInformation ? 
uploadInformation.exifData : undefined; - results.push({ name, type, path: `/files/${filename}`, exif }); - - } - _success(res, results); + router.addSupervisedRoute({ + method: Method.POST, + subscription: RouteStore.dataUriToImage, + onValidation: ({ req, res }) => { + const uri = req.body.uri; + const filename = req.body.name; + if (!uri || !filename) { + res.status(401).send("incorrect parameters specified"); + return; + } + imageDataUri.outputFile(uri, uploadDirectory + filename).then((savedName: string) => { + const ext = path.extname(savedName); + let resizers = [ + { resizer: sharp().resize(100, undefined, { withoutEnlargement: true }), suffix: "_s" }, + { resizer: sharp().resize(400, undefined, { withoutEnlargement: true }), suffix: "_m" }, + { resizer: sharp().resize(900, undefined, { withoutEnlargement: true }), suffix: "_l" }, + ]; + let isImage = false; + if (pngTypes.includes(ext)) { + resizers.forEach(element => { + element.resizer = element.resizer.png(); }); + isImage = true; + } else if (jpgTypes.includes(ext)) { + resizers.forEach(element => { + element.resizer = element.resizer.jpeg(); + }); + isImage = true; } - }); - - router.addSupervisedRoute({ - method: Method.POST, - subscription: RouteStore.inspectImage, - onValidation: async ({ req, res }) => { - const { source } = req.body; - if (typeof source === "string") { - const uploadInformation = await DashUploadUtils.UploadImage(source); - return res.send(await DashUploadUtils.InspectImage(uploadInformation.mediaPaths[0])); - } - res.send({}); - } - }); - - router.addSupervisedRoute({ - method: Method.POST, - subscription: RouteStore.dataUriToImage, - onValidation: ({ req, res }) => { - const uri = req.body.uri; - const filename = req.body.name; - if (!uri || !filename) { - res.status(401).send("incorrect parameters specified"); - return; - } - imageDataUri.outputFile(uri, uploadDirectory + filename).then((savedName: string) => { - const ext = path.extname(savedName); - let resizers = [ - { resizer: sharp().resize(100, undefined, { withoutEnlargement: true }), suffix: "_s" }, - { resizer: sharp().resize(400, undefined, { withoutEnlargement: true }), suffix: "_m" }, - { resizer: sharp().resize(900, undefined, { withoutEnlargement: true }), suffix: "_l" }, - ]; - let isImage = false; - if (pngTypes.includes(ext)) { - resizers.forEach(element => { - element.resizer = element.resizer.png(); - }); - isImage = true; - } else if (jpgTypes.includes(ext)) { - resizers.forEach(element => { - element.resizer = element.resizer.jpeg(); - }); - isImage = true; - } - if (isImage) { - resizers.forEach(resizer => { - fs.createReadStream(savedName).pipe(resizer.resizer).pipe(fs.createWriteStream(uploadDirectory + filename + resizer.suffix + ext)); - }); - } - res.send("/files/" + filename + ext); + if (isImage) { + resizers.forEach(resizer => { + fs.createReadStream(savedName).pipe(resizer.resizer).pipe(fs.createWriteStream(uploadDirectory + filename + resizer.suffix + ext)); }); } + res.send("/files/" + filename + ext); }); + } + }); - router.addSupervisedRoute({ - method: Method.GET, - subscription: RouteStore.delete, - onValidation: ({ res, isRelease }) => { - if (isRelease) { - return _permission_denied(res, deletionPermissionError); - } - WebSocket.deleteFields().then(() => res.redirect(RouteStore.home)); - } - }); + router.addSupervisedRoute({ + method: Method.GET, + subscription: RouteStore.delete, + onValidation: ({ res, isRelease }) => { + if (isRelease) { + return _permission_denied(res, deletionPermissionError); + } + 
WebSocket.deleteFields().then(() => res.redirect(RouteStore.home)); + } + }); - router.addSupervisedRoute({ - method: Method.GET, - subscription: RouteStore.deleteAll, - onValidation: ({ res, isRelease }) => { - if (isRelease) { - return _permission_denied(res, deletionPermissionError); - } - WebSocket.deleteAll().then(() => res.redirect(RouteStore.home)); - } - }); + router.addSupervisedRoute({ + method: Method.GET, + subscription: RouteStore.deleteAll, + onValidation: ({ res, isRelease }) => { + if (isRelease) { + return _permission_denied(res, deletionPermissionError); + } + WebSocket.deleteAll().then(() => res.redirect(RouteStore.home)); + } + }); - const credentialsPath = path.join(__dirname, "./credentials/google_docs_credentials.json"); - - const EndpointHandlerMap = new Map([ - ["create", (api, params) => api.create(params)], - ["retrieve", (api, params) => api.get(params)], - ["update", (api, params) => api.batchUpdate(params)], - ]); - - router.addSupervisedRoute({ - method: Method.POST, - subscription: new RouteSubscriber(RouteStore.googleDocs).add("sector", "action"), - onValidation: ({ req, res }) => { - let sector: GoogleApiServerUtils.Service = req.params.sector as GoogleApiServerUtils.Service; - let action: GoogleApiServerUtils.Action = req.params.action as GoogleApiServerUtils.Action; - GoogleApiServerUtils.GetEndpoint(GoogleApiServerUtils.Service[sector], { credentialsPath, userId: req.headers.userId as string }).then(endpoint => { - let handler = EndpointHandlerMap.get(action); - if (endpoint && handler) { - let execute = handler(endpoint, req.body).then( - response => res.send(response.data), - rejection => res.send(rejection) - ); - execute.catch(exception => res.send(exception)); - return; - } - res.send(undefined); - }); + const credentialsPath = path.join(__dirname, "./credentials/google_docs_credentials.json"); + + const EndpointHandlerMap = new Map([ + ["create", (api, params) => api.create(params)], + ["retrieve", (api, params) => api.get(params)], + ["update", (api, params) => api.batchUpdate(params)], + ]); + + router.addSupervisedRoute({ + method: Method.POST, + subscription: new RouteSubscriber(RouteStore.googleDocs).add("sector", "action"), + onValidation: ({ req, res }) => { + let sector: GoogleApiServerUtils.Service = req.params.sector as GoogleApiServerUtils.Service; + let action: GoogleApiServerUtils.Action = req.params.action as GoogleApiServerUtils.Action; + GoogleApiServerUtils.GetEndpoint(GoogleApiServerUtils.Service[sector], { credentialsPath, userId: req.headers.userId as string }).then(endpoint => { + let handler = EndpointHandlerMap.get(action); + if (endpoint && handler) { + let execute = handler(endpoint, req.body).then( + response => res.send(response.data), + rejection => res.send(rejection) + ); + execute.catch(exception => res.send(exception)); + return; } + res.send(undefined); }); + } + }); - router.addSupervisedRoute({ - method: Method.GET, - subscription: RouteStore.readGoogleAccessToken, - onValidation: async ({ user, res }) => { - const userId = user.id; - const token = await Database.Auxiliary.GoogleAuthenticationToken.Fetch(userId); - const information = { credentialsPath, userId }; - if (!token) { - return res.send(await GoogleApiServerUtils.GenerateAuthenticationUrl(information)); - } - GoogleApiServerUtils.RetrieveAccessToken(information).then(token => res.send(token)); - } - }); + router.addSupervisedRoute({ + method: Method.GET, + subscription: RouteStore.readGoogleAccessToken, + onValidation: async ({ user, res }) => { + const 
userId = user.id; + const token = await Database.Auxiliary.GoogleAuthenticationToken.Fetch(userId); + const information = { credentialsPath, userId }; + if (!token) { + return res.send(await GoogleApiServerUtils.GenerateAuthenticationUrl(information)); + } + GoogleApiServerUtils.RetrieveAccessToken(information).then(token => res.send(token)); + } + }); - router.addSupervisedRoute({ - method: Method.POST, - subscription: RouteStore.writeGoogleAccessToken, - onValidation: async ({ user, req, res }) => { - const userId = user.id; - const information = { credentialsPath, userId }; - res.send(await GoogleApiServerUtils.ProcessClientSideCode(information, req.body.authenticationCode)); - } - }); + router.addSupervisedRoute({ + method: Method.POST, + subscription: RouteStore.writeGoogleAccessToken, + onValidation: async ({ user, req, res }) => { + const userId = user.id; + const information = { credentialsPath, userId }; + res.send(await GoogleApiServerUtils.ProcessClientSideCode(information, req.body.authenticationCode)); + } + }); - const tokenError = "Unable to successfully upload bytes for all images!"; - const mediaError = "Unable to convert all uploaded bytes to media items!"; - const userIdError = "Unable to parse the identification of the user!"; - - router.addSupervisedRoute({ - method: Method.POST, - subscription: RouteStore.googlePhotosMediaUpload, - onValidation: async ({ user, req, res }) => { - const { media } = req.body; - const userId = user.id; - if (!userId) { - return _error(res, userIdError); - } + const tokenError = "Unable to successfully upload bytes for all images!"; + const mediaError = "Unable to convert all uploaded bytes to media items!"; + const userIdError = "Unable to parse the identification of the user!"; + + router.addSupervisedRoute({ + method: Method.POST, + subscription: RouteStore.googlePhotosMediaUpload, + onValidation: async ({ user, req, res }) => { + const { media } = req.body; + const userId = user.id; + if (!userId) { + return _error(res, userIdError); + } - await GooglePhotosUploadUtils.initialize({ credentialsPath, userId }); + await GooglePhotosUploadUtils.initialize({ credentialsPath, userId }); - let failed: number[] = []; + let failed: number[] = []; - const newMediaItems = await BatchedArray.from(media, { batchSize: 25 }).batchedMapPatientInterval( - { magnitude: 100, unit: TimeUnit.Milliseconds }, - async (batch: GooglePhotosUploadUtils.MediaInput[]) => { - const newMediaItems: NewMediaItem[] = []; - for (let index = 0; index < batch.length; index++) { - const element = batch[index]; - const uploadToken = await GooglePhotosUploadUtils.DispatchGooglePhotosUpload(element.url); - if (!uploadToken) { - failed.push(index); - } else { - newMediaItems.push({ - description: element.description, - simpleMediaItem: { uploadToken } - }); - } - } - return newMediaItems; + const newMediaItems = await BatchedArray.from(media, { batchSize: 25 }).batchedMapPatientInterval( + { magnitude: 100, unit: TimeUnit.Milliseconds }, + async (batch: GooglePhotosUploadUtils.MediaInput[]) => { + const newMediaItems: NewMediaItem[] = []; + for (let index = 0; index < batch.length; index++) { + const element = batch[index]; + const uploadToken = await GooglePhotosUploadUtils.DispatchGooglePhotosUpload(element.url); + if (!uploadToken) { + failed.push(index); + } else { + newMediaItems.push({ + description: element.description, + simpleMediaItem: { uploadToken } + }); } - ); - - const failedCount = failed.length; - if (failedCount) { - console.error(`Unable to upload 
${failedCount} image${failedCount === 1 ? "" : "s"} to Google's servers`); } - - GooglePhotosUploadUtils.CreateMediaItems(newMediaItems, req.body.album).then( - result => _success(res, { results: result.newMediaItemResults, failed }), - error => _error(res, mediaError, error) - ); + return newMediaItems; } - }); + ); - interface MediaItem { - baseUrl: string; - filename: string; + const failedCount = failed.length; + if (failedCount) { + console.error(`Unable to upload ${failedCount} image${failedCount === 1 ? "" : "s"} to Google's servers`); } - const prefix = "google_photos_"; - - const downloadError = "Encountered an error while executing downloads."; - const requestError = "Unable to execute download: the body's media items were malformed."; - const deletionPermissionError = "Cannot perform specialized delete outside of the development environment!"; - - router.addSupervisedRoute({ - method: Method.GET, - subscription: "/deleteWithAux", - onValidation: async ({ res, isRelease }) => { - if (isRelease) { - return _permission_denied(res, deletionPermissionError); - } - await Database.Auxiliary.DeleteAll(); - res.redirect(RouteStore.delete); - } - }); - router.addSupervisedRoute({ - method: Method.GET, - subscription: "/deleteWithGoogleCredentials", - onValidation: async ({ res, isRelease }) => { - if (isRelease) { - return _permission_denied(res, deletionPermissionError); - } - await Database.Auxiliary.GoogleAuthenticationToken.DeleteAll(); - res.redirect(RouteStore.delete); - } - }); + GooglePhotosUploadUtils.CreateMediaItems(newMediaItems, req.body.album).then( + result => _success(res, { results: result.newMediaItemResults, failed }), + error => _error(res, mediaError, error) + ); + } + }); - const UploadError = (count: number) => `Unable to upload ${count} images to Dash's server`; - router.addSupervisedRoute({ - method: Method.POST, - subscription: RouteStore.googlePhotosMediaDownload, - onValidation: async ({ req, res }) => { - const contents: { mediaItems: MediaItem[] } = req.body; - let failed = 0; - if (contents) { - const completed: Opt[] = []; - for (let item of contents.mediaItems) { - const { contentSize, ...attributes } = await DashUploadUtils.InspectImage(item.baseUrl); - const found: Opt = await Database.Auxiliary.QueryUploadHistory(contentSize!); - if (!found) { - const upload = await DashUploadUtils.UploadInspectedImage({ contentSize, ...attributes }, item.filename, prefix).catch(error => _error(res, downloadError, error)); - if (upload) { - completed.push(upload); - await Database.Auxiliary.LogUpload(upload); - } else { - failed++; - } - } else { - completed.push(found); - } - } - if (failed) { - return _error(res, UploadError(failed)); + interface MediaItem { + baseUrl: string; + filename: string; + } + const prefix = "google_photos_"; + + const downloadError = "Encountered an error while executing downloads."; + const requestError = "Unable to execute download: the body's media items were malformed."; + const deletionPermissionError = "Cannot perform specialized delete outside of the development environment!"; + + router.addSupervisedRoute({ + method: Method.GET, + subscription: "/deleteWithAux", + onValidation: async ({ res, isRelease }) => { + if (isRelease) { + return _permission_denied(res, deletionPermissionError); + } + await Database.Auxiliary.DeleteAll(); + res.redirect(RouteStore.delete); + } + }); + + router.addSupervisedRoute({ + method: Method.GET, + subscription: "/deleteWithGoogleCredentials", + onValidation: async ({ res, isRelease }) => { + if 
(isRelease) { + return _permission_denied(res, deletionPermissionError); + } + await Database.Auxiliary.GoogleAuthenticationToken.DeleteAll(); + res.redirect(RouteStore.delete); + } + }); + + const UploadError = (count: number) => `Unable to upload ${count} images to Dash's server`; + router.addSupervisedRoute({ + method: Method.POST, + subscription: RouteStore.googlePhotosMediaDownload, + onValidation: async ({ req, res }) => { + const contents: { mediaItems: MediaItem[] } = req.body; + let failed = 0; + if (contents) { + const completed: Opt[] = []; + for (let item of contents.mediaItems) { + const { contentSize, ...attributes } = await DashUploadUtils.InspectImage(item.baseUrl); + const found: Opt = await Database.Auxiliary.QueryUploadHistory(contentSize!); + if (!found) { + const upload = await DashUploadUtils.UploadInspectedImage({ contentSize, ...attributes }, item.filename, prefix).catch(error => _error(res, downloadError, error)); + if (upload) { + completed.push(upload); + await Database.Auxiliary.LogUpload(upload); + } else { + failed++; } - return _success(res, completed); + } else { + completed.push(found); } - _invalid(res, requestError); } - }); + if (failed) { + return _error(res, UploadError(failed)); + } + return _success(res, completed); + } + _invalid(res, requestError); } }); -})(); \ No newline at end of file +} \ No newline at end of file -- cgit v1.2.3-70-g09d2 From e6bd33867cc7f7185575666255369f55cacb9856 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Sat, 26 Oct 2019 18:28:38 -0400 Subject: restructured route registration and added preliminary comments for exporter --- src/server/ApiManagers/ApiManager.ts | 10 ++- src/server/ApiManagers/ExportManager.ts | 133 ++++++++++++++++++++++++++++++++ src/server/ApiManagers/SearchManager.ts | 10 +-- src/server/ApiManagers/UserManager.ts | 10 ++- src/server/ApiManagers/UtilManager.ts | 12 +-- src/server/RouteManager.ts | 15 ++-- src/server/Websocket/Websocket.ts | 2 +- src/server/index.ts | 96 ++++------------------- 8 files changed, 179 insertions(+), 109 deletions(-) create mode 100644 src/server/ApiManagers/ExportManager.ts (limited to 'src') diff --git a/src/server/ApiManagers/ApiManager.ts b/src/server/ApiManagers/ApiManager.ts index 264c78a17..9fd726060 100644 --- a/src/server/ApiManagers/ApiManager.ts +++ b/src/server/ApiManagers/ApiManager.ts @@ -1,7 +1,11 @@ -import RouteManager from "../RouteManager"; +import RouteManager, { RouteInitializer } from "../RouteManager"; -export default abstract class ApiManager { +export type Registration = (initializer: RouteInitializer) => void; - public abstract register(router: RouteManager): void; +export default abstract class ApiManager { + protected abstract initialize(register: Registration): void; + public register(router: RouteManager) { + this.initialize(router.addSupervisedRoute); + } } \ No newline at end of file diff --git a/src/server/ApiManagers/ExportManager.ts b/src/server/ApiManagers/ExportManager.ts new file mode 100644 index 000000000..261acbbe0 --- /dev/null +++ b/src/server/ApiManagers/ExportManager.ts @@ -0,0 +1,133 @@ +import ApiManager, { Registration } from "./ApiManager"; +import RouteManager, { Method } from "../RouteManager"; +import RouteSubscriber from "../RouteSubscriber"; +import { RouteStore } from "../RouteStore"; +import * as Archiver from 'archiver'; +import * as express from 'express'; +import { Database } from "../database"; +import * as path from "path"; +import { DashUploadUtils } from "../DashUploadUtils"; + +export type Hierarchy = { [id: 
string]: string | Hierarchy }; +export type ZipMutator = (file: Archiver.Archiver) => void | Promise<void>; +export interface DocumentElements { + data: string | any[]; + title: string; +} + +export default class ExportManager extends ApiManager { + + protected initialize(register: Registration): void { + + register({ + method: Method.GET, + subscription: new RouteSubscriber(RouteStore.imageHierarchyExport).add('docId'), + onValidation: async ({ req, res }) => { + const id = req.params.docId; + const hierarchy: Hierarchy = {}; + await buildHierarchyRecursive(id, hierarchy); + BuildAndDispatchZip(res, zip => writeHierarchyRecursive(zip, hierarchy)); + } + }); + } + +} + +/** + * This utility function factors out the process + * of creating a zip file and sending it back to the client + * by piping it into a response. + * + * Learn more about piping and readable / writable streams here! + * https://www.freecodecamp.org/news/node-js-streams-everything-you-need-to-know-c9141306be93/ + * + * @param res the writable stream response object that will transfer the generated zip file + * @param mutator the callback function used to actually modify and insert information into the zip instance + */ +export async function BuildAndDispatchZip(res: express.Response, mutator: ZipMutator): Promise<void> { + const zip = Archiver('zip'); + zip.pipe(res); + await mutator(zip); + zip.finalize(); +} + +/** + * This function starts with a single document id as a seed, + * typically that of a collection, and then descends the entire tree + * of image or collection documents that are reachable from that seed. + * @param seedId the id of the root of the subtree we're trying to capture, interesting only if it's a collection + * @param hierarchy the data structure we're going to use to record the nesting of the collections and images as we descend + */ + +/* +Below is an example of the JSON hierarchy built from two images contained inside a collection titled 'a nested collection', +following the general recursive structure shown immediately below +{ + "parent folder name":{ + "first child's file name":"first child's url" + ... + "nth child's file name":"nth child's url" + } +} +{ + "a nested collection (865c4734-c036-4d67-a588-c71bb43d1440)":{ + "an image of a cat (ace99ffd-8ed8-4026-a5d5-a353fff57bdd).jpg":"https://upload.wikimedia.org/wikipedia/commons/thumb/3/3a/Cat03.jpg/1200px-Cat03.jpg", + "1*SGJw31T5Q9Zfsk24l2yirg.gif (9321cc9b-9b3e-4cb6-b99c-b7e667340f05).gif":"https://cdn-media-1.freecodecamp.org/images/1*SGJw31T5Q9Zfsk24l2yirg.gif" + } +} +*/ + +async function buildHierarchyRecursive(seedId: string, hierarchy: Hierarchy): Promise<void> { + const { title, data } = await getData(seedId); + const label = `${title} (${seedId})`; + // is the document a collection? + if (Array.isArray(data)) { + // recurse over all documents in the collection.
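+ // For illustration only (hypothetical ids and url, mirroring the sample hierarchy documented above): a collection + // containing a single image ends up recorded as hierarchy["my collection (collection-id)"] = { "cat (image-id).jpg": "http://host/cat.jpg" }. + // The Promise.all below simply fans this recursion out in parallel over every child document of the collection.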
+ const local: Hierarchy = {}; // create a child hierarchy for this level, which will get passed in as the parent of the recursive call + hierarchy[label] = local; // store it at the index in the parent, so we'll end up with a map of maps of maps + await Promise.all(data.map(proxy => buildHierarchyRecursive(proxy.fieldId, local))); + } else { + // now, data can only be a string, namely the url of the image + const filename = label + path.extname(data); // this is the file name under which the output image will be stored + hierarchy[filename] = data; + } +} + +async function getData(seedId: string): Promise<DocumentElements> { + return new Promise((resolve, reject) => { + Database.Instance.getDocument(seedId, async (result: any) => { + const { data, proto, title } = result.fields; + if (data) { + if (data.url) { + resolve({ data: data.url, title }); + } else if (data.fields) { + resolve({ data: data.fields, title }); + } else { + reject(); + } + } + if (proto) { + getData(proto.fieldId).then(resolve, reject); + } + }); + }); +} + +async function writeHierarchyRecursive(file: Archiver.Archiver, hierarchy: Hierarchy, prefix = "Dash Export"): Promise<void> { + for (const key of Object.keys(hierarchy)) { + const result = hierarchy[key]; + if (typeof result === "string") { + let path: string; + let matches: RegExpExecArray | null; + if ((matches = /\:1050\/files\/(upload\_[\da-z]{32}.*)/g.exec(result)) !== null) { + path = `${__dirname}/public/files/${matches[1]}`; + } else { + const information = await DashUploadUtils.UploadImage(result); + path = information.mediaPaths[0]; + } + file.file(path, { name: key, prefix }); + } else { + await writeHierarchyRecursive(file, result, `${prefix}/${key}`); + } + } +} \ No newline at end of file diff --git a/src/server/ApiManagers/SearchManager.ts b/src/server/ApiManagers/SearchManager.ts index 15b87204c..1c4b805e5 100644 --- a/src/server/ApiManagers/SearchManager.ts +++ b/src/server/ApiManagers/SearchManager.ts @@ -1,5 +1,5 @@ -import ApiManager from "./ApiManager"; -import RouteManager, { Method } from "../RouteManager"; +import ApiManager, { Registration } from "./ApiManager"; +import { Method } from "../RouteManager"; import { Search } from "../Search"; var findInFiles = require('find-in-files'); import * as path from 'path'; @@ -7,9 +7,9 @@ import { uploadDirectory } from ".."; export default class SearchManager extends ApiManager { - public register(router: RouteManager): void { + protected initialize(register: Registration): void { - router.addSupervisedRoute({ + register({ method: Method.GET, subscription: "/textsearch", onValidation: async ({ req, res }) => { @@ -29,7 +29,7 @@ export default class SearchManager extends ApiManager { } }); - router.addSupervisedRoute({ + register({ method: Method.GET, subscription: "/search", onValidation: async ({ req, res }) => { diff --git a/src/server/ApiManagers/UserManager.ts b/src/server/ApiManagers/UserManager.ts index bb8837dc6..dd1e50133 100644 --- a/src/server/ApiManagers/UserManager.ts +++ b/src/server/ApiManagers/UserManager.ts @@ -1,11 +1,12 @@ -import ApiManager from "./ApiManager"; -import RouteManager, { Method } from "../RouteManager"; +import ApiManager, { Registration } from "./ApiManager"; +import { Method } from "../RouteManager"; import { WebSocket } from "../Websocket/Websocket"; export default class UserManager extends ApiManager { - public register(router: RouteManager): void { - router.addSupervisedRoute({ + protected initialize(register: Registration): void { + + register({ method: Method.GET, subscription: 
"/whosOnline", onValidation: ({ res }) => { @@ -22,6 +23,7 @@ export default class UserManager extends ApiManager { res.send(users); } }); + } private msToTime(duration: number) { diff --git a/src/server/ApiManagers/UtilManager.ts b/src/server/ApiManagers/UtilManager.ts index 79b904e8a..a3f802b20 100644 --- a/src/server/ApiManagers/UtilManager.ts +++ b/src/server/ApiManagers/UtilManager.ts @@ -1,13 +1,13 @@ -import ApiManager from "./ApiManager"; -import RouteManager, { Method } from "../RouteManager"; +import ApiManager, { Registration } from "./ApiManager"; +import { Method } from "../RouteManager"; import { exec } from 'child_process'; import { command_line } from "../ActionUtilities"; export default class UtilManager extends ApiManager { - public register(router: RouteManager): void { + protected initialize(register: Registration): void { - router.addSupervisedRoute({ + register({ method: Method.GET, subscription: "/pull", onValidation: ({ res }) => { @@ -21,7 +21,7 @@ export default class UtilManager extends ApiManager { } }); - router.addSupervisedRoute({ + register({ method: Method.GET, subscription: "/buxton", onValidation: ({ res }) => { @@ -35,7 +35,7 @@ export default class UtilManager extends ApiManager { }, }); - router.addSupervisedRoute({ + register({ method: Method.GET, subscription: "/version", onValidation: ({ res }) => { diff --git a/src/server/RouteManager.ts b/src/server/RouteManager.ts index b3864e89c..ef083a88a 100644 --- a/src/server/RouteManager.ts +++ b/src/server/RouteManager.ts @@ -2,7 +2,6 @@ import RouteSubscriber from "./RouteSubscriber"; import { RouteStore } from "./RouteStore"; import { DashUserModel } from "./authentication/models/user_model"; import * as express from 'express'; -import { Opt } from "../new_fields/Doc"; export enum Method { GET, @@ -41,15 +40,10 @@ export default class RouteManager { } /** - * Please invoke this function when adding a new route to Dash's server. - * It ensures that any requests leading to or containing user-sensitive information - * does not execute unless Passport authentication detects a user logged in. 
- * @param method whether or not the request is a GET or a POST - * @param handler the action to invoke, recieving a DashUserModel and, as expected, the Express.Request and Express.Response - * @param onRejection an optional callback invoked on return if no user is found to be logged in - * @param subscribers the forward slash prepended path names (reference and add to RouteStore.ts) that will all invoke the given @param handler + * + * @param initializer */ - addSupervisedRoute(initializer: RouteInitializer) { + addSupervisedRoute = (initializer: RouteInitializer): void => { const { method, subscription, onValidation, onUnauthenticated, onError } = initializer; const isRelease = this._isRelease; let supervised = async (req: express.Request, res: express.Response) => { @@ -72,6 +66,9 @@ export default class RouteManager { req.session!.target = target; if (onUnauthenticated) { await tryExecute(onUnauthenticated, core); + if (!res.headersSent) { + res.redirect(RouteStore.login); + } } else { res.redirect(RouteStore.login); } diff --git a/src/server/Websocket/Websocket.ts b/src/server/Websocket/Websocket.ts index 2461dd8d5..cd2813d99 100644 --- a/src/server/Websocket/Websocket.ts +++ b/src/server/Websocket/Websocket.ts @@ -4,7 +4,7 @@ import { Client } from "../Client"; import { Socket } from "socket.io"; import { Database } from "../database"; import { Search } from "../Search"; -import io from 'socket.io'; +import * as io from 'socket.io'; import YoutubeApi from "../apis/youtube/youtubeApiSample"; import { youtubeApiKey } from ".."; diff --git a/src/server/index.ts b/src/server/index.ts index 93f4238bc..384800f23 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -33,12 +33,11 @@ import UtilManager from './ApiManagers/UtilManager'; import SearchManager from './ApiManagers/SearchManager'; import UserManager from './ApiManagers/UserManager'; import { WebSocket } from './Websocket/Websocket'; +import ExportManager from './ApiManagers/ExportManager'; +import ApiManager from './ApiManagers/ApiManager'; export let youtubeApiKey: string; -export type Hierarchy = { [id: string]: string | Hierarchy }; -export type ZipMutator = (file: Archiver.Archiver) => void | Promise; - export interface NewMediaItem { description: string; simpleMediaItem: { @@ -72,9 +71,13 @@ async function PreliminaryFunctions() { } function routeSetter(router: RouteManager) { - new UtilManager().register(router); - new SearchManager().register(router); - new UserManager().register(router); + const managers: ApiManager[] = [ + new UtilManager(), + new SearchManager(), + new UserManager(), + new ExportManager() + ]; + managers.forEach(manager => manager.register(router)); WebSocket.initialize(serverPort, router.isRelease); @@ -152,77 +155,6 @@ function routeSetter(router: RouteManager) { } }); - router.addSupervisedRoute({ - method: Method.GET, - subscription: new RouteSubscriber(RouteStore.imageHierarchyExport).add('docId'), - onValidation: async ({ req, res }) => { - const id = req.params.docId; - const hierarchy: Hierarchy = {}; - await targetedVisitorRecursive(id, hierarchy); - BuildAndDispatchZip(res, async zip => { - await hierarchyTraverserRecursive(zip, hierarchy); - }); - } - }); - - const BuildAndDispatchZip = async (res: Response, mutator: ZipMutator): Promise => { - const zip = Archiver('zip'); - zip.pipe(res); - await mutator(zip); - return zip.finalize(); - }; - - const targetedVisitorRecursive = async (seedId: string, hierarchy: Hierarchy): Promise => { - const local: Hierarchy = {}; - const { title, 
data } = await getData(seedId); - const label = `${title} (${seedId})`; - if (Array.isArray(data)) { - hierarchy[label] = local; - await Promise.all(data.map(proxy => targetedVisitorRecursive(proxy.fieldId, local))); - } else { - hierarchy[label + path.extname(data)] = data; - } - }; - - const getData = async (seedId: string): Promise<{ data: string | any[], title: string }> => { - return new Promise<{ data: string | any[], title: string }>((resolve, reject) => { - Database.Instance.getDocument(seedId, async (result: any) => { - const { data, proto, title } = result.fields; - if (data) { - if (data.url) { - resolve({ data: data.url, title }); - } else if (data.fields) { - resolve({ data: data.fields, title }); - } else { - reject(); - } - } - if (proto) { - getData(proto.fieldId).then(resolve, reject); - } - }); - }); - }; - - const hierarchyTraverserRecursive = async (file: Archiver.Archiver, hierarchy: Hierarchy, prefix = "Dash Export"): Promise => { - for (const key of Object.keys(hierarchy)) { - const result = hierarchy[key]; - if (typeof result === "string") { - let path: string; - let matches: RegExpExecArray | null; - if ((matches = /\:1050\/files\/(upload\_[\da-z]{32}.*)/g.exec(result)) !== null) { - path = `${__dirname}/public/files/${matches[1]}`; - } else { - const information = await DashUploadUtils.UploadImage(result); - path = information.mediaPaths[0]; - } - file.file(path, { name: key, prefix }); - } else { - await hierarchyTraverserRecursive(file, result, `${prefix}/${key}`); - } - } - }; - router.addSupervisedRoute({ method: Method.GET, subscription: new RouteSubscriber("/downloadId").add("docId"), @@ -600,22 +532,24 @@ function routeSetter(router: RouteManager) { router.addSupervisedRoute({ method: Method.GET, subscription: RouteStore.delete, - onValidation: ({ res, isRelease }) => { + onValidation: async ({ res, isRelease }) => { if (isRelease) { return _permission_denied(res, deletionPermissionError); } - WebSocket.deleteFields().then(() => res.redirect(RouteStore.home)); + await WebSocket.deleteFields(); + res.redirect(RouteStore.home); } }); router.addSupervisedRoute({ method: Method.GET, subscription: RouteStore.deleteAll, - onValidation: ({ res, isRelease }) => { + onValidation: async ({ res, isRelease }) => { if (isRelease) { return _permission_denied(res, deletionPermissionError); } - WebSocket.deleteAll().then(() => res.redirect(RouteStore.home)); + await WebSocket.deleteAll(); + res.redirect(RouteStore.home); } }); -- cgit v1.2.3-70-g09d2 From feec691275ec83e4ddd8fd8ea803f004a371cf11 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Sun, 27 Oct 2019 18:46:57 -0400 Subject: refactoring oauth2 client use in google api serverside --- src/server/apis/google/GoogleApiServerUtils.ts | 87 +++++++++++--------------- src/server/index.ts | 10 ++- 2 files changed, 40 insertions(+), 57 deletions(-) (limited to 'src') diff --git a/src/server/apis/google/GoogleApiServerUtils.ts b/src/server/apis/google/GoogleApiServerUtils.ts index 6093197f1..c0824cfb7 100644 --- a/src/server/apis/google/GoogleApiServerUtils.ts +++ b/src/server/apis/google/GoogleApiServerUtils.ts @@ -1,7 +1,6 @@ import { google } from "googleapis"; -import { createInterface } from "readline"; -import { readFile, writeFile } from "fs"; -import { OAuth2Client, Credentials } from "google-auth-library"; +import { readFile } from "fs"; +import { OAuth2Client, Credentials, OAuth2ClientOptions } from "google-auth-library"; import { Opt } from "../../../new_fields/Doc"; import { GlobalOptions } from 
"googleapis-common"; import { GaxiosResponse } from "gaxios"; @@ -31,6 +30,8 @@ export namespace GoogleApiServerUtils { 'userinfo.profile' ]; + const ClientMapping = new Map(); + export const parseBuffer = (data: Buffer) => JSON.parse(data.toString()); export enum Service { @@ -51,11 +52,11 @@ export namespace GoogleApiServerUtils { export type Endpoint = { get: ApiHandler, create: ApiHandler, batchUpdate: ApiHandler }; export type EndpointParameters = GlobalOptions & { version: "v1" }; - export const GetEndpoint = (sector: string, paths: CredentialInformation) => { + export const GetEndpoint = (sector: string, userId: string) => { return new Promise>(resolve => { - RetrieveCredentials(paths).then(authentication => { + authorize(userId).then(({ client: auth }) => { let routed: Opt; - let parameters: EndpointParameters = { auth: authentication.client, version: "v1" }; + let parameters: EndpointParameters = { auth, version: "v1" }; switch (sector) { case Service.Documents: routed = google.docs(parameters).documents; @@ -69,16 +70,17 @@ export namespace GoogleApiServerUtils { }); }; - export const RetrieveAccessToken = (information: CredentialInformation) => { + export const RetrieveAccessToken = (userId: string): Promise => { return new Promise((resolve, reject) => { - RetrieveCredentials(information).then( - credentials => resolve(credentials.token.access_token!), + authorize(userId).then( + ({ token: { access_token } }) => resolve(access_token!), error => reject(`Error: unable to authenticate Google Photos API request.\n${error}`) ); }); }; - let AuthorizationManager: OAuth2Client; + let installed: OAuth2ClientOptions; + let worker: OAuth2Client; export const LoadOAuthClient = async () => { return new Promise((resolve, reject) => { @@ -87,15 +89,17 @@ export namespace GoogleApiServerUtils { reject(err); return console.log('Error loading client secret file:', err); } - const { client_secret, client_id, redirect_uris } = parseBuffer(credentials).installed; - AuthorizationManager = new google.auth.OAuth2(client_id, client_secret, redirect_uris[0]); + installed = parseBuffer(credentials).installed; + worker = new google.auth.OAuth2(installed); resolve(); }); }); }; + const generateClient = () => new google.auth.OAuth2(installed); + export const GenerateAuthenticationUrl = async (information: CredentialInformation) => { - return AuthorizationManager.generateAuthUrl({ + return worker.generateAuthUrl({ access_type: 'offline', scope: SCOPES.map(relative => prefix + relative), }); @@ -106,16 +110,15 @@ export namespace GoogleApiServerUtils { avatar: string; name: string; } - export const ProcessClientSideCode = async (information: CredentialInformation, authenticationCode: string): Promise => { + export const ProcessClientSideCode = async (userId: string, authenticationCode: string): Promise => { return new Promise((resolve, reject) => { - AuthorizationManager.getToken(authenticationCode, async (err, token) => { + worker.getToken(authenticationCode, async (err, token) => { if (err || !token) { reject(err); return console.error('Error retrieving access token', err); } - AuthorizationManager.setCredentials(token); const enriched = injectUserInfo(token); - await Database.Auxiliary.GoogleAuthenticationToken.Write(information.userId, enriched); + await Database.Auxiliary.GoogleAuthenticationToken.Write(userId, enriched); const { given_name, picture } = enriched.userInfo; resolve({ access_token: enriched.access_token!, @@ -155,57 +158,39 @@ export namespace GoogleApiServerUtils { sub: string; } - export 
const RetrieveCredentials = (information: CredentialInformation) => { - return new Promise((resolve, reject) => { - readFile(information.credentialsPath, async (err, credentials) => { - if (err) { - reject(err); - return console.log('Error loading client secret file:', err); + export const authorize = async (userId: string): Promise => { + return Database.Auxiliary.GoogleAuthenticationToken.Fetch(userId).then(token => { + return new Promise((resolve, reject) => { + const client = generateClient(); + if (token!.expiry_date! < new Date().getTime()) { + // Token has expired, so submitting a request for a refreshed access token + return refreshToken(token!, client, userId).then(resolve, reject); } - authorize(parseBuffer(credentials), information.userId).then(resolve, reject); + // Authentication successful! + client.setCredentials(token!); + resolve({ token: token!, client }); }); }); }; - export const RetrievePhotosEndpoint = (paths: CredentialInformation) => { + export const RetrievePhotosEndpoint = (userId: string) => { return new Promise((resolve, reject) => { - RetrieveAccessToken(paths).then( + RetrieveAccessToken(userId).then( token => resolve(new Photos(token)), reject ); }); }; - type TokenResult = { token: Credentials, client: OAuth2Client }; - /** - * Create an OAuth2 client with the given credentials, and returns the promise resolving to the authenticated client - * @param {Object} credentials The authorization client credentials. - */ - export function authorize(credentials: any, userId: string): Promise { - const { client_secret, client_id, redirect_uris } = credentials.installed; - const oAuth2Client = new google.auth.OAuth2(client_id, client_secret, redirect_uris[0]); - return new Promise((resolve, reject) => { - // Attempting to authorize user (${userId}) - Database.Auxiliary.GoogleAuthenticationToken.Fetch(userId).then(token => { - if (token!.expiry_date! < new Date().getTime()) { - // Token has expired, so submitting a request for a refreshed access token - return refreshToken(token!, client_id, client_secret, oAuth2Client, userId).then(resolve, reject); - } - // Authentication successful! 
- oAuth2Client.setCredentials(token!); - resolve({ token: token!, client: oAuth2Client }); - }); - }); - } + type AuthenticationResult = { token: Credentials, client: OAuth2Client }; const refreshEndpoint = "https://oauth2.googleapis.com/token"; - const refreshToken = (credentials: Credentials, client_id: string, client_secret: string, oAuth2Client: OAuth2Client, userId: string) => { - return new Promise(resolve => { + const refreshToken = (credentials: Credentials, oAuth2Client: OAuth2Client, userId: string) => { + return new Promise(resolve => { let headerParameters = { headers: { 'Content-Type': 'application/x-www-form-urlencoded' } }; let queryParameters = { refreshToken: credentials.refresh_token, - client_id, - client_secret, + ...installed, grant_type: "refresh_token" }; let url = `${refreshEndpoint}?${qs.stringify(queryParameters)}`; diff --git a/src/server/index.ts b/src/server/index.ts index 384800f23..3220a9533 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -564,10 +564,10 @@ function routeSetter(router: RouteManager) { router.addSupervisedRoute({ method: Method.POST, subscription: new RouteSubscriber(RouteStore.googleDocs).add("sector", "action"), - onValidation: ({ req, res }) => { + onValidation: ({ req, res, user }) => { let sector: GoogleApiServerUtils.Service = req.params.sector as GoogleApiServerUtils.Service; let action: GoogleApiServerUtils.Action = req.params.action as GoogleApiServerUtils.Action; - GoogleApiServerUtils.GetEndpoint(GoogleApiServerUtils.Service[sector], { credentialsPath, userId: req.headers.userId as string }).then(endpoint => { + GoogleApiServerUtils.GetEndpoint(GoogleApiServerUtils.Service[sector], user.id).then(endpoint => { let handler = EndpointHandlerMap.get(action); if (endpoint && handler) { let execute = handler(endpoint, req.body).then( @@ -592,7 +592,7 @@ function routeSetter(router: RouteManager) { if (!token) { return res.send(await GoogleApiServerUtils.GenerateAuthenticationUrl(information)); } - GoogleApiServerUtils.RetrieveAccessToken(information).then(token => res.send(token)); + GoogleApiServerUtils.RetrieveAccessToken(userId).then(token => res.send(token)); } }); @@ -600,9 +600,7 @@ function routeSetter(router: RouteManager) { method: Method.POST, subscription: RouteStore.writeGoogleAccessToken, onValidation: async ({ user, req, res }) => { - const userId = user.id; - const information = { credentialsPath, userId }; - res.send(await GoogleApiServerUtils.ProcessClientSideCode(information, req.body.authenticationCode)); + res.send(await GoogleApiServerUtils.ProcessClientSideCode(user.id, req.body.authenticationCode)); } }); -- cgit v1.2.3-70-g09d2 From f0f3dddbe1d3ac54d3754bb913b8ecd9eb6fcc63 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Sun, 27 Oct 2019 21:05:01 -0400 Subject: further cleanup for oauth, separated access token function from client function --- src/server/apis/google/GoogleApiServerUtils.ts | 65 ++++++++++++----------- src/server/apis/google/GooglePhotosUploadUtils.ts | 19 +++---- src/server/database.ts | 2 +- src/server/index.ts | 24 +++------ 4 files changed, 47 insertions(+), 63 deletions(-) (limited to 'src') diff --git a/src/server/apis/google/GoogleApiServerUtils.ts b/src/server/apis/google/GoogleApiServerUtils.ts index c0824cfb7..88f0f3377 100644 --- a/src/server/apis/google/GoogleApiServerUtils.ts +++ b/src/server/apis/google/GoogleApiServerUtils.ts @@ -15,7 +15,6 @@ const path = require("path"); */ export namespace GoogleApiServerUtils { - // If modifying these scopes, delete token.json. 
const prefix = 'https://www.googleapis.com/auth/'; const SCOPES = [ 'documents.readonly', @@ -54,7 +53,7 @@ export namespace GoogleApiServerUtils { export const GetEndpoint = (sector: string, userId: string) => { return new Promise>(resolve => { - authorize(userId).then(({ client: auth }) => { + retrieveOAuthClient(userId).then(auth => { let routed: Opt; let parameters: EndpointParameters = { auth, version: "v1" }; switch (sector) { @@ -70,10 +69,23 @@ export namespace GoogleApiServerUtils { }); }; - export const RetrieveAccessToken = (userId: string): Promise => { + export const retrieveAccessToken = (userId: string): Promise => { return new Promise((resolve, reject) => { - authorize(userId).then( - ({ token: { access_token } }) => resolve(access_token!), + retrieveCredentials(userId).then( + ({ access_token }) => resolve(access_token!), + error => reject(`Error: unable to authenticate Google Photos API request.\n${error}`) + ); + }); + }; + + export const retrieveOAuthClient = (userId: string): Promise => { + return new Promise((resolve, reject) => { + retrieveCredentials(userId).then( + credentials => { + const client = generateClient(); + client.setCredentials(credentials); + resolve(client); + }, error => reject(`Error: unable to authenticate Google Photos API request.\n${error}`) ); }); @@ -82,7 +94,7 @@ export namespace GoogleApiServerUtils { let installed: OAuth2ClientOptions; let worker: OAuth2Client; - export const LoadOAuthClient = async () => { + export const loadClientSecret = async () => { return new Promise((resolve, reject) => { readFile(path.join(__dirname, "../../credentials/google_docs_credentials.json"), async (err, credentials) => { if (err) { @@ -90,7 +102,7 @@ export namespace GoogleApiServerUtils { return console.log('Error loading client secret file:', err); } installed = parseBuffer(credentials).installed; - worker = new google.auth.OAuth2(installed); + worker = generateClient(); resolve(); }); }); @@ -98,7 +110,7 @@ export namespace GoogleApiServerUtils { const generateClient = () => new google.auth.OAuth2(installed); - export const GenerateAuthenticationUrl = async (information: CredentialInformation) => { + export const generateAuthenticationUrl = async () => { return worker.generateAuthUrl({ access_type: 'offline', scope: SCOPES.map(relative => prefix + relative), @@ -110,7 +122,7 @@ export namespace GoogleApiServerUtils { avatar: string; name: string; } - export const ProcessClientSideCode = async (userId: string, authenticationCode: string): Promise => { + export const processNewUser = async (userId: string, authenticationCode: string): Promise => { return new Promise((resolve, reject) => { worker.getToken(authenticationCode, async (err, token) => { if (err || !token) { @@ -158,35 +170,25 @@ export namespace GoogleApiServerUtils { sub: string; } - export const authorize = async (userId: string): Promise => { - return Database.Auxiliary.GoogleAuthenticationToken.Fetch(userId).then(token => { - return new Promise((resolve, reject) => { - const client = generateClient(); - if (token!.expiry_date! < new Date().getTime()) { + const retrieveCredentials = async (userId: string): Promise => { + return new Promise((resolve, reject) => { + Database.Auxiliary.GoogleAuthenticationToken.Fetch(userId).then(credentials => { + if (!credentials) { + return reject(); + } + if (credentials!.expiry_date! 
< new Date().getTime()) { // Token has expired, so submitting a request for a refreshed access token - return refreshToken(token!, client, userId).then(resolve, reject); + return refreshAccessToken(credentials!, userId).then(resolve, reject); } // Authentication successful! - client.setCredentials(token!); - resolve({ token: token!, client }); + resolve(credentials); }); }); }; - export const RetrievePhotosEndpoint = (userId: string) => { - return new Promise((resolve, reject) => { - RetrieveAccessToken(userId).then( - token => resolve(new Photos(token)), - reject - ); - }); - }; - - type AuthenticationResult = { token: Credentials, client: OAuth2Client }; - const refreshEndpoint = "https://oauth2.googleapis.com/token"; - const refreshToken = (credentials: Credentials, oAuth2Client: OAuth2Client, userId: string) => { - return new Promise(resolve => { + const refreshAccessToken = (credentials: Credentials, userId: string) => { + return new Promise(resolve => { let headerParameters = { headers: { 'Content-Type': 'application/x-www-form-urlencoded' } }; let queryParameters = { refreshToken: credentials.refresh_token, @@ -200,8 +202,7 @@ export namespace GoogleApiServerUtils { await Database.Auxiliary.GoogleAuthenticationToken.Update(userId, access_token, expiry_date); credentials.access_token = access_token; credentials.expiry_date = expiry_date; - oAuth2Client.setCredentials(credentials); - resolve({ token: credentials, client: oAuth2Client }); + resolve(credentials); }); }); }; diff --git a/src/server/apis/google/GooglePhotosUploadUtils.ts b/src/server/apis/google/GooglePhotosUploadUtils.ts index 4a67e57cc..d704faa71 100644 --- a/src/server/apis/google/GooglePhotosUploadUtils.ts +++ b/src/server/apis/google/GooglePhotosUploadUtils.ts @@ -20,19 +20,12 @@ export namespace GooglePhotosUploadUtils { } const prepend = (extension: string) => `https://photoslibrary.googleapis.com/v1/${extension}`; - const headers = (type: string) => ({ + const headers = (type: string, token: string) => ({ 'Content-Type': `application/${type}`, - 'Authorization': Bearer, + 'Authorization': token, }); - let Bearer: string; - - export const initialize = async (information: GoogleApiServerUtils.CredentialInformation) => { - const token = await GoogleApiServerUtils.RetrieveAccessToken(information); - Bearer = `Bearer ${token}`; - }; - - export const DispatchGooglePhotosUpload = async (url: string) => { + export const DispatchGooglePhotosUpload = async (bearerToken: string, url: string) => { if (!DashUploadUtils.imageFormats.includes(path.extname(url))) { return undefined; } @@ -40,7 +33,7 @@ export namespace GooglePhotosUploadUtils { const parameters = { method: 'POST', headers: { - ...headers('octet-stream'), + ...headers('octet-stream', bearerToken), 'X-Goog-Upload-File-Name': path.basename(url), 'X-Goog-Upload-Protocol': 'raw' }, @@ -56,13 +49,13 @@ export namespace GooglePhotosUploadUtils { })); }; - export const CreateMediaItems = async (newMediaItems: NewMediaItem[], album?: { id: string }): Promise => { + export const CreateMediaItems = async (bearerToken: string, newMediaItems: NewMediaItem[], album?: { id: string }): Promise => { const newMediaItemResults = await BatchedArray.from(newMediaItems, { batchSize: 50 }).batchedMapPatientInterval( { magnitude: 100, unit: TimeUnit.Milliseconds }, async (batch: NewMediaItem[]) => { const parameters = { method: 'POST', - headers: headers('json'), + headers: headers('json', bearerToken), uri: prepend('mediaItems:batchCreate'), body: { newMediaItems: batch } as any, json: 
true diff --git a/src/server/database.ts b/src/server/database.ts index 12626e594..79dd26b7d 100644 --- a/src/server/database.ts +++ b/src/server/database.ts @@ -298,7 +298,7 @@ export namespace Database { export type StoredCredentials = Credentials & { _id: string }; - export const Fetch = async (userId: string, removeId = true) => { + export const Fetch = async (userId: string, removeId = true): Promise> => { return SanitizedSingletonQuery({ userId }, GoogleAuthentication, removeId); }; diff --git a/src/server/index.ts b/src/server/index.ts index 3220a9533..24866a5e5 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -65,7 +65,7 @@ async function PreliminaryFunctions() { resolve(); }); }); - await GoogleApiServerUtils.LoadOAuthClient(); + await GoogleApiServerUtils.loadClientSecret(); await DashUploadUtils.createIfNotExists(pdfDirectory); await Database.tryInitializeConnection(); } @@ -553,8 +553,6 @@ function routeSetter(router: RouteManager) { } }); - const credentialsPath = path.join(__dirname, "./credentials/google_docs_credentials.json"); - const EndpointHandlerMap = new Map([ ["create", (api, params) => api.create(params)], ["retrieve", (api, params) => api.get(params)], @@ -588,11 +586,10 @@ function routeSetter(router: RouteManager) { onValidation: async ({ user, res }) => { const userId = user.id; const token = await Database.Auxiliary.GoogleAuthenticationToken.Fetch(userId); - const information = { credentialsPath, userId }; if (!token) { - return res.send(await GoogleApiServerUtils.GenerateAuthenticationUrl(information)); + return res.send(await GoogleApiServerUtils.generateAuthenticationUrl()); } - GoogleApiServerUtils.RetrieveAccessToken(userId).then(token => res.send(token)); + GoogleApiServerUtils.retrieveAccessToken(userId).then(token => res.send(token)); } }); @@ -600,35 +597,28 @@ function routeSetter(router: RouteManager) { method: Method.POST, subscription: RouteStore.writeGoogleAccessToken, onValidation: async ({ user, req, res }) => { - res.send(await GoogleApiServerUtils.ProcessClientSideCode(user.id, req.body.authenticationCode)); + res.send(await GoogleApiServerUtils.processNewUser(user.id, req.body.authenticationCode)); } }); const tokenError = "Unable to successfully upload bytes for all images!"; const mediaError = "Unable to convert all uploaded bytes to media items!"; - const userIdError = "Unable to parse the identification of the user!"; router.addSupervisedRoute({ method: Method.POST, subscription: RouteStore.googlePhotosMediaUpload, onValidation: async ({ user, req, res }) => { const { media } = req.body; - const userId = user.id; - if (!userId) { - return _error(res, userIdError); - } - - await GooglePhotosUploadUtils.initialize({ credentialsPath, userId }); let failed: number[] = []; - + const token = await GoogleApiServerUtils.retrieveAccessToken(user.id); const newMediaItems = await BatchedArray.from(media, { batchSize: 25 }).batchedMapPatientInterval( { magnitude: 100, unit: TimeUnit.Milliseconds }, async (batch: GooglePhotosUploadUtils.MediaInput[]) => { const newMediaItems: NewMediaItem[] = []; for (let index = 0; index < batch.length; index++) { const element = batch[index]; - const uploadToken = await GooglePhotosUploadUtils.DispatchGooglePhotosUpload(element.url); + const uploadToken = await GooglePhotosUploadUtils.DispatchGooglePhotosUpload(token, element.url); if (!uploadToken) { failed.push(index); } else { @@ -647,7 +637,7 @@ function routeSetter(router: RouteManager) { console.error(`Unable to upload ${failedCount} 
image${failedCount === 1 ? "" : "s"} to Google's servers`); } - GooglePhotosUploadUtils.CreateMediaItems(newMediaItems, req.body.album).then( + GooglePhotosUploadUtils.CreateMediaItems(token, newMediaItems, req.body.album).then( result => _success(res, { results: result.newMediaItemResults, failed }), error => _error(res, mediaError, error) ); -- cgit v1.2.3-70-g09d2 From c4e832aa5c384c9d5f018ed1148cc003e988a45e Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Sun, 27 Oct 2019 21:09:53 -0400 Subject: cleanup --- src/server/apis/google/GoogleApiServerUtils.ts | 115 +++++++++++++------------ 1 file changed, 58 insertions(+), 57 deletions(-) (limited to 'src') diff --git a/src/server/apis/google/GoogleApiServerUtils.ts b/src/server/apis/google/GoogleApiServerUtils.ts index 88f0f3377..5a6aa7abe 100644 --- a/src/server/apis/google/GoogleApiServerUtils.ts +++ b/src/server/apis/google/GoogleApiServerUtils.ts @@ -6,28 +6,68 @@ import { GlobalOptions } from "googleapis-common"; import { GaxiosResponse } from "gaxios"; import request = require('request-promise'); import * as qs from 'query-string'; -import Photos = require('googlephotos'); import { Database } from "../../database"; const path = require("path"); +const prefix = 'https://www.googleapis.com/auth/'; +const refreshEndpoint = "https://oauth2.googleapis.com/token"; +const SCOPES = [ + 'documents.readonly', + 'documents', + 'presentations', + 'presentations.readonly', + 'drive', + 'drive.file', + 'photoslibrary', + 'photoslibrary.appendonly', + 'photoslibrary.sharing', + 'userinfo.profile' +]; + /** * Server side authentication for Google Api queries. */ export namespace GoogleApiServerUtils { - const prefix = 'https://www.googleapis.com/auth/'; - const SCOPES = [ - 'documents.readonly', - 'documents', - 'presentations', - 'presentations.readonly', - 'drive', - 'drive.file', - 'photoslibrary', - 'photoslibrary.appendonly', - 'photoslibrary.sharing', - 'userinfo.profile' - ]; + export type EnrichedCredentials = Credentials & { userInfo: UserInfo }; + + export interface GoogleAuthenticationResult { + access_token: string; + avatar: string; + name: string; + } + + export interface UserInfo { + at_hash: string; + aud: string; + azp: string; + exp: number; + family_name: string; + given_name: string; + iat: number; + iss: string; + locale: string; + name: string; + picture: string; + sub: string; + } + + let installed: OAuth2ClientOptions; + let worker: OAuth2Client; + + export const loadClientSecret = async () => { + return new Promise((resolve, reject) => { + readFile(path.join(__dirname, "../../credentials/google_docs_credentials.json"), async (err, credentials) => { + if (err) { + reject(err); + return console.log('Error loading client secret file:', err); + } + installed = parseBuffer(credentials).installed; + worker = generateClient(); + resolve(); + }); + }); + }; const ClientMapping = new Map(); @@ -71,7 +111,7 @@ export namespace GoogleApiServerUtils { export const retrieveAccessToken = (userId: string): Promise => { return new Promise((resolve, reject) => { - retrieveCredentials(userId).then( + retrieveCurrentCredentials(userId).then( ({ access_token }) => resolve(access_token!), error => reject(`Error: unable to authenticate Google Photos API request.\n${error}`) ); @@ -80,34 +120,17 @@ export namespace GoogleApiServerUtils { export const retrieveOAuthClient = (userId: string): Promise => { return new Promise((resolve, reject) => { - retrieveCredentials(userId).then( + retrieveCurrentCredentials(userId).then( credentials => { const 
client = generateClient(); client.setCredentials(credentials); resolve(client); }, - error => reject(`Error: unable to authenticate Google Photos API request.\n${error}`) + error => reject(`Error: unable to instantiate and certify a new OAuth2 client.\n${error}`) ); }); }; - let installed: OAuth2ClientOptions; - let worker: OAuth2Client; - - export const loadClientSecret = async () => { - return new Promise((resolve, reject) => { - readFile(path.join(__dirname, "../../credentials/google_docs_credentials.json"), async (err, credentials) => { - if (err) { - reject(err); - return console.log('Error loading client secret file:', err); - } - installed = parseBuffer(credentials).installed; - worker = generateClient(); - resolve(); - }); - }); - }; - const generateClient = () => new google.auth.OAuth2(installed); export const generateAuthenticationUrl = async () => { @@ -117,11 +140,6 @@ export namespace GoogleApiServerUtils { }); }; - export interface GoogleAuthenticationResult { - access_token: string; - avatar: string; - name: string; - } export const processNewUser = async (userId: string, authenticationCode: string): Promise => { return new Promise((resolve, reject) => { worker.getToken(authenticationCode, async (err, token) => { @@ -154,23 +172,7 @@ export namespace GoogleApiServerUtils { return { ...credentials, userInfo }; }; - export type EnrichedCredentials = Credentials & { userInfo: UserInfo }; - export interface UserInfo { - at_hash: string; - aud: string; - azp: string; - exp: number; - family_name: string; - given_name: string; - iat: number; - iss: string; - locale: string; - name: string; - picture: string; - sub: string; - } - - const retrieveCredentials = async (userId: string): Promise => { + const retrieveCurrentCredentials = async (userId: string): Promise => { return new Promise((resolve, reject) => { Database.Auxiliary.GoogleAuthenticationToken.Fetch(userId).then(credentials => { if (!credentials) { @@ -186,7 +188,6 @@ export namespace GoogleApiServerUtils { }); }; - const refreshEndpoint = "https://oauth2.googleapis.com/token"; const refreshAccessToken = (credentials: Credentials, userId: string) => { return new Promise(resolve => { let headerParameters = { headers: { 'Content-Type': 'application/x-www-form-urlencoded' } }; -- cgit v1.2.3-70-g09d2 From b217bd842356deace1e6620625b8f1841a9bce7b Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Mon, 28 Oct 2019 02:10:58 -0400 Subject: using client mapping --- src/server/apis/google/GoogleApiServerUtils.ts | 43 ++++++++++++++++++-------- 1 file changed, 30 insertions(+), 13 deletions(-) (limited to 'src') diff --git a/src/server/apis/google/GoogleApiServerUtils.ts b/src/server/apis/google/GoogleApiServerUtils.ts index 5a6aa7abe..ad7540e5d 100644 --- a/src/server/apis/google/GoogleApiServerUtils.ts +++ b/src/server/apis/google/GoogleApiServerUtils.ts @@ -37,6 +37,11 @@ export namespace GoogleApiServerUtils { name: string; } + export interface CredentialsResult { + credentials: Credentials; + refreshed: boolean; + } + export interface UserInfo { at_hash: string; aud: string; @@ -69,7 +74,7 @@ export namespace GoogleApiServerUtils { }); }; - const ClientMapping = new Map(); + const authenticationClients = new Map(); export const parseBuffer = (data: Buffer) => JSON.parse(data.toString()); @@ -111,8 +116,8 @@ export namespace GoogleApiServerUtils { export const retrieveAccessToken = (userId: string): Promise => { return new Promise((resolve, reject) => { - retrieveCurrentCredentials(userId).then( - ({ access_token }) => 
resolve(access_token!), + retrieveCredentials(userId).then( + ({ credentials }) => resolve(credentials.access_token!), error => reject(`Error: unable to authenticate Google Photos API request.\n${error}`) ); }); @@ -120,10 +125,14 @@ export namespace GoogleApiServerUtils { export const retrieveOAuthClient = (userId: string): Promise => { return new Promise((resolve, reject) => { - retrieveCurrentCredentials(userId).then( - credentials => { - const client = generateClient(); - client.setCredentials(credentials); + retrieveCredentials(userId).then( + ({ credentials, refreshed }) => { + let client = authenticationClients.get(userId); + if (!client) { + authenticationClients.set(userId, client = generateClientWith(credentials)); + } else if (refreshed) { + client.setCredentials(credentials); + } resolve(client); }, error => reject(`Error: unable to instantiate and certify a new OAuth2 client.\n${error}`) @@ -131,7 +140,15 @@ export namespace GoogleApiServerUtils { }); }; - const generateClient = () => new google.auth.OAuth2(installed); + function generateClient() { + return new google.auth.OAuth2(installed); + } + + function generateClientWith(credentials: Credentials) { + const client = new google.auth.OAuth2(installed); + client.setCredentials(credentials); + return client; + } export const generateAuthenticationUrl = async () => { return worker.generateAuthUrl({ @@ -172,8 +189,8 @@ export namespace GoogleApiServerUtils { return { ...credentials, userInfo }; }; - const retrieveCurrentCredentials = async (userId: string): Promise => { - return new Promise((resolve, reject) => { + const retrieveCredentials = async (userId: string): Promise => { + return new Promise((resolve, reject) => { Database.Auxiliary.GoogleAuthenticationToken.Fetch(userId).then(credentials => { if (!credentials) { return reject(); @@ -183,13 +200,13 @@ export namespace GoogleApiServerUtils { return refreshAccessToken(credentials!, userId).then(resolve, reject); } // Authentication successful! 
- resolve(credentials); + resolve({ credentials, refreshed: false }); }); }); }; const refreshAccessToken = (credentials: Credentials, userId: string) => { - return new Promise(resolve => { + return new Promise(resolve => { let headerParameters = { headers: { 'Content-Type': 'application/x-www-form-urlencoded' } }; let queryParameters = { refreshToken: credentials.refresh_token, @@ -203,7 +220,7 @@ export namespace GoogleApiServerUtils { await Database.Auxiliary.GoogleAuthenticationToken.Update(userId, access_token, expiry_date); credentials.access_token = access_token; credentials.expiry_date = expiry_date; - resolve(credentials); + resolve({ credentials, refreshed: true }); }); }); }; -- cgit v1.2.3-70-g09d2 From 1f6e1d7e063f9ce1c08486f8c0c11b6c2c4198dc Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Mon, 28 Oct 2019 04:11:53 -0400 Subject: repaired google photos routine, no route handlers can have dangling promises --- src/server/ApiManagers/ExportManager.ts | 4 +- src/server/ApiManagers/UtilManager.ts | 36 ++-- src/server/RouteManager.ts | 1 + src/server/apis/google/GoogleApiServerUtils.ts | 9 +- src/server/apis/google/GooglePhotosUploadUtils.ts | 2 +- src/server/index.ts | 202 +++++++++++----------- 6 files changed, 137 insertions(+), 117 deletions(-) (limited to 'src') diff --git a/src/server/ApiManagers/ExportManager.ts b/src/server/ApiManagers/ExportManager.ts index 261acbbe0..14ac7dd5b 100644 --- a/src/server/ApiManagers/ExportManager.ts +++ b/src/server/ApiManagers/ExportManager.ts @@ -26,7 +26,7 @@ export default class ExportManager extends ApiManager { const id = req.params.docId; const hierarchy: Hierarchy = {}; await buildHierarchyRecursive(id, hierarchy); - BuildAndDispatchZip(res, zip => writeHierarchyRecursive(zip, hierarchy)); + return BuildAndDispatchZip(res, zip => writeHierarchyRecursive(zip, hierarchy)); } }); } @@ -48,7 +48,7 @@ export async function BuildAndDispatchZip(res: express.Response, mutator: ZipMut const zip = Archiver('zip'); zip.pipe(res); await mutator(zip); - zip.finalize(); + return zip.finalize(); } /** diff --git a/src/server/ApiManagers/UtilManager.ts b/src/server/ApiManagers/UtilManager.ts index a3f802b20..61cda2e9b 100644 --- a/src/server/ApiManagers/UtilManager.ts +++ b/src/server/ApiManagers/UtilManager.ts @@ -10,13 +10,16 @@ export default class UtilManager extends ApiManager { register({ method: Method.GET, subscription: "/pull", - onValidation: ({ res }) => { - exec('"C:\\Program Files\\Git\\git-bash.exe" -c "git pull"', err => { - if (err) { - res.send(err.message); - return; - } - res.redirect("/"); + onValidation: async ({ res }) => { + return new Promise(resolve => { + exec('"C:\\Program Files\\Git\\git-bash.exe" -c "git pull"', err => { + if (err) { + res.send(err.message); + return; + } + res.redirect("/"); + resolve(); + }); }); } }); @@ -24,14 +27,14 @@ export default class UtilManager extends ApiManager { register({ method: Method.GET, subscription: "/buxton", - onValidation: ({ res }) => { + onValidation: async ({ res }) => { let cwd = '../scraping/buxton'; let onResolved = (stdout: string) => { console.log(stdout); res.redirect("/"); }; let onRejected = (err: any) => { console.error(err.message); res.send(err); }; let tryPython3 = () => command_line('python3 scraper.py', cwd).then(onResolved, onRejected); - command_line('python scraper.py', cwd).then(onResolved, tryPython3); + return command_line('python scraper.py', cwd).then(onResolved, tryPython3); }, }); @@ -39,12 +42,15 @@ export default class UtilManager extends ApiManager { 
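+ // Responds with the server's current build: the handler below shells out to `git rev-parse HEAD` and sends the resulting commit hash (or the error message) back to the caller.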
method: Method.GET, subscription: "/version", onValidation: ({ res }) => { - exec('"C:\\Program Files\\Git\\bin\\git.exe" rev-parse HEAD', (err, stdout) => { - if (err) { - res.send(err.message); - return; - } - res.send(stdout); + return new Promise(resolve => { + exec('"C:\\Program Files\\Git\\bin\\git.exe" rev-parse HEAD', (err, stdout) => { + if (err) { + res.send(err.message); + return; + } + res.send(stdout); + }); + resolve(); }); } }); diff --git a/src/server/RouteManager.ts b/src/server/RouteManager.ts index ef083a88a..21ce9c9e4 100644 --- a/src/server/RouteManager.ts +++ b/src/server/RouteManager.ts @@ -75,6 +75,7 @@ export default class RouteManager { } setTimeout(() => { if (!res.headersSent) { + console.log("Initiating fallback for ", target); const warning = `request to ${target} fell through - this is a fallback response`; res.send({ warning }); } diff --git a/src/server/apis/google/GoogleApiServerUtils.ts b/src/server/apis/google/GoogleApiServerUtils.ts index ad7540e5d..1cca07036 100644 --- a/src/server/apis/google/GoogleApiServerUtils.ts +++ b/src/server/apis/google/GoogleApiServerUtils.ts @@ -62,12 +62,17 @@ export namespace GoogleApiServerUtils { export const loadClientSecret = async () => { return new Promise((resolve, reject) => { - readFile(path.join(__dirname, "../../credentials/google_docs_credentials.json"), async (err, credentials) => { + readFile(path.join(__dirname, "../../credentials/google_docs_credentials.json"), async (err, projectCredentials) => { if (err) { reject(err); return console.log('Error loading client secret file:', err); } - installed = parseBuffer(credentials).installed; + const { client_secret, client_id, redirect_uris } = parseBuffer(projectCredentials).installed; + installed = { + clientId: client_id, + clientSecret: client_secret, + redirectUri: redirect_uris[0] + }; worker = generateClient(); resolve(); }); diff --git a/src/server/apis/google/GooglePhotosUploadUtils.ts b/src/server/apis/google/GooglePhotosUploadUtils.ts index d704faa71..172fa8d46 100644 --- a/src/server/apis/google/GooglePhotosUploadUtils.ts +++ b/src/server/apis/google/GooglePhotosUploadUtils.ts @@ -22,7 +22,7 @@ export namespace GooglePhotosUploadUtils { const prepend = (extension: string) => `https://photoslibrary.googleapis.com/v1/${extension}`; const headers = (type: string, token: string) => ({ 'Content-Type': `application/${type}`, - 'Authorization': token, + 'Authorization': `Bearer ${token}`, }); export const DispatchGooglePhotosUpload = async (bearerToken: string, url: string) => { diff --git a/src/server/index.ts b/src/server/index.ts index 24866a5e5..eb19c71a9 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -225,55 +225,58 @@ function routeSetter(router: RouteManager) { } } }; - form.parse(req, async (err, fields, files) => { - remap = fields.remap !== "false"; - let id: string = ""; - try { - for (const name in files) { - const path_2 = files[name].path; - const zip = new AdmZip(path_2); - zip.getEntries().forEach((entry: any) => { - if (!entry.entryName.startsWith("files/")) return; - let dirname = path.dirname(entry.entryName) + "/"; - let extname = path.extname(entry.entryName); - let basename = path.basename(entry.entryName).split(".")[0]; - // zip.extractEntryTo(dirname + basename + "_o" + extname, __dirname + RouteStore.public, true, false); - // zip.extractEntryTo(dirname + basename + "_s" + extname, __dirname + RouteStore.public, true, false); - // zip.extractEntryTo(dirname + basename + "_m" + extname, __dirname + RouteStore.public, true, 
false); - // zip.extractEntryTo(dirname + basename + "_l" + extname, __dirname + RouteStore.public, true, false); + return new Promise(resolve => { + form.parse(req, async (_err, fields, files) => { + remap = fields.remap !== "false"; + let id: string = ""; + try { + for (const name in files) { + const path_2 = files[name].path; + const zip = new AdmZip(path_2); + zip.getEntries().forEach((entry: any) => { + if (!entry.entryName.startsWith("files/")) return; + let dirname = path.dirname(entry.entryName) + "/"; + let extname = path.extname(entry.entryName); + let basename = path.basename(entry.entryName).split(".")[0]; + // zip.extractEntryTo(dirname + basename + "_o" + extname, __dirname + RouteStore.public, true, false); + // zip.extractEntryTo(dirname + basename + "_s" + extname, __dirname + RouteStore.public, true, false); + // zip.extractEntryTo(dirname + basename + "_m" + extname, __dirname + RouteStore.public, true, false); + // zip.extractEntryTo(dirname + basename + "_l" + extname, __dirname + RouteStore.public, true, false); + try { + zip.extractEntryTo(entry.entryName, __dirname + RouteStore.public, true, false); + dirname = "/" + dirname; + + fs.createReadStream(__dirname + RouteStore.public + dirname + basename + extname).pipe(fs.createWriteStream(__dirname + RouteStore.public + dirname + basename + "_o" + extname)); + fs.createReadStream(__dirname + RouteStore.public + dirname + basename + extname).pipe(fs.createWriteStream(__dirname + RouteStore.public + dirname + basename + "_s" + extname)); + fs.createReadStream(__dirname + RouteStore.public + dirname + basename + extname).pipe(fs.createWriteStream(__dirname + RouteStore.public + dirname + basename + "_m" + extname)); + fs.createReadStream(__dirname + RouteStore.public + dirname + basename + extname).pipe(fs.createWriteStream(__dirname + RouteStore.public + dirname + basename + "_l" + extname)); + } catch (e) { + console.log(e); + } + }); + const json = zip.getEntry("doc.json"); + let docs: any; try { - zip.extractEntryTo(entry.entryName, __dirname + RouteStore.public, true, false); - dirname = "/" + dirname; - - fs.createReadStream(__dirname + RouteStore.public + dirname + basename + extname).pipe(fs.createWriteStream(__dirname + RouteStore.public + dirname + basename + "_o" + extname)); - fs.createReadStream(__dirname + RouteStore.public + dirname + basename + extname).pipe(fs.createWriteStream(__dirname + RouteStore.public + dirname + basename + "_s" + extname)); - fs.createReadStream(__dirname + RouteStore.public + dirname + basename + extname).pipe(fs.createWriteStream(__dirname + RouteStore.public + dirname + basename + "_m" + extname)); - fs.createReadStream(__dirname + RouteStore.public + dirname + basename + extname).pipe(fs.createWriteStream(__dirname + RouteStore.public + dirname + basename + "_l" + extname)); - } catch (e) { - console.log(e); - } - }); - const json = zip.getEntry("doc.json"); - let docs: any; - try { - let data = JSON.parse(json.getData().toString("utf8")); - docs = data.docs; - id = data.id; - docs = Object.keys(docs).map(key => docs[key]); - docs.forEach(mapFn); - await Promise.all(docs.map((doc: any) => new Promise(res => Database.Instance.replace(doc.id, doc, (err, r) => { - err && console.log(err); - res(); - }, true, "newDocuments")))); - } catch (e) { console.log(e); } - fs.unlink(path_2, () => { }); - } - if (id) { - res.send(JSON.stringify(getId(id))); - } else { - res.send(JSON.stringify("error")); - } - } catch (e) { console.log(e); } + let data = 
JSON.parse(json.getData().toString("utf8")); + docs = data.docs; + id = data.id; + docs = Object.keys(docs).map(key => docs[key]); + docs.forEach(mapFn); + await Promise.all(docs.map((doc: any) => new Promise(res => Database.Instance.replace(doc.id, doc, (err, r) => { + err && console.log(err); + res(); + }, true, "newDocuments")))); + } catch (e) { console.log(e); } + fs.unlink(path_2, () => { }); + } + if (id) { + res.send(JSON.stringify(getId(id))); + } else { + res.send(JSON.stringify("error")); + } + } catch (e) { console.log(e); } + resolve(); + }); }); } }); @@ -285,22 +288,25 @@ function routeSetter(router: RouteManager) { let filename = req.params.filename; let noExt = filename.substring(0, filename.length - ".png".length); let pagenumber = parseInt(noExt.split('-')[1]); - fs.exists(uploadDirectory + filename, (exists: boolean) => { - console.log(`${uploadDirectory + filename} ${exists ? "exists" : "does not exist"}`); - if (exists) { - let input = fs.createReadStream(uploadDirectory + filename); - probe(input, (err: any, result: any) => { - if (err) { - console.log(err); - console.log(`error on ${filename}`); - return; - } - res.send({ path: "/files/" + filename, width: result.width, height: result.height }); - }); - } - else { - LoadPage(uploadDirectory + filename.substring(0, filename.length - noExt.split('-')[1].length - ".PNG".length - 1) + ".pdf", pagenumber, res); - } + return new Promise(resolve => { + fs.exists(uploadDirectory + filename, (exists: boolean) => { + console.log(`${uploadDirectory + filename} ${exists ? "exists" : "does not exist"}`); + if (exists) { + let input = fs.createReadStream(uploadDirectory + filename); + probe(input, (err: any, result: any) => { + if (err) { + console.log(err); + console.log(`error on ${filename}`); + return; + } + res.send({ path: "/files/" + filename, width: result.width, height: result.height }); + }); + } + else { + LoadPage(uploadDirectory + filename.substring(0, filename.length - noExt.split('-')[1].length - ".PNG".length - 1) + ".pdf", pagenumber, res); + } + resolve(); + }); }); } }); @@ -414,8 +420,8 @@ function routeSetter(router: RouteManager) { var canvas = createCanvas(width, height); var context = canvas.getContext('2d'); return { - canvas: canvas, - context: context, + canvas, + context }; } @@ -442,37 +448,39 @@ function routeSetter(router: RouteManager) { router.addSupervisedRoute({ method: Method.POST, subscription: RouteStore.upload, - onValidation: ({ req, res }) => { + onValidation: async ({ req, res }) => { let form = new formidable.IncomingForm(); form.uploadDir = uploadDirectory; form.keepExtensions = true; - form.parse(req, async (_err, _fields, files) => { - let results: ImageFileResponse[] = []; - for (const key in files) { - const { type, path: location, name } = files[key]; - const filename = path.basename(location); - let uploadInformation: Opt; - if (filename.endsWith(".pdf")) { - let dataBuffer = fs.readFileSync(uploadDirectory + filename); - const result: ParsedPDF = await pdf(dataBuffer); - await new Promise(resolve => { - const path = pdfDirectory + "/" + filename.substring(0, filename.length - ".pdf".length) + ".txt"; - fs.createWriteStream(path).write(result.text, error => { - if (!error) { - resolve(); - } else { - reject(error); - } + return new Promise(resolve => { + form.parse(req, async (_err, _fields, files) => { + let results: ImageFileResponse[] = []; + for (const key in files) { + const { type, path: location, name } = files[key]; + const filename = path.basename(location); + let 
uploadInformation: Opt; + if (filename.endsWith(".pdf")) { + let dataBuffer = fs.readFileSync(uploadDirectory + filename); + const result: ParsedPDF = await pdf(dataBuffer); + await new Promise(resolve => { + const path = pdfDirectory + "/" + filename.substring(0, filename.length - ".pdf".length) + ".txt"; + fs.createWriteStream(path).write(result.text, error => { + if (!error) { + resolve(); + } else { + reject(error); + } + }); }); - }); - } else { - uploadInformation = await DashUploadUtils.UploadImage(uploadDirectory + filename, filename); + } else { + uploadInformation = await DashUploadUtils.UploadImage(uploadDirectory + filename, filename); + } + const exif = uploadInformation ? uploadInformation.exifData : undefined; + results.push({ name, type, path: `/files/${filename}`, exif }); } - const exif = uploadInformation ? uploadInformation.exifData : undefined; - results.push({ name, type, path: `/files/${filename}`, exif }); - - } - _success(res, results); + _success(res, results); + resolve(); + }); }); } }); @@ -500,7 +508,7 @@ function routeSetter(router: RouteManager) { res.status(401).send("incorrect parameters specified"); return; } - imageDataUri.outputFile(uri, uploadDirectory + filename).then((savedName: string) => { + return imageDataUri.outputFile(uri, uploadDirectory + filename).then((savedName: string) => { const ext = path.extname(savedName); let resizers = [ { resizer: sharp().resize(100, undefined, { withoutEnlargement: true }), suffix: "_s" }, @@ -562,10 +570,10 @@ function routeSetter(router: RouteManager) { router.addSupervisedRoute({ method: Method.POST, subscription: new RouteSubscriber(RouteStore.googleDocs).add("sector", "action"), - onValidation: ({ req, res, user }) => { + onValidation: async ({ req, res, user }) => { let sector: GoogleApiServerUtils.Service = req.params.sector as GoogleApiServerUtils.Service; let action: GoogleApiServerUtils.Action = req.params.action as GoogleApiServerUtils.Action; - GoogleApiServerUtils.GetEndpoint(GoogleApiServerUtils.Service[sector], user.id).then(endpoint => { + return GoogleApiServerUtils.GetEndpoint(GoogleApiServerUtils.Service[sector], user.id).then(endpoint => { let handler = EndpointHandlerMap.get(action); if (endpoint && handler) { let execute = handler(endpoint, req.body).then( @@ -589,7 +597,7 @@ function routeSetter(router: RouteManager) { if (!token) { return res.send(await GoogleApiServerUtils.generateAuthenticationUrl()); } - GoogleApiServerUtils.retrieveAccessToken(userId).then(token => res.send(token)); + return GoogleApiServerUtils.retrieveAccessToken(userId).then(token => res.send(token)); } }); @@ -637,7 +645,7 @@ function routeSetter(router: RouteManager) { console.error(`Unable to upload ${failedCount} image${failedCount === 1 ? 
"" : "s"} to Google's servers`); } - GooglePhotosUploadUtils.CreateMediaItems(token, newMediaItems, req.body.album).then( + return GooglePhotosUploadUtils.CreateMediaItems(token, newMediaItems, req.body.album).then( result => _success(res, { results: result.newMediaItemResults, failed }), error => _error(res, mediaError, error) ); -- cgit v1.2.3-70-g09d2 From acea9d7aa984fe8b1eeac0546833d3dca3c844e3 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Mon, 28 Oct 2019 04:14:37 -0400 Subject: removed one-line functions --- src/server/apis/google/GoogleApiServerUtils.ts | 13 +++---------- 1 file changed, 3 insertions(+), 10 deletions(-) (limited to 'src') diff --git a/src/server/apis/google/GoogleApiServerUtils.ts b/src/server/apis/google/GoogleApiServerUtils.ts index 1cca07036..92bb8d072 100644 --- a/src/server/apis/google/GoogleApiServerUtils.ts +++ b/src/server/apis/google/GoogleApiServerUtils.ts @@ -67,13 +67,12 @@ export namespace GoogleApiServerUtils { reject(err); return console.log('Error loading client secret file:', err); } - const { client_secret, client_id, redirect_uris } = parseBuffer(projectCredentials).installed; - installed = { + const { client_secret, client_id, redirect_uris } = JSON.parse(projectCredentials.toString()).installed; + worker = new google.auth.OAuth2({ clientId: client_id, clientSecret: client_secret, redirectUri: redirect_uris[0] - }; - worker = generateClient(); + }); resolve(); }); }); @@ -81,8 +80,6 @@ export namespace GoogleApiServerUtils { const authenticationClients = new Map(); - export const parseBuffer = (data: Buffer) => JSON.parse(data.toString()); - export enum Service { Documents = "Documents", Slides = "Slides" @@ -145,10 +142,6 @@ export namespace GoogleApiServerUtils { }); }; - function generateClient() { - return new google.auth.OAuth2(installed); - } - function generateClientWith(credentials: Credentials) { const client = new google.auth.OAuth2(installed); client.setCredentials(credentials); -- cgit v1.2.3-70-g09d2 From b259472385b03099380f22c7c19ae135b2adf30c Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Mon, 28 Oct 2019 04:17:06 -0400 Subject: rename --- src/server/apis/google/GoogleApiServerUtils.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) (limited to 'src') diff --git a/src/server/apis/google/GoogleApiServerUtils.ts b/src/server/apis/google/GoogleApiServerUtils.ts index 92bb8d072..4e5175a2b 100644 --- a/src/server/apis/google/GoogleApiServerUtils.ts +++ b/src/server/apis/google/GoogleApiServerUtils.ts @@ -157,12 +157,12 @@ export namespace GoogleApiServerUtils { export const processNewUser = async (userId: string, authenticationCode: string): Promise => { return new Promise((resolve, reject) => { - worker.getToken(authenticationCode, async (err, token) => { - if (err || !token) { + worker.getToken(authenticationCode, async (err, credentials) => { + if (err || !credentials) { reject(err); return console.error('Error retrieving access token', err); } - const enriched = injectUserInfo(token); + const enriched = injectUserInfo(credentials); await Database.Auxiliary.GoogleAuthenticationToken.Write(userId, enriched); const { given_name, picture } = enriched.userInfo; resolve({ -- cgit v1.2.3-70-g09d2 From c56b602e892707dbc7e22be2edba75f49a465ec7 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Mon, 28 Oct 2019 15:48:54 -0400 Subject: server utils beginning commenting --- src/server/apis/google/GoogleApiServerUtils.ts | 168 ++++++++++++++++++++----- 1 file changed, 139 insertions(+), 29 deletions(-) (limited to 'src') diff 
--git a/src/server/apis/google/GoogleApiServerUtils.ts b/src/server/apis/google/GoogleApiServerUtils.ts index 4e5175a2b..9071b0485 100644 --- a/src/server/apis/google/GoogleApiServerUtils.ts +++ b/src/server/apis/google/GoogleApiServerUtils.ts @@ -7,10 +7,21 @@ import { GaxiosResponse } from "gaxios"; import request = require('request-promise'); import * as qs from 'query-string'; import { Database } from "../../database"; -const path = require("path"); +import path from "path"; +/** + * + */ const prefix = 'https://www.googleapis.com/auth/'; + +/** + * + */ const refreshEndpoint = "https://oauth2.googleapis.com/token"; + +/** + * + */ const SCOPES = [ 'documents.readonly', 'documents', @@ -25,23 +36,31 @@ const SCOPES = [ ]; /** - * Server side authentication for Google Api queries. + * This namespace manages server side authentication for Google API queries, either + * from the standard v1 APIs or the Google Photos REST API. */ export namespace GoogleApiServerUtils { - export type EnrichedCredentials = Credentials & { userInfo: UserInfo }; - + /** + * + */ export interface GoogleAuthenticationResult { access_token: string; avatar: string; name: string; } + /** + * + */ export interface CredentialsResult { credentials: Credentials; refreshed: boolean; } + /** + * + */ export interface UserInfo { at_hash: string; aud: string; @@ -57,9 +76,74 @@ export namespace GoogleApiServerUtils { sub: string; } + /** + * + */ + export enum Service { + Documents = "Documents", + Slides = "Slides" + } + + /** + * + */ + export interface CredentialInformation { + credentialsPath: string; + userId: string; + } + + /** + * + */ let installed: OAuth2ClientOptions; + + /** + * This is a global authorization client that is never + * passed around, and whose credentials are never set. + * Its job is purely to generate new authentication urls + * (users will follow to get to Google's permissions GUI) + * and to use the codes returned from that process to generate the + * initial credentials. 
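A minimal sketch of the two-step exchange the worker client described above performs, using only google-auth-library calls that appear elsewhere in this patch (generateAuthUrl, getToken); the credential values are placeholders rather than Dash's real configuration:

    import { OAuth2Client } from "google-auth-library";

    async function sketchWorkerFlow() {
        // built once from the installed client secret, just like the worker below
        const worker = new OAuth2Client({
            clientId: "<client_id>",
            clientSecret: "<client_secret>",
            redirectUri: "<redirect_uri>"
        });
        // 1) produce the url the user visits to grant Dash permissions
        const url = worker.generateAuthUrl({
            access_type: "offline",
            scope: ["https://www.googleapis.com/auth/documents.readonly"]
        });
        // 2) exchange the code the user pastes back for the initial credentials
        const { tokens } = await worker.getToken("<pasted authentication code>");
        return { url, tokens };
    }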
+ */ let worker: OAuth2Client; + /** + * + */ + export type ApiResponse = Promise; + + /** + * + */ + export type ApiRouter = (endpoint: Endpoint, parameters: any) => ApiResponse; + + /** + * + */ + export type ApiHandler = (parameters: any, methodOptions?: any) => ApiResponse; + + /** + * + */ + export type Action = "create" | "retrieve" | "update"; + + /** + * + */ + export interface Endpoint { + get: ApiHandler; + create: ApiHandler; + batchUpdate: ApiHandler; + } + + /** + * + */ + export type EndpointParameters = GlobalOptions & { version: "v1" }; + + /** + * + */ export const loadClientSecret = async () => { return new Promise((resolve, reject) => { readFile(path.join(__dirname, "../../credentials/google_docs_credentials.json"), async (err, projectCredentials) => { @@ -68,36 +152,28 @@ export namespace GoogleApiServerUtils { return console.log('Error loading client secret file:', err); } const { client_secret, client_id, redirect_uris } = JSON.parse(projectCredentials.toString()).installed; - worker = new google.auth.OAuth2({ + // initialize the global authorization client + installed = { clientId: client_id, clientSecret: client_secret, redirectUri: redirect_uris[0] - }); + }; + worker = generateClient(); resolve(); }); }); }; + /** + * + */ const authenticationClients = new Map(); - export enum Service { - Documents = "Documents", - Slides = "Slides" - } - - export interface CredentialInformation { - credentialsPath: string; - userId: string; - } - - export type ApiResponse = Promise; - export type ApiRouter = (endpoint: Endpoint, parameters: any) => ApiResponse; - export type ApiHandler = (parameters: any, methodOptions?: any) => ApiResponse; - export type Action = "create" | "retrieve" | "update"; - - export type Endpoint = { get: ApiHandler, create: ApiHandler, batchUpdate: ApiHandler }; - export type EndpointParameters = GlobalOptions & { version: "v1" }; - + /** + * + * @param sector + * @param userId + */ export const GetEndpoint = (sector: string, userId: string) => { return new Promise>(resolve => { retrieveOAuthClient(userId).then(auth => { @@ -116,6 +192,10 @@ export namespace GoogleApiServerUtils { }); }; + /** + * + * @param userId + */ export const retrieveAccessToken = (userId: string): Promise => { return new Promise((resolve, reject) => { retrieveCredentials(userId).then( @@ -125,13 +205,17 @@ export namespace GoogleApiServerUtils { }); }; + /** + * + * @param userId + */ export const retrieveOAuthClient = (userId: string): Promise => { return new Promise((resolve, reject) => { retrieveCredentials(userId).then( ({ credentials, refreshed }) => { let client = authenticationClients.get(userId); if (!client) { - authenticationClients.set(userId, client = generateClientWith(credentials)); + authenticationClients.set(userId, client = generateClient(credentials)); } else if (refreshed) { client.setCredentials(credentials); } @@ -142,12 +226,19 @@ export namespace GoogleApiServerUtils { }); }; - function generateClientWith(credentials: Credentials) { + /** + * + * @param credentials + */ + function generateClient(credentials?: Credentials) { const client = new google.auth.OAuth2(installed); - client.setCredentials(credentials); + credentials && client.setCredentials(credentials); return client; } + /** + * + */ export const generateAuthenticationUrl = async () => { return worker.generateAuthUrl({ access_type: 'offline', @@ -155,6 +246,11 @@ export namespace GoogleApiServerUtils { }); }; + /** + * + * @param userId + * @param authenticationCode + */ export const 
processNewUser = async (userId: string, authenticationCode: string): Promise => { return new Promise((resolve, reject) => { worker.getToken(authenticationCode, async (err, credentials) => { @@ -174,6 +270,11 @@ export namespace GoogleApiServerUtils { }); }; + /** + * + */ + export type EnrichedCredentials = Credentials & { userInfo: UserInfo }; + /** * It's pretty cool: the credentials id_token is split into thirds by periods. * The middle third contains a base64-encoded JSON string with all the @@ -187,6 +288,10 @@ export namespace GoogleApiServerUtils { return { ...credentials, userInfo }; }; + /** + * + * @param userId + */ const retrieveCredentials = async (userId: string): Promise => { return new Promise((resolve, reject) => { Database.Auxiliary.GoogleAuthenticationToken.Fetch(userId).then(credentials => { @@ -203,13 +308,18 @@ export namespace GoogleApiServerUtils { }); }; + /** + * + * @param credentials + * @param userId + */ const refreshAccessToken = (credentials: Credentials, userId: string) => { return new Promise(resolve => { let headerParameters = { headers: { 'Content-Type': 'application/x-www-form-urlencoded' } }; let queryParameters = { refreshToken: credentials.refresh_token, - ...installed, - grant_type: "refresh_token" + grant_type: "refresh_token", + ...installed }; let url = `${refreshEndpoint}?${qs.stringify(queryParameters)}`; request.post(url, headerParameters).then(async response => { -- cgit v1.2.3-70-g09d2 From c1c919d4d44a40d59f2ec714c143cd8f03ad3481 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Mon, 28 Oct 2019 20:04:51 -0400 Subject: clean up --- src/server/apis/google/GoogleApiServerUtils.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) (limited to 'src') diff --git a/src/server/apis/google/GoogleApiServerUtils.ts b/src/server/apis/google/GoogleApiServerUtils.ts index 9071b0485..884487509 100644 --- a/src/server/apis/google/GoogleApiServerUtils.ts +++ b/src/server/apis/google/GoogleApiServerUtils.ts @@ -298,9 +298,9 @@ export namespace GoogleApiServerUtils { if (!credentials) { return reject(); } - if (credentials!.expiry_date! < new Date().getTime()) { + if (credentials.expiry_date! < new Date().getTime()) { // Token has expired, so submitting a request for a refreshed access token - return refreshAccessToken(credentials!, userId).then(resolve, reject); + return refreshAccessToken(credentials, userId).then(resolve, reject); } // Authentication successful! 
resolve({ credentials, refreshed: false }); -- cgit v1.2.3-70-g09d2 From ba7568e4fe2e9323a66a91876305f829487bffb9 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Tue, 29 Oct 2019 16:00:21 -0400 Subject: beginning commenting --- src/server/apis/google/GoogleApiServerUtils.ts | 57 ++++++++++++++++++-------- 1 file changed, 40 insertions(+), 17 deletions(-) (limited to 'src') diff --git a/src/server/apis/google/GoogleApiServerUtils.ts b/src/server/apis/google/GoogleApiServerUtils.ts index 884487509..b9984649e 100644 --- a/src/server/apis/google/GoogleApiServerUtils.ts +++ b/src/server/apis/google/GoogleApiServerUtils.ts @@ -41,15 +41,6 @@ const SCOPES = [ */ export namespace GoogleApiServerUtils { - /** - * - */ - export interface GoogleAuthenticationResult { - access_token: string; - avatar: string; - name: string; - } - /** * */ @@ -247,9 +238,30 @@ export namespace GoogleApiServerUtils { }; /** - * - * @param userId - * @param authenticationCode + * This is what we return to the server in processNewUser(), after the + * worker OAuth2Client has used the user-pasted authentication code + * to retrieve an access token and an info token. The avatar is the + * URL to the Google-hosted mono-color, single white letter profile 'image'. + */ + export interface GoogleAuthenticationResult { + access_token: string; + avatar: string; + name: string; + } + + /** + * This method receives the authentication code that the + * user pasted into the overlay in the client side and uses the worker + * and the authentication code to fetch the full set of credentials that + * we'll store in the database for each user. This is called once per + * new account integration. + * @param userId The Dash user id of the user requesting account integration, used to associate the new credentials + * with a Dash user in the googleAuthentication table of the database. + * @param authenticationCode the Google-provided authentication code that the user copied + * from Google's permissions UI and pasted into the overlay. + * @returns the information necessary to authenticate a client side google photos request + * and display basic user information in the overlay on successful authentication. + * This can be expanded as needed by adding properties to the interface GoogleAuthenticationResult. */ export const processNewUser = async (userId: string, authenticationCode: string): Promise => { return new Promise((resolve, reject) => { @@ -271,7 +283,9 @@ export namespace GoogleApiServerUtils { }; /** - * + * This type represents the union of the full set of OAuth2 credentials + * and all of a Google user's publically available information. This is the strucure + * of the JSON object we ultimately store in the googleAuthentication table of the database. */ export type EnrichedCredentials = Credentials & { userInfo: UserInfo }; @@ -282,6 +296,8 @@ export namespace GoogleApiServerUtils { * base64 decode with atob and parse the JSON. * @param credentials the client credentials returned from OAuth after the user * has executed the authentication routine + * @returns the full set of credentials in the structure in which they'll be stored + * in the database. */ const injectUserInfo = (credentials: Credentials): EnrichedCredentials => { const userInfo = JSON.parse(atob(credentials.id_token!.split(".")[1])); @@ -289,8 +305,12 @@ export namespace GoogleApiServerUtils { }; /** - * - * @param userId + * Looks in the database for any credentials object with the given user id, + * and returns them. 
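As an aside on the injectUserInfo comment above, a self-contained sketch of that decoding step; the patch itself uses atob, while Buffer is the Node-native equivalent (the "base64url" encoding assumes Node 15.7 or newer):

    function decodeIdTokenPayload(id_token: string) {
        // header.payload.signature -- keep the middle third
        const payload = id_token.split(".")[1];
        // the payload is a base64url-encoded JSON string describing the Google user
        return JSON.parse(Buffer.from(payload, "base64url").toString());
    }
    // e.g. decodeIdTokenPayload(credentials.id_token!).given_name or .picture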
If the credentials are found but expired, the function will + * automatically refresh the credentials and then resolve with the updated values. + * @param userId the id of the Dash user requesting his/her credentials. Eventually + * might have multiple. + * @returns the credentials and whether or not they were updated in the process */ const retrieveCredentials = async (userId: string): Promise => { return new Promise((resolve, reject) => { @@ -309,8 +329,11 @@ export namespace GoogleApiServerUtils { }; /** - * - * @param credentials + * This function submits a request to OAuth with the local refresh token + * to revalidate the credentials for a given Google user associated with + * the Dash user id passed in. In addition to returning the credentials, it + * writes the diff to the database. + * @param credentials the credentials * @param userId */ const refreshAccessToken = (credentials: Credentials, userId: string) => { -- cgit v1.2.3-70-g09d2 From 3db5ea503754b74681f44ebffeb251dfad8ee65e Mon Sep 17 00:00:00 2001 From: Mohammad Amoush Date: Tue, 29 Oct 2019 17:05:01 -0400 Subject: a few comments --- src/server/Initialization.ts | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) (limited to 'src') diff --git a/src/server/Initialization.ts b/src/server/Initialization.ts index fbb5ae7a6..3ea8f2085 100644 --- a/src/server/Initialization.ts +++ b/src/server/Initialization.ts @@ -19,6 +19,8 @@ import * as whm from 'webpack-hot-middleware'; import * as fs from 'fs'; import * as request from 'request'; +/* RouteSetter is a wrapper around the server that prevents the server + from being exposed. */ export type RouteSetter = (server: RouteManager) => void; export interface InitializationOptions { listenAtPort: number; @@ -38,7 +40,7 @@ export default async function InitializeServer(options: InitializationOptions) { registerAuthenticationRoutes(server); registerCorsProxy(server); - const isRelease = determineEnvironment(); + const isRelease = determineEnvironment(); //vs. dev mode routeSetter(new RouteManager(server, isRelease)); server.listen(listenAtPort, () => console.log(`server started at http://localhost:${listenAtPort}`)); @@ -73,6 +75,7 @@ function buildWithMiddleware(server: express.Express) { return server; } +/* Determine if the enviroment is dev mode or release mode. 
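A small illustration of the release/debug switch this comment refers to; the launch command is an assumption, but the RELEASE check itself is taken from determineEnvironment() below:

    // e.g.  RELEASE=true npm start   -> release mode
    //       npm start                -> debug mode
    const isRelease = process.env.RELEASE === "true"; // any other value (or none) means debug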
*/ function determineEnvironment() { const isRelease = process.env.RELEASE === "true"; -- cgit v1.2.3-70-g09d2 From af25eaf2a848278a58f0993cba2e68c05da0760c Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Tue, 29 Oct 2019 20:00:54 -0400 Subject: comments and fixes for google photos server sid --- src/client/Network.ts | 12 +- src/client/apis/GoogleAuthenticationManager.tsx | 39 ++--- .../apis/google_docs/GoogleApiClientUtils.ts | 8 +- .../apis/google_docs/GooglePhotosClientUtils.ts | 10 +- .../util/Import & Export/DirectoryImportBox.tsx | 5 +- src/client/util/Import & Export/ImageUtils.ts | 4 +- src/new_fields/RichTextUtils.ts | 4 +- src/server/RouteManager.ts | 4 +- src/server/RouteStore.ts | 3 +- src/server/apis/google/GoogleApiServerUtils.ts | 171 +++++++++++---------- src/server/index.ts | 14 +- 11 files changed, 147 insertions(+), 127 deletions(-) (limited to 'src') diff --git a/src/client/Network.ts b/src/client/Network.ts index 75ccb5e99..f9ef27267 100644 --- a/src/client/Network.ts +++ b/src/client/Network.ts @@ -1,18 +1,16 @@ import { Utils } from "../Utils"; -import { CurrentUserUtils } from "../server/authentication/models/current_user_utils"; import requestPromise = require('request-promise'); -export namespace Identified { +export namespace Networking { export async function FetchFromServer(relativeRoute: string) { - return (await fetch(relativeRoute, { headers: { userId: CurrentUserUtils.id } })).text(); + return (await fetch(relativeRoute)).text(); } export async function PostToServer(relativeRoute: string, body?: any) { let options = { uri: Utils.prepend(relativeRoute), method: "POST", - headers: { userId: CurrentUserUtils.id }, body, json: true }; @@ -22,12 +20,10 @@ export namespace Identified { export async function PostFormDataToServer(relativeRoute: string, formData: FormData) { const parameters = { method: 'POST', - headers: { userId: CurrentUserUtils.id }, - body: formData, + body: formData }; const response = await fetch(relativeRoute, parameters); - const text = await response.json(); - return text; + return response.json(); } } \ No newline at end of file diff --git a/src/client/apis/GoogleAuthenticationManager.tsx b/src/client/apis/GoogleAuthenticationManager.tsx index 01dac3996..1ec9d8412 100644 --- a/src/client/apis/GoogleAuthenticationManager.tsx +++ b/src/client/apis/GoogleAuthenticationManager.tsx @@ -3,7 +3,7 @@ import { observer } from "mobx-react"; import * as React from "react"; import MainViewModal from "../views/MainViewModal"; import { Opt } from "../../new_fields/Doc"; -import { Identified } from "../Network"; +import { Networking } from "../Network"; import { RouteStore } from "../../server/RouteStore"; import "./GoogleAuthenticationManager.scss"; @@ -31,7 +31,7 @@ export default class GoogleAuthenticationManager extends React.Component<{}> { } public fetchOrGenerateAccessToken = async () => { - let response = await Identified.FetchFromServer(RouteStore.readGoogleAccessToken); + let response = await Networking.FetchFromServer(RouteStore.readGoogleAccessToken); // if this is an authentication url, activate the UI to register the new access token if (new RegExp(AuthenticationUrl).test(response)) { this.isOpen = true; @@ -39,24 +39,25 @@ export default class GoogleAuthenticationManager extends React.Component<{}> { return new Promise(async resolve => { const disposer = reaction( () => this.authenticationCode, - authenticationCode => { - if (authenticationCode) { - Identified.PostToServer(RouteStore.writeGoogleAccessToken, { authenticationCode }).then( 
- ({ access_token, avatar, name }) => { - runInAction(() => { - this.avatar = avatar; - this.username = name; - }); - this.beginFadeout(); - disposer(); - resolve(access_token); - }, - action(() => { - this.hasBeenClicked = false; - this.success = false; - }) - ); + async authenticationCode => { + if (!authenticationCode) { + return; } + const { access_token, avatar, name } = await Networking.PostToServer( + RouteStore.writeGoogleAccessToken, + { authenticationCode } + ); + runInAction(() => { + this.avatar = avatar; + this.username = name; + }); + this.beginFadeout(); + disposer(); + resolve(access_token); + action(() => { + this.hasBeenClicked = false; + this.success = false; + }); } ); }); diff --git a/src/client/apis/google_docs/GoogleApiClientUtils.ts b/src/client/apis/google_docs/GoogleApiClientUtils.ts index 1cf01fc3d..183679317 100644 --- a/src/client/apis/google_docs/GoogleApiClientUtils.ts +++ b/src/client/apis/google_docs/GoogleApiClientUtils.ts @@ -3,7 +3,7 @@ import { RouteStore } from "../../../server/RouteStore"; import { Opt } from "../../../new_fields/Doc"; import { isArray } from "util"; import { EditorState } from "prosemirror-state"; -import { Identified } from "../../Network"; +import { Networking } from "../../Network"; export const Pulls = "googleDocsPullCount"; export const Pushes = "googleDocsPushCount"; @@ -84,7 +84,7 @@ export namespace GoogleApiClientUtils { } }; try { - const schema: docs_v1.Schema$Document = await Identified.PostToServer(path, parameters); + const schema: docs_v1.Schema$Document = await Networking.PostToServer(path, parameters); return schema.documentId; } catch { return undefined; @@ -157,7 +157,7 @@ export namespace GoogleApiClientUtils { const path = `${RouteStore.googleDocs}/Documents/${Actions.Retrieve}`; try { const parameters = { documentId: options.documentId }; - const schema: RetrievalResult = await Identified.PostToServer(path, parameters); + const schema: RetrievalResult = await Networking.PostToServer(path, parameters); return schema; } catch { return undefined; @@ -173,7 +173,7 @@ export namespace GoogleApiClientUtils { } }; try { - const replies: UpdateResult = await Identified.PostToServer(path, parameters); + const replies: UpdateResult = await Networking.PostToServer(path, parameters); return replies; } catch { return undefined; diff --git a/src/client/apis/google_docs/GooglePhotosClientUtils.ts b/src/client/apis/google_docs/GooglePhotosClientUtils.ts index e93fa6eb4..402fc64b5 100644 --- a/src/client/apis/google_docs/GooglePhotosClientUtils.ts +++ b/src/client/apis/google_docs/GooglePhotosClientUtils.ts @@ -13,7 +13,7 @@ import { Docs, DocumentOptions } from "../../documents/Documents"; import { NewMediaItemResult, MediaItem } from "../../../server/apis/google/SharedTypes"; import { AssertionError } from "assert"; import { DocumentView } from "../../views/nodes/DocumentView"; -import { Identified } from "../../Network"; +import { Networking } from "../../Network"; import GoogleAuthenticationManager from "../GoogleAuthenticationManager"; export namespace GooglePhotos { @@ -78,6 +78,7 @@ export namespace GooglePhotos { } export const CollectionToAlbum = async (options: AlbumCreationOptions): Promise> => { + await GoogleAuthenticationManager.Instance.fetchOrGenerateAccessToken(); const { collection, title, descriptionKey, tag } = options; const dataDocument = Doc.GetProto(collection); const images = ((await DocListCastAsync(dataDocument.data)) || []).filter(doc => Cast(doc.data, ImageField)); @@ -127,6 +128,7 @@ export 
namespace GooglePhotos { export type CollectionConstructor = (data: Array, options: DocumentOptions, ...args: any) => Doc; export const CollectionFromSearch = async (constructor: CollectionConstructor, requested: Opt>): Promise => { + await GoogleAuthenticationManager.Instance.fetchOrGenerateAccessToken(); let response = await Query.ContentSearch(requested); let uploads = await Transactions.WriteMediaItemsToServer(response); const children = uploads.map((upload: Transactions.UploadInformation) => { @@ -147,6 +149,7 @@ export namespace GooglePhotos { const comparator = (a: string, b: string) => (a < b) ? -1 : (a > b ? 1 : 0); export const TagChildImages = async (collection: Doc) => { + await GoogleAuthenticationManager.Instance.fetchOrGenerateAccessToken(); const idMapping = await Cast(collection.googlePhotosIdMapping, Doc); if (!idMapping) { throw new Error("Appending image metadata requires that the targeted collection have already been mapped to an album!"); @@ -304,7 +307,7 @@ export namespace GooglePhotos { }; export const WriteMediaItemsToServer = async (body: { mediaItems: any[] }): Promise => { - const uploads = await Identified.PostToServer(RouteStore.googlePhotosMediaDownload, body); + const uploads = await Networking.PostToServer(RouteStore.googlePhotosMediaDownload, body); return uploads; }; @@ -325,6 +328,7 @@ export namespace GooglePhotos { } export const UploadImages = async (sources: Doc[], album?: AlbumReference, descriptionKey = "caption"): Promise> => { + await GoogleAuthenticationManager.Instance.fetchOrGenerateAccessToken(); if (album && "title" in album) { album = await Create.Album(album.title); } @@ -341,7 +345,7 @@ export namespace GooglePhotos { media.push({ url, description }); } if (media.length) { - const results = await Identified.PostToServer(RouteStore.googlePhotosMediaUpload, { media, album }); + const results = await Networking.PostToServer(RouteStore.googlePhotosMediaUpload, { media, album }); return results; } }; diff --git a/src/client/util/Import & Export/DirectoryImportBox.tsx b/src/client/util/Import & Export/DirectoryImportBox.tsx index d74b51993..2d1b6fe20 100644 --- a/src/client/util/Import & Export/DirectoryImportBox.tsx +++ b/src/client/util/Import & Export/DirectoryImportBox.tsx @@ -20,9 +20,8 @@ import { listSpec } from "../../../new_fields/Schema"; import { GooglePhotos } from "../../apis/google_docs/GooglePhotosClientUtils"; import { SchemaHeaderField } from "../../../new_fields/SchemaHeaderField"; import "./DirectoryImportBox.scss"; -import { Identified } from "../../Network"; +import { Networking } from "../../Network"; import { BatchedArray } from "array-batcher"; -import { ExifData } from "exif"; const unsupported = ["text/html", "text/plain"]; @@ -117,7 +116,7 @@ export default class DirectoryImportBox extends React.Component formData.append(Utils.GenerateGuid(), file); }); - const responses = await Identified.PostFormDataToServer(RouteStore.upload, formData); + const responses = await Networking.PostFormDataToServer(RouteStore.upload, formData); runInAction(() => this.completed += batch.length); return responses as ImageUploadResponse[]; }); diff --git a/src/client/util/Import & Export/ImageUtils.ts b/src/client/util/Import & Export/ImageUtils.ts index c9abf38fa..914f4870a 100644 --- a/src/client/util/Import & Export/ImageUtils.ts +++ b/src/client/util/Import & Export/ImageUtils.ts @@ -3,7 +3,7 @@ import { ImageField } from "../../../new_fields/URLField"; import { Cast, StrCast } from "../../../new_fields/Types"; import { RouteStore } 
from "../../../server/RouteStore"; import { Docs } from "../../documents/Documents"; -import { Identified } from "../../Network"; +import { Networking } from "../../Network"; import { Id } from "../../../new_fields/FieldSymbols"; import { Utils } from "../../../Utils"; @@ -15,7 +15,7 @@ export namespace ImageUtils { return false; } const source = field.url.href; - const response = await Identified.PostToServer(RouteStore.inspectImage, { source }); + const response = await Networking.PostToServer(RouteStore.inspectImage, { source }); const { error, data } = response.exifData; document.exif = error || Docs.Get.DocumentHierarchyFromJson(data); return data !== undefined; diff --git a/src/new_fields/RichTextUtils.ts b/src/new_fields/RichTextUtils.ts index 601939ed2..63d718ce8 100644 --- a/src/new_fields/RichTextUtils.ts +++ b/src/new_fields/RichTextUtils.ts @@ -17,7 +17,7 @@ import { Cast, StrCast } from "./Types"; import { Id } from "./FieldSymbols"; import { DocumentView } from "../client/views/nodes/DocumentView"; import { AssertionError } from "assert"; -import { Identified } from "../client/Network"; +import { Networking } from "../client/Network"; export namespace RichTextUtils { @@ -129,7 +129,7 @@ export namespace RichTextUtils { return { baseUrl, filename }; }); - const uploads = await Identified.PostToServer(RouteStore.googlePhotosMediaDownload, { mediaItems }); + const uploads = await Networking.PostToServer(RouteStore.googlePhotosMediaDownload, { mediaItems }); if (uploads.length !== mediaItems.length) { throw new AssertionError({ expected: mediaItems.length, actual: uploads.length, message: "Error with internally uploading inlineObjects!" }); diff --git a/src/server/RouteManager.ts b/src/server/RouteManager.ts index 21ce9c9e4..eda2a49d2 100644 --- a/src/server/RouteManager.ts +++ b/src/server/RouteManager.ts @@ -49,9 +49,9 @@ export default class RouteManager { let supervised = async (req: express.Request, res: express.Response) => { const { user, originalUrl: target } = req; const core = { req, res, isRelease }; - const tryExecute = async (target: (args: any) => any | Promise, args: any) => { + const tryExecute = async (toExecute: (args: any) => any | Promise, args: any) => { try { - await target(args); + await toExecute(args); } catch (e) { if (onError) { onError({ ...core, error: e }); diff --git a/src/server/RouteStore.ts b/src/server/RouteStore.ts index de2553b2f..a310d0c95 100644 --- a/src/server/RouteStore.ts +++ b/src/server/RouteStore.ts @@ -39,6 +39,7 @@ export enum RouteStore { writeGoogleAccessToken = "/writeGoogleAccessToken", googlePhotosMediaUpload = "/googlePhotosMediaUpload", googlePhotosMediaDownload = "/googlePhotosMediaDownload", - googleDocsGet = "/googleDocsGet" + googleDocsGet = "/googleDocsGet", + checkGoogle = "/checkGoogleAuthentication" } \ No newline at end of file diff --git a/src/server/apis/google/GoogleApiServerUtils.ts b/src/server/apis/google/GoogleApiServerUtils.ts index b9984649e..ff5dc7081 100644 --- a/src/server/apis/google/GoogleApiServerUtils.ts +++ b/src/server/apis/google/GoogleApiServerUtils.ts @@ -7,7 +7,7 @@ import { GaxiosResponse } from "gaxios"; import request = require('request-promise'); import * as qs from 'query-string'; import { Database } from "../../database"; -import path from "path"; +import * as path from "path"; /** * @@ -45,7 +45,7 @@ export namespace GoogleApiServerUtils { * */ export interface CredentialsResult { - credentials: Credentials; + credentials: Opt; refreshed: boolean; } @@ -135,8 +135,8 @@ export namespace 
GoogleApiServerUtils { /** * */ - export const loadClientSecret = async () => { - return new Promise((resolve, reject) => { + export async function loadClientSecret(): Promise { + return new Promise((resolve, reject) => { readFile(path.join(__dirname, "../../credentials/google_docs_credentials.json"), async (err, projectCredentials) => { if (err) { reject(err); @@ -153,7 +153,7 @@ export namespace GoogleApiServerUtils { resolve(); }); }); - }; + } /** * @@ -165,9 +165,12 @@ export namespace GoogleApiServerUtils { * @param sector * @param userId */ - export const GetEndpoint = (sector: string, userId: string) => { - return new Promise>(resolve => { + export async function GetEndpoint(sector: string, userId: string): Promise> { + return new Promise(resolve => { retrieveOAuthClient(userId).then(auth => { + if (!auth) { + return resolve(); + } let routed: Opt; let parameters: EndpointParameters = { auth, version: "v1" }; switch (sector) { @@ -181,29 +184,38 @@ export namespace GoogleApiServerUtils { resolve(routed); }); }); - }; + } /** * * @param userId */ - export const retrieveAccessToken = (userId: string): Promise => { - return new Promise((resolve, reject) => { + export async function retrieveAccessToken(userId: string): Promise { + return new Promise(resolve => { retrieveCredentials(userId).then( - ({ credentials }) => resolve(credentials.access_token!), - error => reject(`Error: unable to authenticate Google Photos API request.\n${error}`) + ({ credentials }) => { + if (credentials) { + return resolve(credentials.access_token!); + } + resolve(); + } ); }); - }; + } /** - * - * @param userId + * Returns an initialized OAuth2 client instance, likely to be passed into Google's + * npm-installed API wrappers that use authenticated client instances rather than access codes for + * security. + * @param userId the Dash user id of the user requesting account integration */ - export const retrieveOAuthClient = (userId: string): Promise => { - return new Promise((resolve, reject) => { + export async function retrieveOAuthClient(userId: string): Promise { + return new Promise((resolve, reject) => { retrieveCredentials(userId).then( ({ credentials, refreshed }) => { + if (!credentials) { + return resolve(); + } let client = authenticationClients.get(userId); if (!client) { authenticationClients.set(userId, client = generateClient(credentials)); @@ -211,31 +223,34 @@ export namespace GoogleApiServerUtils { client.setCredentials(credentials); } resolve(client); - }, - error => reject(`Error: unable to instantiate and certify a new OAuth2 client.\n${error}`) + } ); }); - }; + } /** - * - * @param credentials + * Creates a new OAuth2Client instance, and if provided, sets + * the specific credentials on the client + * @param credentials if you have access to the credentials that you'll eventually set on + * the client, just pass them in at initialization */ - function generateClient(credentials?: Credentials) { + function generateClient(credentials?: Credentials): OAuth2Client { const client = new google.auth.OAuth2(installed); credentials && client.setCredentials(credentials); return client; } /** - * + * Calls on the worker (which does not have and does not need + * any credentials) to produce a url to which the user can + * navigate to give Dash the necessary Google permissions. 
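A hedged usage sketch for the GetEndpoint signature introduced above; documentId and userId are placeholders, and response.data follows the shape returned by the Google Docs v1 wrapper:

    // inside an async handler
    const endpoint = await GoogleApiServerUtils.GetEndpoint(GoogleApiServerUtils.Service.Documents, userId);
    if (endpoint) {
        // Endpoint.get is one of the ApiHandlers this namespace defines
        const response = await endpoint.get({ documentId: "<documentId>" });
        console.log(response.data.title);
    }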
*/ - export const generateAuthenticationUrl = async () => { + export function generateAuthenticationUrl(): string { return worker.generateAuthUrl({ access_type: 'offline', scope: SCOPES.map(relative => prefix + relative), }); - }; + } /** * This is what we return to the server in processNewUser(), after the @@ -255,7 +270,7 @@ export namespace GoogleApiServerUtils { * and the authentication code to fetch the full set of credentials that * we'll store in the database for each user. This is called once per * new account integration. - * @param userId The Dash user id of the user requesting account integration, used to associate the new credentials + * @param userId the Dash user id of the user requesting account integration, used to associate the new credentials * with a Dash user in the googleAuthentication table of the database. * @param authenticationCode the Google-provided authentication code that the user copied * from Google's permissions UI and pasted into the overlay. @@ -263,24 +278,25 @@ export namespace GoogleApiServerUtils { * and display basic user information in the overlay on successful authentication. * This can be expanded as needed by adding properties to the interface GoogleAuthenticationResult. */ - export const processNewUser = async (userId: string, authenticationCode: string): Promise => { - return new Promise((resolve, reject) => { + export async function processNewUser(userId: string, authenticationCode: string): Promise { + const credentials = await new Promise((resolve, reject) => { worker.getToken(authenticationCode, async (err, credentials) => { if (err || !credentials) { reject(err); - return console.error('Error retrieving access token', err); + return; } - const enriched = injectUserInfo(credentials); - await Database.Auxiliary.GoogleAuthenticationToken.Write(userId, enriched); - const { given_name, picture } = enriched.userInfo; - resolve({ - access_token: enriched.access_token!, - avatar: picture, - name: given_name - }); + resolve(credentials); }); }); - }; + const enriched = injectUserInfo(credentials); + await Database.Auxiliary.GoogleAuthenticationToken.Write(userId, enriched); + const { given_name, picture } = enriched.userInfo; + return { + access_token: enriched.access_token!, + avatar: picture, + name: given_name + }; + } /** * This type represents the union of the full set of OAuth2 credentials @@ -299,34 +315,31 @@ export namespace GoogleApiServerUtils { * @returns the full set of credentials in the structure in which they'll be stored * in the database. */ - const injectUserInfo = (credentials: Credentials): EnrichedCredentials => { + function injectUserInfo(credentials: Credentials): EnrichedCredentials { const userInfo = JSON.parse(atob(credentials.id_token!.split(".")[1])); return { ...credentials, userInfo }; - }; + } /** * Looks in the database for any credentials object with the given user id, * and returns them. If the credentials are found but expired, the function will * automatically refresh the credentials and then resolve with the updated values. - * @param userId the id of the Dash user requesting his/her credentials. Eventually - * might have multiple. - * @returns the credentials and whether or not they were updated in the process + * @param userId the id of the Dash user requesting his/her credentials. Eventually, each user might + * be associated with multiple different sets of Google credentials. 
+ * @returns the credentials and a flag indicating whether or not they were refreshed during retrieval */ - const retrieveCredentials = async (userId: string): Promise => { - return new Promise((resolve, reject) => { - Database.Auxiliary.GoogleAuthenticationToken.Fetch(userId).then(credentials => { - if (!credentials) { - return reject(); - } - if (credentials.expiry_date! < new Date().getTime()) { - // Token has expired, so submitting a request for a refreshed access token - return refreshAccessToken(credentials, userId).then(resolve, reject); - } - // Authentication successful! - resolve({ credentials, refreshed: false }); - }); - }); - }; + async function retrieveCredentials(userId: string): Promise { + let credentials: Opt = await Database.Auxiliary.GoogleAuthenticationToken.Fetch(userId); + let refreshed = false; + if (!credentials) { + return { credentials: undefined, refreshed }; + } + // if the token has expired, submit a request for a refreshed access token + if (credentials.expiry_date! <= new Date().getTime()) { + credentials = await refreshAccessToken(credentials, userId); + } + return { credentials, refreshed }; + } /** * This function submits a request to OAuth with the local refresh token @@ -334,26 +347,28 @@ export namespace GoogleApiServerUtils { * the Dash user id passed in. In addition to returning the credentials, it * writes the diff to the database. * @param credentials the credentials - * @param userId + * @param userId the id of the Dash user implicitly requesting that + * his/her credentials be refreshed + * @returns the updated credentials */ - const refreshAccessToken = (credentials: Credentials, userId: string) => { - return new Promise(resolve => { - let headerParameters = { headers: { 'Content-Type': 'application/x-www-form-urlencoded' } }; - let queryParameters = { - refreshToken: credentials.refresh_token, - grant_type: "refresh_token", - ...installed - }; - let url = `${refreshEndpoint}?${qs.stringify(queryParameters)}`; - request.post(url, headerParameters).then(async response => { - let { access_token, expires_in } = JSON.parse(response); - const expiry_date = new Date().getTime() + (expires_in * 1000); - await Database.Auxiliary.GoogleAuthenticationToken.Update(userId, access_token, expiry_date); - credentials.access_token = access_token; - credentials.expiry_date = expiry_date; - resolve({ credentials, refreshed: true }); - }); + async function refreshAccessToken(credentials: Credentials, userId: string): Promise { + let headerParameters = { headers: { 'Content-Type': 'application/x-www-form-urlencoded' } }; + let url = `${refreshEndpoint}?${qs.stringify({ + refreshToken: credentials.refresh_token, + grant_type: "refresh_token", + ...installed + })}`; + const { access_token, expires_in } = await new Promise(async resolve => { + const response = await request.post(url, headerParameters); + resolve(JSON.parse(response)); }); - }; + // expires_in is in seconds, but we're building the new expiry date in milliseconds + const expiry_date = new Date().getTime() + (expires_in * 1000); + await Database.Auxiliary.GoogleAuthenticationToken.Update(userId, access_token, expiry_date); + // update the relevant properties + credentials.access_token = access_token; + credentials.expiry_date = expiry_date; + return credentials; + } } \ No newline at end of file diff --git a/src/server/index.ts b/src/server/index.ts index eb19c71a9..860cde3b5 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -593,11 +593,11 @@ function routeSetter(router: RouteManager) { 
subscription: RouteStore.readGoogleAccessToken, onValidation: async ({ user, res }) => { const userId = user.id; - const token = await Database.Auxiliary.GoogleAuthenticationToken.Fetch(userId); + const token = await GoogleApiServerUtils.retrieveAccessToken(userId); if (!token) { - return res.send(await GoogleApiServerUtils.generateAuthenticationUrl()); + return res.send(GoogleApiServerUtils.generateAuthenticationUrl()); } - return GoogleApiServerUtils.retrieveAccessToken(userId).then(token => res.send(token)); + return res.send(token); } }); @@ -609,7 +609,7 @@ function routeSetter(router: RouteManager) { } }); - const tokenError = "Unable to successfully upload bytes for all images!"; + const authenticationError = "Unable to authenticate Google credentials before uploading to Google Photos!"; const mediaError = "Unable to convert all uploaded bytes to media items!"; router.addSupervisedRoute({ @@ -618,8 +618,12 @@ function routeSetter(router: RouteManager) { onValidation: async ({ user, req, res }) => { const { media } = req.body; - let failed: number[] = []; const token = await GoogleApiServerUtils.retrieveAccessToken(user.id); + if (!token) { + return _error(res, authenticationError); + } + + let failed: number[] = []; const newMediaItems = await BatchedArray.from(media, { batchSize: 25 }).batchedMapPatientInterval( { magnitude: 100, unit: TimeUnit.Milliseconds }, async (batch: GooglePhotosUploadUtils.MediaInput[]) => { -- cgit v1.2.3-70-g09d2 From d4d8c2835c8e1e943f77a14e2b87df05f5848dbd Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Tue, 29 Oct 2019 23:26:20 -0400 Subject: finished cleaning and commenting GoogleApiServerUtils --- src/server/apis/google/GoogleApiServerUtils.ts | 233 ++++++++++++------------- 1 file changed, 114 insertions(+), 119 deletions(-) (limited to 'src') diff --git a/src/server/apis/google/GoogleApiServerUtils.ts b/src/server/apis/google/GoogleApiServerUtils.ts index ff5dc7081..ec7c2cfe1 100644 --- a/src/server/apis/google/GoogleApiServerUtils.ts +++ b/src/server/apis/google/GoogleApiServerUtils.ts @@ -2,7 +2,6 @@ import { google } from "googleapis"; import { readFile } from "fs"; import { OAuth2Client, Credentials, OAuth2ClientOptions } from "google-auth-library"; import { Opt } from "../../../new_fields/Doc"; -import { GlobalOptions } from "googleapis-common"; import { GaxiosResponse } from "gaxios"; import request = require('request-promise'); import * as qs from 'query-string'; @@ -10,19 +9,12 @@ import { Database } from "../../database"; import * as path from "path"; /** - * + * Scopes give Google users fine granularity of control + * over the information they make accessible via the API. + * This is the somewhat overkill list of what Dash requests + * from the user. 
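Relating to the /readGoogleAccessToken change shown above: the route now replies with either a ready-to-use access token or, for unlinked accounts, the authentication url. A sketch of how a caller can tell the two apart, mirroring the regular-expression test in GoogleAuthenticationManager (the pattern and helper names are assumptions):

    // inside an async client-side handler
    const response = await Networking.FetchFromServer(RouteStore.readGoogleAccessToken);
    if (/accounts\.google\.com/.test(response)) {
        promptUserWithAuthenticationUrl(response); // hypothetical helper: open Google's permissions page
    } else {
        usePhotosAccessToken(response); // hypothetical helper: the response is the token itself
    }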
*/ -const prefix = 'https://www.googleapis.com/auth/'; - -/** - * - */ -const refreshEndpoint = "https://oauth2.googleapis.com/token"; - -/** - * - */ -const SCOPES = [ +const scope = [ 'documents.readonly', 'documents', 'presentations', @@ -33,7 +25,7 @@ const SCOPES = [ 'photoslibrary.appendonly', 'photoslibrary.sharing', 'userinfo.profile' -]; +].map(relative => `https://www.googleapis.com/auth/${relative}`); /** * This namespace manages server side authentication for Google API queries, either @@ -42,33 +34,9 @@ const SCOPES = [ export namespace GoogleApiServerUtils { /** - * - */ - export interface CredentialsResult { - credentials: Opt; - refreshed: boolean; - } - - /** - * - */ - export interface UserInfo { - at_hash: string; - aud: string; - azp: string; - exp: number; - family_name: string; - given_name: string; - iat: number; - iss: string; - locale: string; - name: string; - picture: string; - sub: string; - } - - /** - * + * As we expand out to more Google APIs that are accessible from + * the 'googleapis' module imported above, this enum will record + * the list and provide a unified string representation of each API. */ export enum Service { Documents = "Documents", @@ -76,15 +44,10 @@ export namespace GoogleApiServerUtils { } /** - * - */ - export interface CredentialInformation { - credentialsPath: string; - userId: string; - } - - /** - * + * Global credentials read once from a JSON file + * before the server is started that + * allow us to build OAuth2 clients with Dash's + * application specific credentials. */ let installed: OAuth2ClientOptions; @@ -99,27 +62,33 @@ export namespace GoogleApiServerUtils { let worker: OAuth2Client; /** - * + * A briefer format for the response from a 'googleapis' API request */ export type ApiResponse = Promise; /** - * + * A generic form for a handler that executes some request on the endpoint */ export type ApiRouter = (endpoint: Endpoint, parameters: any) => ApiResponse; /** - * + * A generic form for the asynchronous function that actually submits the + * request to the API and returns the corresporing response. Helpful when + * making an extensible endpoint definition. */ export type ApiHandler = (parameters: any, methodOptions?: any) => ApiResponse; /** - * + * A literal union type indicating the valid actions for these 'googleapis' + * requestions */ export type Action = "create" | "retrieve" | "update"; /** - * + * An interface defining any entity on which one can invoke + * anuy of the following handlers. All 'googleapis' wrappers + * such as google.docs().documents and google.slides().presentations + * satisfy this interface. */ export interface Endpoint { get: ApiHandler; @@ -128,12 +97,10 @@ export namespace GoogleApiServerUtils { } /** - * - */ - export type EndpointParameters = GlobalOptions & { version: "v1" }; - - /** - * + * This function is called once before the server is started, + * reading in Dash's project-specific credentials (client secret + * and client id) for later repeated access. It also sets up the + * global, intentionally unauthenticated worker OAuth2 client instance. */ export async function loadClientSecret(): Promise { return new Promise((resolve, reject) => { @@ -156,75 +123,83 @@ export namespace GoogleApiServerUtils { } /** - * + * Maps the Dash user id of a given user to their single + * associated OAuth2 client, mitigating the creation + * of needless duplicate clients that would arise from + * making one new client instance per request. 
*/ const authenticationClients = new Map(); /** - * - * @param sector - * @param userId + * This function receives the target sector ("which G-Suite app's API am I interested in?") + * and the id of the Dash user making the request to the API. With this information, it generates + * an authenticated OAuth2 client and passes it into the relevant 'googleapis' wrapper. + * @param sector the particular desired G-Suite 'googleapis' API (docs, slides, etc.) + * @param userId the id of the Dash user making the request to the API + * @returns the relevant 'googleapis' wrapper, if any */ export async function GetEndpoint(sector: string, userId: string): Promise> { - return new Promise(resolve => { - retrieveOAuthClient(userId).then(auth => { - if (!auth) { - return resolve(); - } - let routed: Opt; - let parameters: EndpointParameters = { auth, version: "v1" }; - switch (sector) { - case Service.Documents: - routed = google.docs(parameters).documents; - break; - case Service.Slides: - routed = google.slides(parameters).presentations; - break; - } - resolve(routed); - }); + return new Promise(async resolve => { + const auth = await retrieveOAuthClient(userId); + if (!auth) { + return resolve(); + } + let routed: Opt; + let parameters: any = { auth, version: "v1" }; + switch (sector) { + case Service.Documents: + routed = google.docs(parameters).documents; + break; + case Service.Slides: + routed = google.slides(parameters).presentations; + break; + } + resolve(routed); }); } /** - * - * @param userId + * Returns the lengthy string or access token that can be passed into + * the headers of an API request or into the constructor of the Photos + * client API wrapper. + * @param userId the Dash user id of the user requesting his/her associated + * access_token + * @returns the current access_token associated with the requesting + * Dash user. The access_token is valid for only an hour, and + * is then refreshed. */ export async function retrieveAccessToken(userId: string): Promise { - return new Promise(resolve => { - retrieveCredentials(userId).then( - ({ credentials }) => { - if (credentials) { - return resolve(credentials.access_token!); - } - resolve(); - } - ); + return new Promise(async resolve => { + const { credentials } = await retrieveCredentials(userId); + if (!credentials) { + return resolve(); + } + resolve(credentials.access_token!); }); } /** - * Returns an initialized OAuth2 client instance, likely to be passed into Google's + * Manipulates a mapping such that, in the limit, each Dash user has + * an associated authenticated OAuth2 client at their disposal. This + * function ensures that the client's credentials always remain up to date + * @param userId the Dash user id of the user requesting account integration + * @returns returns an initialized OAuth2 client instance, likely to be passed into Google's * npm-installed API wrappers that use authenticated client instances rather than access codes for * security. 
- * @param userId the Dash user id of the user requesting account integration */ export async function retrieveOAuthClient(userId: string): Promise { - return new Promise((resolve, reject) => { - retrieveCredentials(userId).then( - ({ credentials, refreshed }) => { - if (!credentials) { - return resolve(); - } - let client = authenticationClients.get(userId); - if (!client) { - authenticationClients.set(userId, client = generateClient(credentials)); - } else if (refreshed) { - client.setCredentials(credentials); - } - resolve(client); - } - ); + return new Promise(async resolve => { + const { credentials, refreshed } = await retrieveCredentials(userId); + if (!credentials) { + return resolve(); + } + let client = authenticationClients.get(userId); + if (!client) { + authenticationClients.set(userId, client = generateClient(credentials)); + } else if (refreshed) { + client.setCredentials(credentials); + } + resolve(client); }); } @@ -233,6 +208,7 @@ export namespace GoogleApiServerUtils { * the specific credentials on the client * @param credentials if you have access to the credentials that you'll eventually set on * the client, just pass them in at initialization + * @returns the newly created, potentially certified, OAuth2 client instance */ function generateClient(credentials?: Credentials): OAuth2Client { const client = new google.auth.OAuth2(installed); @@ -244,12 +220,10 @@ export namespace GoogleApiServerUtils { * Calls on the worker (which does not have and does not need * any credentials) to produce a url to which the user can * navigate to give Dash the necessary Google permissions. + * @returns the newly generated url to the authentication landing page */ export function generateAuthenticationUrl(): string { - return worker.generateAuthUrl({ - access_type: 'offline', - scope: SCOPES.map(relative => prefix + relative), - }); + return worker.generateAuthUrl({ scope, access_type: 'offline' }); } /** @@ -305,6 +279,26 @@ export namespace GoogleApiServerUtils { */ export type EnrichedCredentials = Credentials & { userInfo: UserInfo }; + /** + * This interface defines all of the information we + * receive from parsing the base64 encoded info-token + * for a Google user. + */ + export interface UserInfo { + at_hash: string; + aud: string; + azp: string; + exp: number; + family_name: string; + given_name: string; + iat: number; + iss: string; + locale: string; + name: string; + picture: string; + sub: string; + } + /** * It's pretty cool: the credentials id_token is split into thirds by periods. * The middle third contains a base64-encoded JSON string with all the @@ -316,7 +310,7 @@ export namespace GoogleApiServerUtils { * in the database. */ function injectUserInfo(credentials: Credentials): EnrichedCredentials { - const userInfo = JSON.parse(atob(credentials.id_token!.split(".")[1])); + const userInfo: UserInfo = JSON.parse(atob(credentials.id_token!.split(".")[1])); return { ...credentials, userInfo }; } @@ -326,15 +320,16 @@ export namespace GoogleApiServerUtils { * automatically refresh the credentials and then resolve with the updated values. * @param userId the id of the Dash user requesting his/her credentials. Eventually, each user might * be associated with multiple different sets of Google credentials. 
- * @returns the credentials and a flag indicating whether or not they were refreshed during retrieval + * @returns the credentials, or undefined if the user has no stored associated credentials, + * and a flag indicating whether or not they were refreshed during retrieval */ - async function retrieveCredentials(userId: string): Promise { + async function retrieveCredentials(userId: string): Promise<{ credentials: Opt, refreshed: boolean }> { let credentials: Opt = await Database.Auxiliary.GoogleAuthenticationToken.Fetch(userId); let refreshed = false; if (!credentials) { return { credentials: undefined, refreshed }; } - // if the token has expired, submit a request for a refreshed access token + // check for token expiry if (credentials.expiry_date! <= new Date().getTime()) { credentials = await refreshAccessToken(credentials, userId); } @@ -353,7 +348,7 @@ export namespace GoogleApiServerUtils { */ async function refreshAccessToken(credentials: Credentials, userId: string): Promise { let headerParameters = { headers: { 'Content-Type': 'application/x-www-form-urlencoded' } }; - let url = `${refreshEndpoint}?${qs.stringify({ + let url = `https://oauth2.googleapis.com/token?${qs.stringify({ refreshToken: credentials.refresh_token, grant_type: "refresh_token", ...installed -- cgit v1.2.3-70-g09d2 From 109be54065038392b19d9dbafbccc9205f198766 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Tue, 29 Oct 2019 23:37:42 -0400 Subject: db error handling and example code --- src/server/apis/google/GoogleApiServerUtils.ts | 3 +++ src/server/database.ts | 4 ++++ 2 files changed, 7 insertions(+) (limited to 'src') diff --git a/src/server/apis/google/GoogleApiServerUtils.ts b/src/server/apis/google/GoogleApiServerUtils.ts index ec7c2cfe1..35a2541a9 100644 --- a/src/server/apis/google/GoogleApiServerUtils.ts +++ b/src/server/apis/google/GoogleApiServerUtils.ts @@ -248,6 +248,9 @@ export namespace GoogleApiServerUtils { * with a Dash user in the googleAuthentication table of the database. * @param authenticationCode the Google-provided authentication code that the user copied * from Google's permissions UI and pasted into the overlay. + * + * EXAMPLE CODE: 4/sgF2A5uGg4xASHf7VQDnLtdqo3mUlfQqLSce_HYz5qf1nFtHj9YTeGs + * * @returns the information necessary to authenticate a client side google photos request * and display basic user information in the overlay on successful authentication. * This can be expanded as needed by adding properties to the interface GoogleAuthenticationResult. 
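For reference, a sketch of the standard refresh_token grant against Google's token endpoint that the refresh logic above performs; parameter names follow the OAuth2 convention, the credential values are placeholders, and this is not the patch's own request-building code.

import request = require('request-promise');

async function refreshWithGoogle(refreshToken: string, clientId: string, clientSecret: string) {
    return request.post("https://oauth2.googleapis.com/token", {
        form: {
            client_id: clientId,
            client_secret: clientSecret,
            refresh_token: refreshToken,
            grant_type: "refresh_token"
        },
        json: true // parse the JSON response: { access_token, expires_in, ... }
    });
}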
diff --git a/src/server/database.ts b/src/server/database.ts index 79dd26b7d..b81fc03a4 100644 --- a/src/server/database.ts +++ b/src/server/database.ts @@ -60,6 +60,10 @@ export namespace Database { constructor() { this.MongoClient.connect(url, (_err, client) => { + if (!client) { + console.error("\nPlease start MongoDB by running 'mongod' in a terminal before continuing...\n"); + process.exit(0); + } this.db = client.db(); this.onConnect.forEach(fn => fn()); }); -- cgit v1.2.3-70-g09d2 From 9c7e619fb9d3116649ec3779bd528b947235d5a4 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Wed, 30 Oct 2019 15:51:29 -0400 Subject: updated array batcher --- package.json | 2 +- .../util/Import & Export/DirectoryImportBox.tsx | 5 +-- src/server/RouteManager.ts | 1 + src/server/apis/google/GooglePhotosUploadUtils.ts | 11 ++--- src/server/index.ts | 48 ++++++++++++---------- 5 files changed, 36 insertions(+), 31 deletions(-) (limited to 'src') diff --git a/package.json b/package.json index 8cbbb84af..4572a3f73 100644 --- a/package.json +++ b/package.json @@ -115,7 +115,7 @@ "@types/youtube": "0.0.38", "adm-zip": "^0.4.13", "archiver": "^3.0.3", - "array-batcher": "^1.1.3", + "array-batcher": "^1.2.3", "async": "^2.6.2", "babel-runtime": "^6.26.0", "bcrypt-nodejs": "0.0.3", diff --git a/src/client/util/Import & Export/DirectoryImportBox.tsx b/src/client/util/Import & Export/DirectoryImportBox.tsx index 2d1b6fe20..bdd59cb16 100644 --- a/src/client/util/Import & Export/DirectoryImportBox.tsx +++ b/src/client/util/Import & Export/DirectoryImportBox.tsx @@ -107,7 +107,7 @@ export default class DirectoryImportBox extends React.Component runInAction(() => this.phase = `Internal: uploading ${this.quota - this.completed} files to Dash...`); - const uploads = await BatchedArray.from(validated, { batchSize: 15 }).batchedMapAsync(async batch => { + const uploads = await BatchedArray.from(validated, { batchSize: 15 }).batchedMapAsync(async (batch, collector) => { const formData = new FormData(); batch.forEach(file => { @@ -116,9 +116,8 @@ export default class DirectoryImportBox extends React.Component formData.append(Utils.GenerateGuid(), file); }); - const responses = await Networking.PostFormDataToServer(RouteStore.upload, formData); + collector.push(...(await Networking.PostFormDataToServer(RouteStore.upload, formData))); runInAction(() => this.completed += batch.length); - return responses as ImageUploadResponse[]; }); await Promise.all(uploads.map(async upload => { diff --git a/src/server/RouteManager.ts b/src/server/RouteManager.ts index eda2a49d2..c1d38327f 100644 --- a/src/server/RouteManager.ts +++ b/src/server/RouteManager.ts @@ -114,6 +114,7 @@ export const STATUS = { }; export function _error(res: express.Response, message: string, error?: any) { + console.error(message); res.statusMessage = message; res.status(STATUS.EXECUTION_ERROR).send(error); } diff --git a/src/server/apis/google/GooglePhotosUploadUtils.ts b/src/server/apis/google/GooglePhotosUploadUtils.ts index 172fa8d46..d3442338b 100644 --- a/src/server/apis/google/GooglePhotosUploadUtils.ts +++ b/src/server/apis/google/GooglePhotosUploadUtils.ts @@ -1,7 +1,7 @@ import request = require('request-promise'); import { GoogleApiServerUtils } from './GoogleApiServerUtils'; import * as path from 'path'; -import { MediaItemCreationResult } from './SharedTypes'; +import { MediaItemCreationResult, NewMediaItemResult } from './SharedTypes'; import { NewMediaItem } from "../../index"; import { BatchedArray, TimeUnit } from 'array-batcher'; import { 
DashUploadUtils } from '../../DashUploadUtils'; @@ -50,9 +50,9 @@ export namespace GooglePhotosUploadUtils { }; export const CreateMediaItems = async (bearerToken: string, newMediaItems: NewMediaItem[], album?: { id: string }): Promise => { - const newMediaItemResults = await BatchedArray.from(newMediaItems, { batchSize: 50 }).batchedMapPatientInterval( + const newMediaItemResults = await BatchedArray.from(newMediaItems, { batchSize: 50 }).batchedMapPatientInterval( { magnitude: 100, unit: TimeUnit.Milliseconds }, - async (batch: NewMediaItem[]) => { + async (batch: NewMediaItem[], collector) => { const parameters = { method: 'POST', headers: headers('json', bearerToken), @@ -61,7 +61,7 @@ export namespace GooglePhotosUploadUtils { json: true }; album && (parameters.body.albumId = album.id); - return (await new Promise((resolve, reject) => { + const { newMediaItemResults } = await new Promise((resolve, reject) => { request(parameters, (error, _response, body) => { if (error) { reject(error); @@ -69,7 +69,8 @@ export namespace GooglePhotosUploadUtils { resolve(body); } }); - })).newMediaItemResults; + }); + collector.push(...newMediaItemResults); } ); return { newMediaItemResults }; diff --git a/src/server/index.ts b/src/server/index.ts index 860cde3b5..05c866eae 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -573,18 +573,15 @@ function routeSetter(router: RouteManager) { onValidation: async ({ req, res, user }) => { let sector: GoogleApiServerUtils.Service = req.params.sector as GoogleApiServerUtils.Service; let action: GoogleApiServerUtils.Action = req.params.action as GoogleApiServerUtils.Action; - return GoogleApiServerUtils.GetEndpoint(GoogleApiServerUtils.Service[sector], user.id).then(endpoint => { - let handler = EndpointHandlerMap.get(action); - if (endpoint && handler) { - let execute = handler(endpoint, req.body).then( - response => res.send(response.data), - rejection => res.send(rejection) - ); - execute.catch(exception => res.send(exception)); - return; - } - res.send(undefined); - }); + const endpoint = await GoogleApiServerUtils.GetEndpoint(GoogleApiServerUtils.Service[sector], user.id); + let handler = EndpointHandlerMap.get(action); + if (endpoint && handler) { + handler(endpoint, req.body) + .then(response => res.send(response.data)) + .catch(exception => res.send(exception)); + return; + } + res.send(undefined); } }); @@ -611,6 +608,12 @@ function routeSetter(router: RouteManager) { const authenticationError = "Unable to authenticate Google credentials before uploading to Google Photos!"; const mediaError = "Unable to convert all uploaded bytes to media items!"; + interface GooglePhotosUploadFailure { + batch: number; + index: number; + url: string; + reason: string; + } router.addSupervisedRoute({ method: Method.POST, @@ -623,30 +626,31 @@ function routeSetter(router: RouteManager) { return _error(res, authenticationError); } - let failed: number[] = []; - const newMediaItems = await BatchedArray.from(media, { batchSize: 25 }).batchedMapPatientInterval( + let failed: GooglePhotosUploadFailure[] = []; + const batched = BatchedArray.from(media, { batchSize: 25 }); + const newMediaItems = await batched.batchedMapPatientInterval( { magnitude: 100, unit: TimeUnit.Milliseconds }, - async (batch: GooglePhotosUploadUtils.MediaInput[]) => { - const newMediaItems: NewMediaItem[] = []; + async (batch, collector, { completedBatches }) => { for (let index = 0; index < batch.length; index++) { - const element = batch[index]; - const uploadToken = await 
GooglePhotosUploadUtils.DispatchGooglePhotosUpload(token, element.url); + const { url, description } = batch[index]; + const fail = (reason: string) => failed.push({ reason, batch: completedBatches + 1, index, url }); + const uploadToken = await GooglePhotosUploadUtils.DispatchGooglePhotosUpload(token, url).catch(fail); if (!uploadToken) { - failed.push(index); + fail(`${path.extname(url)} is not an accepted extension`); } else { - newMediaItems.push({ - description: element.description, + collector.push({ + description, simpleMediaItem: { uploadToken } }); } } - return newMediaItems; } ); const failedCount = failed.length; if (failedCount) { console.error(`Unable to upload ${failedCount} image${failedCount === 1 ? "" : "s"} to Google's servers`); + console.log(failed.map(({ reason, batch, index, url }) => `@${batch}.${index}: ${url} failed: ${reason}`).join('\n')); } return GooglePhotosUploadUtils.CreateMediaItems(token, newMediaItems, req.body.album).then( -- cgit v1.2.3-70-g09d2 From f48b2729b294d08da0c99a242f9ebb4d7aab4407 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Thu, 31 Oct 2019 01:58:42 -0400 Subject: commented and cleaned google photos upload utils --- src/server/DashUploadUtils.ts | 6 +- src/server/apis/google/GooglePhotosUploadUtils.ts | 100 +++++++++++++++++----- src/server/index.ts | 2 +- 3 files changed, 84 insertions(+), 24 deletions(-) (limited to 'src') diff --git a/src/server/DashUploadUtils.ts b/src/server/DashUploadUtils.ts index 46d897339..9fddb466c 100644 --- a/src/server/DashUploadUtils.ts +++ b/src/server/DashUploadUtils.ts @@ -24,9 +24,13 @@ export namespace DashUploadUtils { const gifs = [".gif"]; const pngs = [".png"]; const jpgs = [".jpg", ".jpeg"]; - export const imageFormats = [...pngs, ...jpgs, ...gifs]; + const imageFormats = [...pngs, ...jpgs, ...gifs]; const videoFormats = [".mov", ".mp4"]; + export function validateExtension(url: string) { + return imageFormats.includes(path.extname(url).toLowerCase()); + } + const size = "content-length"; const type = "content-type"; diff --git a/src/server/apis/google/GooglePhotosUploadUtils.ts b/src/server/apis/google/GooglePhotosUploadUtils.ts index d3442338b..a98399621 100644 --- a/src/server/apis/google/GooglePhotosUploadUtils.ts +++ b/src/server/apis/google/GooglePhotosUploadUtils.ts @@ -1,56 +1,111 @@ import request = require('request-promise'); -import { GoogleApiServerUtils } from './GoogleApiServerUtils'; import * as path from 'path'; import { MediaItemCreationResult, NewMediaItemResult } from './SharedTypes'; import { NewMediaItem } from "../../index"; import { BatchedArray, TimeUnit } from 'array-batcher'; import { DashUploadUtils } from '../../DashUploadUtils'; +/** + * This namespace encompasses the logic + * necessary to upload images to Google's server, + * and then initialize / create those images in the Photos + * API given the upload tokens returned from the initial + * uploading process. + * + * https://developers.google.com/photos/library/reference/rest/v1/mediaItems/batchCreate + */ export namespace GooglePhotosUploadUtils { - export interface Paths { - uploadDirectory: string; - credentialsPath: string; - tokenPath: string; - } - - export interface MediaInput { + /** + * Specifies the structure of the object + * necessary to upload bytes to Google's servers. + * The url is streamed to access the image's bytes, + * and the description is what appears in Google Photos' + * description field. 
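     * (A hypothetical example value, for illustration only:
     * { url: "http://localhost:1050/files/upload_abc123.png", description: "uploaded from Dash" }.)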
+     */
+    export interface UploadSource {
         url: string;
         description: string;
     }
-    const prepend = (extension: string) => `https://photoslibrary.googleapis.com/v1/${extension}`;
-    const headers = (type: string, token: string) => ({
-        'Content-Type': `application/${type}`,
-        'Authorization': `Bearer ${token}`,
-    });
+    /**
+     * A utility function to streamline making
+     * calls to the API's url - accentuates
+     * the relative path in the caller.
+     * @param extension the desired
+     * subset of the API
+     */
+    function prepend(extension: string): string {
+        return `https://photoslibrary.googleapis.com/v1/${extension}`;
+    }
+
+    /**
+     * Factors out the creation of the API request's
+     * authentication elements stored in the header.
+     * @param type the contents of the request
+     * @param token the user-specific Google access token
+     */
+    function headers(type: string, token: string) {
+        return {
+            'Content-Type': `application/${type}`,
+            'Authorization': `Bearer ${token}`,
+        };
+    }
-    export const DispatchGooglePhotosUpload = async (bearerToken: string, url: string) => {
-        if (!DashUploadUtils.imageFormats.includes(path.extname(url))) {
+    /**
+     * This is the first step in the remote image creation process.
+     * Here we upload the raw bytes of the image to Google's servers by
+     * setting authentication and other required header properties and including
+     * the raw bytes of the image, to be uploaded, in the body of the request.
+     * @param bearerToken the user-specific Google access token, specifies the account associated
+     * with the eventual image creation
+     * @param url the url of the image to upload
+     * @param filename an optional name associated with the uploaded image - if not specified,
+     * defaults to the filename (basename) in the url
+     */
+    export const DispatchGooglePhotosUpload = async (bearerToken: string, url: string, filename?: string): Promise => {
+        // check if the url points to a non-image or an unsupported format
+        if (!DashUploadUtils.validateExtension(url)) {
             return undefined;
         }
-        const body = await request(url, { encoding: null });
         const parameters = {
             method: 'POST',
+            uri: prepend('uploads'),
             headers: {
                 ...headers('octet-stream', bearerToken),
-                'X-Goog-Upload-File-Name': path.basename(url),
+                'X-Goog-Upload-File-Name': filename || path.basename(url),
                 'X-Goog-Upload-Protocol': 'raw'
             },
-            uri: prepend('uploads'),
-            body
+            body: await request(url, { encoding: null }) // returns a readable stream with the unencoded binary image data
         };
-        return new Promise((resolve, reject) => request(parameters, (error, _response, body) => {
             if (error) {
-                console.log(error);
+                // on rejection, the server logs the error and the offending image
                 return reject(error);
             }
             resolve(body);
         }));
     };
+    /**
+     * This is the second step in the remote image creation process: having uploaded
+     * the raw bytes of the image and received / stored pointers (upload tokens) to those
+     * bytes, we can now instruct the API to finalize the creation of those images by
+     * submitting a batch create request with the list of upload tokens and the description
+     * to be associated with each resulting new image.
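     * (For orientation, a single hypothetical NewMediaItem passed in here looks like
     * { description: "uploaded from Dash", simpleMediaItem: { uploadToken: "<token from the upload step>" } }.)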
+ * @param bearerToken the user-specific Google access token, specifies the account associated + * with the eventual image creation + * @param newMediaItems a list of objects containing a description and, effectively, the + * pointer to the uploaded bytes + * @param album if included, will add all of the newly created remote images to the album + * with the specified id + */ export const CreateMediaItems = async (bearerToken: string, newMediaItems: NewMediaItem[], album?: { id: string }): Promise => { - const newMediaItemResults = await BatchedArray.from(newMediaItems, { batchSize: 50 }).batchedMapPatientInterval( + // it's important to note that the API can't handle more than 50 items in each request and + // seems to need at least some latency between requests (spamming it synchronously has led to the server returning errors)... + const batched = BatchedArray.from(newMediaItems, { batchSize: 50 }); + // ...so we execute them in delayed batches and await the entire execution + const newMediaItemResults = await batched.batchedMapPatientInterval( { magnitude: 100, unit: TimeUnit.Milliseconds }, async (batch: NewMediaItem[], collector) => { const parameters = { @@ -60,6 +115,7 @@ export namespace GooglePhotosUploadUtils { body: { newMediaItems: batch } as any, json: true }; + // register the target album, if provided album && (parameters.body.albumId = album.id); const { newMediaItemResults } = await new Promise((resolve, reject) => { request(parameters, (error, _response, body) => { diff --git a/src/server/index.ts b/src/server/index.ts index 05c866eae..9f3e34761 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -627,7 +627,7 @@ function routeSetter(router: RouteManager) { } let failed: GooglePhotosUploadFailure[] = []; - const batched = BatchedArray.from(media, { batchSize: 25 }); + const batched = BatchedArray.from(media, { batchSize: 25 }); const newMediaItems = await batched.batchedMapPatientInterval( { magnitude: 100, unit: TimeUnit.Milliseconds }, async (batch, collector, { completedBatches }) => { -- cgit v1.2.3-70-g09d2 From c53d599f8ecffe173d8df06777721658f065674a Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Thu, 31 Oct 2019 14:32:39 -0400 Subject: cleanup --- src/server/apis/google/GooglePhotosUploadUtils.ts | 12 +++++------- src/server/index.ts | 4 ++-- 2 files changed, 7 insertions(+), 9 deletions(-) (limited to 'src') diff --git a/src/server/apis/google/GooglePhotosUploadUtils.ts b/src/server/apis/google/GooglePhotosUploadUtils.ts index a98399621..d8cf795b5 100644 --- a/src/server/apis/google/GooglePhotosUploadUtils.ts +++ b/src/server/apis/google/GooglePhotosUploadUtils.ts @@ -100,12 +100,12 @@ export namespace GooglePhotosUploadUtils { * @param album if included, will add all of the newly created remote images to the album * with the specified id */ - export const CreateMediaItems = async (bearerToken: string, newMediaItems: NewMediaItem[], album?: { id: string }): Promise => { + export const CreateMediaItems = async (bearerToken: string, newMediaItems: NewMediaItem[], album?: { id: string }): Promise => { // it's important to note that the API can't handle more than 50 items in each request and // seems to need at least some latency between requests (spamming it synchronously has led to the server returning errors)... 
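            // (worked example with assumed numbers: 180 newMediaItems → ceil(180 / 50) = 4 batches;
            // if the 100ms interval is applied between batches, that adds roughly 300ms of deliberate
            // latency on top of the four network round trips)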
const batched = BatchedArray.from(newMediaItems, { batchSize: 50 }); // ...so we execute them in delayed batches and await the entire execution - const newMediaItemResults = await batched.batchedMapPatientInterval( + return batched.batchedMapPatientInterval( { magnitude: 100, unit: TimeUnit.Milliseconds }, async (batch: NewMediaItem[], collector) => { const parameters = { @@ -117,19 +117,17 @@ export namespace GooglePhotosUploadUtils { }; // register the target album, if provided album && (parameters.body.albumId = album.id); - const { newMediaItemResults } = await new Promise((resolve, reject) => { + collector.push(...(await new Promise((resolve, reject) => { request(parameters, (error, _response, body) => { if (error) { reject(error); } else { - resolve(body); + resolve(body.newMediaItemResults); } }); - }); - collector.push(...newMediaItemResults); + }))); } ); - return { newMediaItemResults }; }; } \ No newline at end of file diff --git a/src/server/index.ts b/src/server/index.ts index 9f3e34761..25697e71f 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -650,11 +650,11 @@ function routeSetter(router: RouteManager) { const failedCount = failed.length; if (failedCount) { console.error(`Unable to upload ${failedCount} image${failedCount === 1 ? "" : "s"} to Google's servers`); - console.log(failed.map(({ reason, batch, index, url }) => `@${batch}.${index}: ${url} failed: ${reason}`).join('\n')); + console.log(failed.map(({ reason, batch, index, url }) => `@${batch}.${index}: ${url} failed:\n${reason}`).join('\n\n')); } return GooglePhotosUploadUtils.CreateMediaItems(token, newMediaItems, req.body.album).then( - result => _success(res, { results: result.newMediaItemResults, failed }), + results => _success(res, { results, failed }), error => _error(res, mediaError, error) ); } -- cgit v1.2.3-70-g09d2 From 0b72a27ead9d1e933ae349b8a3e9e9b8702664d1 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Sat, 9 Nov 2019 16:18:23 -0500 Subject: factored out all but google resources into managers --- client_secret.json | 1 - .../util/Import & Export/DirectoryImportBox.tsx | 10 +- src/server/ApiManagers/DeleteManager.ts | 65 +++ src/server/ApiManagers/ExportManager.ts | 157 +++++- src/server/ApiManagers/PDFManager.ts | 107 ++++ src/server/ApiManagers/SearchManager.ts | 4 +- src/server/ApiManagers/UploadManager.ts | 227 ++++++++ src/server/ApiManagers/UserManager.ts | 47 +- src/server/DashUploadUtils.ts | 17 +- src/server/SharedMediaTypes.ts | 9 + src/server/Websocket/Websocket.ts | 19 +- src/server/apis/google/GoogleApiServerUtils.ts | 46 +- src/server/apis/google/GooglePhotosUploadUtils.ts | 19 +- src/server/credentials/CredentialsLoader.ts | 29 ++ .../credentials/google_docs_credentials.json | 11 - .../credentials/google_project_credentials.json | 14 + src/server/credentials/test.json | 14 + src/server/index.ts | 570 ++------------------- 18 files changed, 770 insertions(+), 596 deletions(-) delete mode 100644 client_secret.json create mode 100644 src/server/ApiManagers/DeleteManager.ts create mode 100644 src/server/ApiManagers/PDFManager.ts create mode 100644 src/server/ApiManagers/UploadManager.ts create mode 100644 src/server/SharedMediaTypes.ts create mode 100644 src/server/credentials/CredentialsLoader.ts delete mode 100644 src/server/credentials/google_docs_credentials.json create mode 100644 src/server/credentials/google_project_credentials.json create mode 100644 src/server/credentials/test.json (limited to 'src') diff --git a/client_secret.json b/client_secret.json deleted file 
mode 100644 index a9c698421..000000000 --- a/client_secret.json +++ /dev/null @@ -1 +0,0 @@ -{"installed":{"client_id":"1005546247619-kqpnvh42mpa803tem8556b87umi4j9r0.apps.googleusercontent.com","project_id":"brown-dash","auth_uri":"https://accounts.google.com/o/oauth2/auth","token_uri":"https://oauth2.googleapis.com/token","auth_provider_x509_cert_url":"https://www.googleapis.com/oauth2/v1/certs","client_secret":"WshLb5TH9SdFVGGbQcnYj7IU","redirect_uris":["urn:ietf:wg:oauth:2.0:oob","http://localhost"]}} \ No newline at end of file diff --git a/src/client/util/Import & Export/DirectoryImportBox.tsx b/src/client/util/Import & Export/DirectoryImportBox.tsx index bdd59cb16..2e0ba25eb 100644 --- a/src/client/util/Import & Export/DirectoryImportBox.tsx +++ b/src/client/util/Import & Export/DirectoryImportBox.tsx @@ -22,6 +22,9 @@ import { SchemaHeaderField } from "../../../new_fields/SchemaHeaderField"; import "./DirectoryImportBox.scss"; import { Networking } from "../../Network"; import { BatchedArray } from "array-batcher"; +import * as path from 'path'; +import { DashUploadUtils } from "../../../server/DashUploadUtils"; +import { SharedMediaTypes } from "../../../server/SharedMediaTypes"; const unsupported = ["text/html", "text/plain"]; @@ -94,7 +97,12 @@ export default class DirectoryImportBox extends React.Component let validated: File[] = []; for (let i = 0; i < files.length; i++) { let file = files.item(i); - file && !unsupported.includes(file.type) && validated.push(file); + if (file && !unsupported.includes(file.type)) { + const ext = path.extname(file.name).toLowerCase(); + if (SharedMediaTypes.imageFormats.includes(ext)) { + validated.push(file); + } + } } runInAction(() => { diff --git a/src/server/ApiManagers/DeleteManager.ts b/src/server/ApiManagers/DeleteManager.ts new file mode 100644 index 000000000..bbf1d0425 --- /dev/null +++ b/src/server/ApiManagers/DeleteManager.ts @@ -0,0 +1,65 @@ +import ApiManager, { Registration } from "./ApiManager"; +import { Method, _permission_denied } from "../RouteManager"; +import { RouteStore } from "../RouteStore"; +import { WebSocket } from "../Websocket/Websocket"; +import { Database } from "../database"; + +export default class DeleteManager extends ApiManager { + + protected initialize(register: Registration): void { + + register({ + method: Method.GET, + subscription: RouteStore.delete, + onValidation: async ({ res, isRelease }) => { + if (isRelease) { + return _permission_denied(res, deletionPermissionError); + } + await WebSocket.deleteFields(); + res.redirect(RouteStore.home); + } + }); + + register({ + method: Method.GET, + subscription: RouteStore.deleteAll, + onValidation: async ({ res, isRelease }) => { + if (isRelease) { + return _permission_denied(res, deletionPermissionError); + } + await WebSocket.deleteAll(); + res.redirect(RouteStore.home); + } + }); + + + register({ + method: Method.GET, + subscription: "/deleteWithAux", + onValidation: async ({ res, isRelease }) => { + if (isRelease) { + return _permission_denied(res, deletionPermissionError); + } + await Database.Auxiliary.DeleteAll(); + res.redirect(RouteStore.delete); + } + }); + + register({ + method: Method.GET, + subscription: "/deleteWithGoogleCredentials", + onValidation: async ({ res, isRelease }) => { + if (isRelease) { + return _permission_denied(res, deletionPermissionError); + } + await Database.Auxiliary.GoogleAuthenticationToken.DeleteAll(); + res.redirect(RouteStore.delete); + } + }); + + + } + +} + +const deletionPermissionError = "Cannot perform a delete 
operation outside of the development environment!"; diff --git a/src/server/ApiManagers/ExportManager.ts b/src/server/ApiManagers/ExportManager.ts index 14ac7dd5b..d42db1056 100644 --- a/src/server/ApiManagers/ExportManager.ts +++ b/src/server/ApiManagers/ExportManager.ts @@ -1,5 +1,5 @@ import ApiManager, { Registration } from "./ApiManager"; -import RouteManager, { Method } from "../RouteManager"; +import { Method } from "../RouteManager"; import RouteSubscriber from "../RouteSubscriber"; import { RouteStore } from "../RouteStore"; import * as Archiver from 'archiver'; @@ -7,6 +7,7 @@ import * as express from 'express'; import { Database } from "../database"; import * as path from "path"; import { DashUploadUtils } from "../DashUploadUtils"; +import { publicDirectory } from ".."; export type Hierarchy = { [id: string]: string | Hierarchy }; export type ZipMutator = (file: Archiver.Archiver) => void | Promise; @@ -15,10 +16,20 @@ export interface DocumentElements { title: string; } -export default class ExportManager extends ApiManager { +export default class DownloadManager extends ApiManager { protected initialize(register: Registration): void { + /** + * Let's say someone's using Dash to organize images in collections. + * This lets them export the hierarchy they've built to their + * own file system in a useful format. + * + * This handler starts with a single document id (interesting only + * if it's that of a collection). It traverses the database, captures + * the nesting of only nested images or collections, writes + * that to a zip file and returns it to the client for download. + */ register({ method: Method.GET, subscription: new RouteSubscriber(RouteStore.imageHierarchyExport).add('docId'), @@ -29,10 +40,101 @@ export default class ExportManager extends ApiManager { return BuildAndDispatchZip(res, zip => writeHierarchyRecursive(zip, hierarchy)); } }); + + register({ + method: Method.GET, + subscription: new RouteSubscriber("/downloadId").add("docId"), + onValidation: async ({ req, res }) => { + return BuildAndDispatchZip(res, async zip => { + const { id, docs, files } = await getDocs(req.params.docId); + const docString = JSON.stringify({ id, docs }); + zip.append(docString, { name: "doc.json" }); + files.forEach(val => { + zip.file(publicDirectory + val, { name: val.substring(1) }); + }); + }); + } + }); + + register({ + method: Method.GET, + subscription: new RouteSubscriber("/serializeDoc").add("docId"), + onValidation: async ({ req, res }) => { + const { docs, files } = await getDocs(req.params.docId); + res.send({ docs, files: Array.from(files) }); + } + }); + + } } +async function getDocs(id: string) { + const files = new Set(); + const docs: { [id: string]: any } = {}; + const fn = (doc: any): string[] => { + const id = doc.id; + if (typeof id === "string" && id.endsWith("Proto")) { + //Skip protos + return []; + } + const ids: string[] = []; + for (const key in doc.fields) { + if (!doc.fields.hasOwnProperty(key)) { + continue; + } + const field = doc.fields[key]; + if (field === undefined || field === null) { + continue; + } + + if (field.__type === "proxy" || field.__type === "prefetch_proxy") { + ids.push(field.fieldId); + } else if (field.__type === "script" || field.__type === "computed") { + if (field.captures) { + ids.push(field.captures.fieldId); + } + } else if (field.__type === "list") { + ids.push(...fn(field)); + } else if (typeof field === "string") { + const re = /"(?:dataD|d)ocumentId"\s*:\s*"([\w\-]*)"/g; + let match: string[] | null; + while ((match = 
re.exec(field)) !== null) {
+                ids.push(match[1]);
+            }
+        } else if (field.__type === "RichTextField") {
+            const re = /"href"\s*:\s*"(.*?)"/g;
+            let match: string[] | null;
+            while ((match = re.exec(field.Data)) !== null) {
+                const urlString = match[1];
+                const split = new URL(urlString).pathname.split("doc/");
+                if (split.length > 1) {
+                    ids.push(split[split.length - 1]);
+                }
+            }
+            const re2 = /"src"\s*:\s*"(.*?)"/g;
+            while ((match = re2.exec(field.Data)) !== null) {
+                const urlString = match[1];
+                const pathname = new URL(urlString).pathname;
+                files.add(pathname);
+            }
+        } else if (["audio", "image", "video", "pdf", "web"].includes(field.__type)) {
+            const url = new URL(field.url);
+            const pathname = url.pathname;
+            files.add(pathname);
+        }
+    }
+
+    if (doc.id) {
+        docs[doc.id] = doc;
+    }
+    return ids;
+    };
+    await Database.Instance.visit([id], fn);
+    return { id, docs, files };
+}
+
 /**
  * This utility function factors out the process
  * of creating a zip file and sending it back to the client
@@ -45,6 +147,8 @@ export default class ExportManager extends ApiManager {
  * @param mutator the callback function used to actually modify and insert information into the zip instance
  */
 export async function BuildAndDispatchZip(res: express.Response, mutator: ZipMutator): Promise {
+    res.set('Content-disposition', `attachment;`);
+    res.set('Content-Type', "application/zip");
     const zip = Archiver('zip');
     zip.pipe(res);
     await mutator(zip);
@@ -76,7 +180,6 @@ following the general recursive structure shown immediately below
 }
 }
 */
-
 async function buildHierarchyRecursive(seedId: string, hierarchy: Hierarchy): Promise {
     const { title, data } = await getData(seedId);
     const label = `${title} (${seedId})`;
@@ -93,9 +196,20 @@ async function buildHierarchyRecursive(seedId: string, hierarchy: Hierarchy): Pr
     }
 }
-async function getData(seedId: string): Promise {
+/**
+ * This is a very specific utility method to help traverse the database
+ * to parse data and titles out of images and collections alone.
+ *
+ * We don't know if the document id given to us corresponds to a view document or a data
+ * document. If it's a data document, the response from the database will have
+ * a data field. If not, call recursively on the proto, and resolve with *its* data
+ *
+ * @param targetId the id of the Dash document whose data is being requested
+ * @returns the data of the document, as well as its title
+ */
+async function getData(targetId: string): Promise {
     return new Promise((resolve, reject) => {
-        Database.Instance.getDocument(seedId, async (result: any) => {
+        Database.Instance.getDocument(targetId, async (result: any) => {
            const { data, proto, title } = result.fields;
            if (data) {
                if (data.url) {
@@ -105,29 +219,50 @@ async function getData(seedId: string): Promise {
            } else {
                reject();
            }
-        }
-        if (proto) {
+        } else if (proto) {
            getData(proto.fieldId).then(resolve, reject);
+        } else {
+            reject();
        }
        });
    });
 }
+/**
+ *
+ * @param file the zip file to which we write the files
+ * @param hierarchy the data structure from which we read, defining the nesting of the documents in the zip
+ * @param prefix lets us create nested folders in the zip file by continually appending to the end
+ * of the prefix with each layer of recursion.
+ *
+ * Function Call #1 => "Dash Export"
+ * Function Call #2 => "Dash Export/a nested collection"
+ * Function Call #3 => "Dash Export/a nested collection/lowest level collection"
+ * ...
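 * (e.g. an image stored in that lowest collection would be written to the zip under
 * "Dash Export/a nested collection/lowest level collection/<its title>" - hypothetical titles)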
+ */ async function writeHierarchyRecursive(file: Archiver.Archiver, hierarchy: Hierarchy, prefix = "Dash Export"): Promise { - for (const key of Object.keys(hierarchy)) { - const result = hierarchy[key]; + for (const documentTitle of Object.keys(hierarchy)) { + const result = hierarchy[documentTitle]; + // base case or leaf node, we've hit a url (image) if (typeof result === "string") { let path: string; let matches: RegExpExecArray | null; if ((matches = /\:1050\/files\/(upload\_[\da-z]{32}.*)/g.exec(result)) !== null) { + // image already exists on our server path = `${__dirname}/public/files/${matches[1]}`; } else { + // the image doesn't already exist on our server (may have been dragged + // and dropped in the browser and thus hosted remotely) so we upload it + // to our server and point the zip file to it, so it can bundle up the bytes const information = await DashUploadUtils.UploadImage(result); path = information.mediaPaths[0]; } - file.file(path, { name: key, prefix }); + // write the file specified by the path to the directory in the + // zip file given by the prefix. + file.file(path, { name: documentTitle, prefix }); } else { - await writeHierarchyRecursive(file, result, `${prefix}/${key}`); + // we've hit a collection, so we have to recurse + await writeHierarchyRecursive(file, result, `${prefix}/${documentTitle}`); } } } \ No newline at end of file diff --git a/src/server/ApiManagers/PDFManager.ts b/src/server/ApiManagers/PDFManager.ts new file mode 100644 index 000000000..f328557b4 --- /dev/null +++ b/src/server/ApiManagers/PDFManager.ts @@ -0,0 +1,107 @@ +import ApiManager, { Registration } from "./ApiManager"; +import { Method } from "../RouteManager"; +import RouteSubscriber from "../RouteSubscriber"; +import { exists, createReadStream, createWriteStream } from "fs"; +import { filesDirectory } from ".."; +import * as Pdfjs from 'pdfjs-dist'; +import { createCanvas } from "canvas"; +const probe = require("probe-image-size"); +import * as express from "express"; +import * as path from "path"; + +export default class PDFManager extends ApiManager { + + protected initialize(register: Registration): void { + + register({ + method: Method.GET, + subscription: new RouteSubscriber("/thumbnail").add("filename"), + onValidation: ({ req, res }) => { + let filename = req.params.filename; + let noExt = filename.substring(0, filename.length - ".png".length); + let pagenumber = parseInt(noExt.split('-')[1]); + return new Promise(resolve => { + exists(filesDirectory + filename, (exists: boolean) => { + console.log(`${filesDirectory + filename} ${exists ? 
"exists" : "does not exist"}`); + if (exists) { + let input = createReadStream(filesDirectory + filename); + probe(input, (err: any, result: any) => { + if (err) { + console.log(err); + console.log(`error on ${filename}`); + return; + } + res.send({ path: "/files/" + filename, width: result.width, height: result.height }); + }); + } + else { + LoadPage(filesDirectory + filename.substring(0, filename.length - noExt.split('-')[1].length - ".PNG".length - 1) + ".pdf", pagenumber, res); + } + resolve(); + }); + }); + } + }); + + function LoadPage(file: string, pageNumber: number, res: express.Response) { + console.log(file); + Pdfjs.getDocument(file).promise + .then((pdf: Pdfjs.PDFDocumentProxy) => { + let factory = new NodeCanvasFactory(); + console.log(pageNumber); + pdf.getPage(pageNumber).then((page: Pdfjs.PDFPageProxy) => { + console.log("reading " + page); + let viewport = page.getViewport(1 as any); + let canvasAndContext = factory.create(viewport.width, viewport.height); + let renderContext = { + canvasContext: canvasAndContext.context, + viewport: viewport, + canvasFactory: factory + }; + console.log("read " + pageNumber); + + page.render(renderContext).promise + .then(() => { + console.log("saving " + pageNumber); + let stream = canvasAndContext.canvas.createPNGStream(); + let pngFile = `${file.substring(0, file.length - ".pdf".length)}-${pageNumber}.PNG`; + let out = createWriteStream(pngFile); + stream.pipe(out); + out.on("finish", () => { + console.log(`Success! Saved to ${pngFile}`); + let name = path.basename(pngFile); + res.send({ path: "/files/" + name, width: viewport.width, height: viewport.height }); + }); + }, (reason: string) => { + console.error(reason + ` ${pageNumber}`); + }); + }); + }); + } + + } + +} + +class NodeCanvasFactory { + create = (width: number, height: number) => { + var canvas = createCanvas(width, height); + var context = canvas.getContext('2d'); + return { + canvas, + context + }; + } + + reset = (canvasAndContext: any, width: number, height: number) => { + canvasAndContext.canvas.width = width; + canvasAndContext.canvas.height = height; + } + + destroy = (canvasAndContext: any) => { + canvasAndContext.canvas.width = 0; + canvasAndContext.canvas.height = 0; + canvasAndContext.canvas = null; + canvasAndContext.context = null; + } +} \ No newline at end of file diff --git a/src/server/ApiManagers/SearchManager.ts b/src/server/ApiManagers/SearchManager.ts index 1c4b805e5..1c801715a 100644 --- a/src/server/ApiManagers/SearchManager.ts +++ b/src/server/ApiManagers/SearchManager.ts @@ -3,7 +3,7 @@ import { Method } from "../RouteManager"; import { Search } from "../Search"; var findInFiles = require('find-in-files'); import * as path from 'path'; -import { uploadDirectory } from ".."; +import { filesDirectory } from ".."; export default class SearchManager extends ApiManager { @@ -18,7 +18,7 @@ export default class SearchManager extends ApiManager { res.send([]); return; } - let results = await findInFiles.find({ 'term': q, 'flags': 'ig' }, uploadDirectory + "text", ".txt$"); + let results = await findInFiles.find({ 'term': q, 'flags': 'ig' }, filesDirectory + "text", ".txt$"); let resObj: { ids: string[], numFound: number, lines: string[] } = { ids: [], numFound: 0, lines: [] }; for (var result in results) { resObj.ids.push(path.basename(result, ".txt").replace(/upload_/, "")); diff --git a/src/server/ApiManagers/UploadManager.ts b/src/server/ApiManagers/UploadManager.ts new file mode 100644 index 000000000..38635eda5 --- /dev/null +++ 
b/src/server/ApiManagers/UploadManager.ts @@ -0,0 +1,227 @@ +import ApiManager, { Registration } from "./ApiManager"; +import { Method, _success } from "../RouteManager"; +import * as formidable from 'formidable'; +import v4 = require('uuid/v4'); +var AdmZip = require('adm-zip'); +import * as path from 'path'; +import { createReadStream, createWriteStream, unlink, readFileSync } from "fs"; +import { publicDirectory, filesDirectory, Partitions } from ".."; +import { RouteStore } from "../RouteStore"; +import { Database } from "../database"; +import { DashUploadUtils } from "../DashUploadUtils"; +import { Opt } from "../../new_fields/Doc"; +import { ParsedPDF } from "../PdfTypes"; +const pdf = require('pdf-parse'); +import * as sharp from 'sharp'; +import { SharedMediaTypes } from "../SharedMediaTypes"; +const imageDataUri = require('image-data-uri'); + +export default class UploadManager extends ApiManager { + + protected initialize(register: Registration): void { + + register({ + method: Method.POST, + subscription: "/uploadDoc", + onValidation: ({ req, res }) => { + let form = new formidable.IncomingForm(); + form.keepExtensions = true; + // let path = req.body.path; + const ids: { [id: string]: string } = {}; + let remap = true; + const getId = (id: string): string => { + if (!remap) return id; + if (id.endsWith("Proto")) return id; + if (id in ids) { + return ids[id]; + } else { + return ids[id] = v4(); + } + }; + const mapFn = (doc: any) => { + if (doc.id) { + doc.id = getId(doc.id); + } + for (const key in doc.fields) { + if (!doc.fields.hasOwnProperty(key)) { + continue; + } + const field = doc.fields[key]; + if (field === undefined || field === null) { + continue; + } + + if (field.__type === "proxy" || field.__type === "prefetch_proxy") { + field.fieldId = getId(field.fieldId); + } else if (field.__type === "script" || field.__type === "computed") { + if (field.captures) { + field.captures.fieldId = getId(field.captures.fieldId); + } + } else if (field.__type === "list") { + mapFn(field); + } else if (typeof field === "string") { + const re = /("(?:dataD|d)ocumentId"\s*:\s*")([\w\-]*)"/g; + doc.fields[key] = (field as any).replace(re, (match: any, p1: string, p2: string) => { + return `${p1}${getId(p2)}"`; + }); + } else if (field.__type === "RichTextField") { + const re = /("href"\s*:\s*")(.*?)"/g; + field.Data = field.Data.replace(re, (match: any, p1: string, p2: string) => { + return `${p1}${getId(p2)}"`; + }); + } + } + }; + return new Promise(resolve => { + form.parse(req, async (_err, fields, files) => { + remap = fields.remap !== "false"; + let id: string = ""; + try { + for (const name in files) { + const path_2 = files[name].path; + const zip = new AdmZip(path_2); + zip.getEntries().forEach((entry: any) => { + if (!entry.entryName.startsWith("files/")) return; + let dirname = path.dirname(entry.entryName) + "/"; + let extname = path.extname(entry.entryName); + let basename = path.basename(entry.entryName).split(".")[0]; + // zip.extractEntryTo(dirname + basename + "_o" + extname, __dirname + RouteStore.public, true, false); + // zip.extractEntryTo(dirname + basename + "_s" + extname, __dirname + RouteStore.public, true, false); + // zip.extractEntryTo(dirname + basename + "_m" + extname, __dirname + RouteStore.public, true, false); + // zip.extractEntryTo(dirname + basename + "_l" + extname, __dirname + RouteStore.public, true, false); + try { + zip.extractEntryTo(entry.entryName, __dirname + RouteStore.public, true, false); + dirname = "/" + dirname; + + 
createReadStream(publicDirectory + dirname + basename + extname).pipe(createWriteStream(publicDirectory + dirname + basename + "_o" + extname)); + createReadStream(publicDirectory + dirname + basename + extname).pipe(createWriteStream(publicDirectory + dirname + basename + "_s" + extname)); + createReadStream(publicDirectory + dirname + basename + extname).pipe(createWriteStream(publicDirectory + dirname + basename + "_m" + extname)); + createReadStream(publicDirectory + dirname + basename + extname).pipe(createWriteStream(publicDirectory + dirname + basename + "_l" + extname)); + } catch (e) { + console.log(e); + } + }); + const json = zip.getEntry("doc.json"); + let docs: any; + try { + let data = JSON.parse(json.getData().toString("utf8")); + docs = data.docs; + id = data.id; + docs = Object.keys(docs).map(key => docs[key]); + docs.forEach(mapFn); + await Promise.all(docs.map((doc: any) => new Promise(res => Database.Instance.replace(doc.id, doc, (err, r) => { + err && console.log(err); + res(); + }, true, "newDocuments")))); + } catch (e) { console.log(e); } + unlink(path_2, () => { }); + } + if (id) { + res.send(JSON.stringify(getId(id))); + } else { + res.send(JSON.stringify("error")); + } + } catch (e) { console.log(e); } + resolve(); + }); + }); + } + }); + + + register({ + method: Method.POST, + subscription: RouteStore.upload, + onValidation: async ({ req, res }) => { + let form = new formidable.IncomingForm(); + form.uploadDir = filesDirectory; + form.keepExtensions = true; + return new Promise(resolve => { + form.parse(req, async (_err, _fields, files) => { + let results: DashUploadUtils.ImageFileResponse[] = []; + for (const key in files) { + const { type, path: location, name } = files[key]; + const filename = path.basename(location); + let uploadInformation: Opt; + if (filename.endsWith(".pdf")) { + let dataBuffer = readFileSync(filesDirectory + filename); + const result: ParsedPDF = await pdf(dataBuffer); + await new Promise((resolve, reject) => { + const path = filesDirectory + Partitions.PdfText + "/" + filename.substring(0, filename.length - ".pdf".length) + ".txt"; + createWriteStream(path).write(result.text, error => { + if (!error) { + resolve(); + } else { + reject(error); + } + }); + }); + } else { + uploadInformation = await DashUploadUtils.UploadImage(filesDirectory + filename, filename); + } + const exif = uploadInformation ? 
uploadInformation.exifData : undefined; + results.push({ name, type, path: `/files/${filename}`, exif }); + } + _success(res, results); + resolve(); + }); + }); + } + }); + + register({ + method: Method.POST, + subscription: RouteStore.inspectImage, + onValidation: async ({ req, res }) => { + const { source } = req.body; + if (typeof source === "string") { + const uploadInformation = await DashUploadUtils.UploadImage(source); + return res.send(await DashUploadUtils.InspectImage(uploadInformation.mediaPaths[0])); + } + res.send({}); + } + }); + + register({ + method: Method.POST, + subscription: RouteStore.dataUriToImage, + onValidation: ({ req, res }) => { + const uri = req.body.uri; + const filename = req.body.name; + if (!uri || !filename) { + res.status(401).send("incorrect parameters specified"); + return; + } + return imageDataUri.outputFile(uri, filesDirectory + filename).then((savedName: string) => { + const ext = path.extname(savedName).toLowerCase(); + const { pngs, jpgs } = SharedMediaTypes; + let resizers = [ + { resizer: sharp().resize(100, undefined, { withoutEnlargement: true }), suffix: "_s" }, + { resizer: sharp().resize(400, undefined, { withoutEnlargement: true }), suffix: "_m" }, + { resizer: sharp().resize(900, undefined, { withoutEnlargement: true }), suffix: "_l" }, + ]; + let isImage = false; + if (pngs.includes(ext)) { + resizers.forEach(element => { + element.resizer = element.resizer.png(); + }); + isImage = true; + } else if (jpgs.includes(ext)) { + resizers.forEach(element => { + element.resizer = element.resizer.jpeg(); + }); + isImage = true; + } + if (isImage) { + resizers.forEach(resizer => { + createReadStream(savedName).pipe(resizer.resizer).pipe(createWriteStream(filesDirectory + filename + resizer.suffix + ext)); + }); + } + res.send("/files/" + filename + ext); + }); + } + }); + + } + +} \ No newline at end of file diff --git a/src/server/ApiManagers/UserManager.ts b/src/server/ApiManagers/UserManager.ts index dd1e50133..fe1ce7f2b 100644 --- a/src/server/ApiManagers/UserManager.ts +++ b/src/server/ApiManagers/UserManager.ts @@ -1,11 +1,36 @@ import ApiManager, { Registration } from "./ApiManager"; import { Method } from "../RouteManager"; import { WebSocket } from "../Websocket/Websocket"; +import { RouteStore } from "../RouteStore"; +import { Database } from "../database"; export default class UserManager extends ApiManager { protected initialize(register: Registration): void { + register({ + method: Method.GET, + subscription: RouteStore.getUsers, + onValidation: async ({ res }) => { + const cursor = await Database.Instance.query({}, { email: 1, userDocumentId: 1 }, "users"); + const results = await cursor.toArray(); + res.send(results.map(user => ({ email: user.email, userDocumentId: user.userDocumentId }))); + } + }); + + register({ + method: Method.GET, + subscription: RouteStore.getUserDocumentId, + onValidation: ({ res, user }) => res.send(user.userDocumentId) + }); + + register({ + method: Method.GET, + subscription: RouteStore.getCurrUser, + onValidation: ({ res, user }) => res.send(JSON.stringify(user)), + onUnauthenticated: ({ res }) => res.send(JSON.stringify({ id: "__guest__", email: "" })) + }); + register({ method: Method.GET, subscription: "/whosOnline", @@ -17,7 +42,7 @@ export default class UserManager extends ApiManager { for (const user in timeMap) { const time = timeMap[user]; const key = ((now - time) / 1000) < (60 * 5) ? 
"active" : "inactive"; - users[key][user] = `Last active ${this.msToTime(now - time)} ago`; + users[key][user] = `Last active ${msToTime(now - time)} ago`; } res.send(users); @@ -26,17 +51,17 @@ export default class UserManager extends ApiManager { } - private msToTime(duration: number) { - let milliseconds = Math.floor((duration % 1000) / 100), - seconds = Math.floor((duration / 1000) % 60), - minutes = Math.floor((duration / (1000 * 60)) % 60), - hours = Math.floor((duration / (1000 * 60 * 60)) % 24); +} - let hoursS = (hours < 10) ? "0" + hours : hours; - let minutesS = (minutes < 10) ? "0" + minutes : minutes; - let secondsS = (seconds < 10) ? "0" + seconds : seconds; +function msToTime(duration: number) { + let milliseconds = Math.floor((duration % 1000) / 100), + seconds = Math.floor((duration / 1000) % 60), + minutes = Math.floor((duration / (1000 * 60)) % 60), + hours = Math.floor((duration / (1000 * 60 * 60)) % 24); - return hoursS + ":" + minutesS + ":" + secondsS + "." + milliseconds; - } + let hoursS = (hours < 10) ? "0" + hours : hours; + let minutesS = (minutes < 10) ? "0" + minutes : minutes; + let secondsS = (seconds < 10) ? "0" + seconds : seconds; + return hoursS + ":" + minutesS + ":" + secondsS + "." + milliseconds; } \ No newline at end of file diff --git a/src/server/DashUploadUtils.ts b/src/server/DashUploadUtils.ts index 9fddb466c..8f5b0e1a8 100644 --- a/src/server/DashUploadUtils.ts +++ b/src/server/DashUploadUtils.ts @@ -5,6 +5,7 @@ import * as sharp from 'sharp'; import request = require('request-promise'); import { ExifData, ExifImage } from 'exif'; import { Opt } from '../new_fields/Doc'; +import { SharedMediaTypes } from './SharedMediaTypes'; const uploadDirectory = path.join(__dirname, './public/files/'); @@ -15,20 +16,21 @@ export namespace DashUploadUtils { suffix: string; } + export interface ImageFileResponse { + name: string; + path: string; + type: string; + exif: Opt; + } + export const Sizes: { [size: string]: Size } = { SMALL: { width: 100, suffix: "_s" }, MEDIUM: { width: 400, suffix: "_m" }, LARGE: { width: 900, suffix: "_l" }, }; - const gifs = [".gif"]; - const pngs = [".png"]; - const jpgs = [".jpg", ".jpeg"]; - const imageFormats = [...pngs, ...jpgs, ...gifs]; - const videoFormats = [".mov", ".mp4"]; - export function validateExtension(url: string) { - return imageFormats.includes(path.extname(url).toLowerCase()); + return SharedMediaTypes.imageFormats.includes(path.extname(url).toLowerCase()); } const size = "content-length"; @@ -132,6 +134,7 @@ export namespace DashUploadUtils { contentSize, contentType, }; + const { pngs, imageFormats, jpgs, videoFormats } = SharedMediaTypes; return new Promise(async (resolve, reject) => { const resizers = [ { resizer: sharp().rotate(), suffix: "_o" }, diff --git a/src/server/SharedMediaTypes.ts b/src/server/SharedMediaTypes.ts new file mode 100644 index 000000000..3d3234125 --- /dev/null +++ b/src/server/SharedMediaTypes.ts @@ -0,0 +1,9 @@ +export namespace SharedMediaTypes { + + export const gifs = [".gif"]; + export const pngs = [".png"]; + export const jpgs = [".jpg", ".jpeg"]; + export const imageFormats = [...pngs, ...jpgs, ...gifs]; + export const videoFormats = [".mov", ".mp4"]; + +} \ No newline at end of file diff --git a/src/server/Websocket/Websocket.ts b/src/server/Websocket/Websocket.ts index cd2813d99..f6a6c8718 100644 --- a/src/server/Websocket/Websocket.ts +++ b/src/server/Websocket/Websocket.ts @@ -6,7 +6,9 @@ import { Database } from "../database"; import { Search } from "../Search"; 
import * as io from 'socket.io'; import YoutubeApi from "../apis/youtube/youtubeApiSample"; -import { youtubeApiKey } from ".."; +import { readFile } from "fs"; +import { Credentials } from "google-auth-library"; +import { GoogleCredentialsLoader } from "../credentials/CredentialsLoader"; export namespace WebSocket { @@ -18,6 +20,14 @@ export namespace WebSocket { export const socketMap = new Map(); export const timeMap: { [id: string]: number } = {}; + export async function start(serverPort: number, isRelease: boolean) { + await preliminaryFunctions(); + initialize(serverPort, isRelease); + } + + async function preliminaryFunctions() { + } + export function initialize(serverPort: number, isRelease: boolean) { const endpoint = io(); endpoint.listen(serverPort); @@ -54,14 +64,15 @@ export namespace WebSocket { } function HandleYoutubeQuery([query, callback]: [YoutubeQueryInput, (result?: any[]) => void]) { + const { ProjectCredentials } = GoogleCredentialsLoader; switch (query.type) { case YoutubeQueryTypes.Channels: - YoutubeApi.authorizedGetChannel(youtubeApiKey); + YoutubeApi.authorizedGetChannel(ProjectCredentials); break; case YoutubeQueryTypes.SearchVideo: - YoutubeApi.authorizedGetVideos(youtubeApiKey, query.userInput, callback); + YoutubeApi.authorizedGetVideos(ProjectCredentials, query.userInput, callback); case YoutubeQueryTypes.VideoDetails: - YoutubeApi.authorizedGetVideoDetails(youtubeApiKey, query.videoIds, callback); + YoutubeApi.authorizedGetVideoDetails(ProjectCredentials, query.videoIds, callback); } } diff --git a/src/server/apis/google/GoogleApiServerUtils.ts b/src/server/apis/google/GoogleApiServerUtils.ts index 35a2541a9..b3657ee43 100644 --- a/src/server/apis/google/GoogleApiServerUtils.ts +++ b/src/server/apis/google/GoogleApiServerUtils.ts @@ -1,12 +1,11 @@ import { google } from "googleapis"; -import { readFile } from "fs"; import { OAuth2Client, Credentials, OAuth2ClientOptions } from "google-auth-library"; import { Opt } from "../../../new_fields/Doc"; import { GaxiosResponse } from "gaxios"; import request = require('request-promise'); import * as qs from 'query-string'; import { Database } from "../../database"; -import * as path from "path"; +import { GoogleCredentialsLoader } from "../../credentials/CredentialsLoader"; /** * Scopes give Google users fine granularity of control @@ -61,6 +60,23 @@ export namespace GoogleApiServerUtils { */ let worker: OAuth2Client; + /** + * This function is called once before the server is started, + * reading in Dash's project-specific credentials (client secret + * and client id) for later repeated access. It also sets up the + * global, intentionally unauthenticated worker OAuth2 client instance. + */ + export function processProjectCredentials(): void { + const { client_secret, client_id, redirect_uris } = GoogleCredentialsLoader.ProjectCredentials; + // initialize the global authorization client + installed = { + clientId: client_id, + clientSecret: client_secret, + redirectUri: redirect_uris[0] + }; + worker = generateClient(); + } + /** * A briefer format for the response from a 'googleapis' API request */ @@ -96,32 +112,6 @@ export namespace GoogleApiServerUtils { batchUpdate: ApiHandler; } - /** - * This function is called once before the server is started, - * reading in Dash's project-specific credentials (client secret - * and client id) for later repeated access. It also sets up the - * global, intentionally unauthenticated worker OAuth2 client instance. 
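One observation on the HandleYoutubeQuery switch in the Websocket module above: the SearchVideo case appears to fall through into VideoDetails because it has no break. A sketch of the presumably intended shape, using only calls already present in the patch:

switch (query.type) {
    case YoutubeQueryTypes.Channels:
        YoutubeApi.authorizedGetChannel(ProjectCredentials);
        break;
    case YoutubeQueryTypes.SearchVideo:
        YoutubeApi.authorizedGetVideos(ProjectCredentials, query.userInput, callback);
        break; // without this break, execution would continue into the VideoDetails case
    case YoutubeQueryTypes.VideoDetails:
        YoutubeApi.authorizedGetVideoDetails(ProjectCredentials, query.videoIds, callback);
}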
- */ - export async function loadClientSecret(): Promise { - return new Promise((resolve, reject) => { - readFile(path.join(__dirname, "../../credentials/google_docs_credentials.json"), async (err, projectCredentials) => { - if (err) { - reject(err); - return console.log('Error loading client secret file:', err); - } - const { client_secret, client_id, redirect_uris } = JSON.parse(projectCredentials.toString()).installed; - // initialize the global authorization client - installed = { - clientId: client_id, - clientSecret: client_secret, - redirectUri: redirect_uris[0] - }; - worker = generateClient(); - resolve(); - }); - }); - } - /** * Maps the Dash user id of a given user to their single * associated OAuth2 client, mitigating the creation diff --git a/src/server/apis/google/GooglePhotosUploadUtils.ts b/src/server/apis/google/GooglePhotosUploadUtils.ts index d8cf795b5..0abed3f1d 100644 --- a/src/server/apis/google/GooglePhotosUploadUtils.ts +++ b/src/server/apis/google/GooglePhotosUploadUtils.ts @@ -1,7 +1,6 @@ import request = require('request-promise'); import * as path from 'path'; -import { MediaItemCreationResult, NewMediaItemResult } from './SharedTypes'; -import { NewMediaItem } from "../../index"; +import { NewMediaItemResult } from './SharedTypes'; import { BatchedArray, TimeUnit } from 'array-batcher'; import { DashUploadUtils } from '../../DashUploadUtils'; @@ -28,6 +27,22 @@ export namespace GooglePhotosUploadUtils { description: string; } + /** + * This is the format needed to pass + * into the BatchCreate API request + * to take a reference to raw uploaded bytes + * and actually create an image in Google Photos. + * + * So, to instantiate this interface you must have already dispatched an upload + * and received an upload token. + */ + export interface NewMediaItem { + description: string; + simpleMediaItem: { + uploadToken: string; + }; + } + /** * A utility function to streamline making * calls to the API's url - accentuates diff --git a/src/server/credentials/CredentialsLoader.ts b/src/server/credentials/CredentialsLoader.ts new file mode 100644 index 000000000..e3f4d167b --- /dev/null +++ b/src/server/credentials/CredentialsLoader.ts @@ -0,0 +1,29 @@ +import { readFile } from "fs"; + +export namespace GoogleCredentialsLoader { + + export interface InstalledCredentials { + client_id: string; + project_id: string; + auth_uri: string; + token_uri: string; + auth_provider_x509_cert_url: string; + client_secret: string; + redirect_uris: string[]; + } + + export let ProjectCredentials: InstalledCredentials; + + export async function loadCredentials() { + ProjectCredentials = await new Promise(resolve => { + readFile(__dirname + '/google_project_credentials.json', function processClientSecrets(err, content) { + if (err) { + console.log('Error loading client secret file: ' + err); + return; + } + resolve(JSON.parse(content.toString()).installed); + }); + }); + } + +} diff --git a/src/server/credentials/google_docs_credentials.json b/src/server/credentials/google_docs_credentials.json deleted file mode 100644 index 955c5a3c1..000000000 --- a/src/server/credentials/google_docs_credentials.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "installed": { - "client_id": "343179513178-ud6tvmh275r2fq93u9eesrnc66t6akh9.apps.googleusercontent.com", - "project_id": "quickstart-1565056383187", - "auth_uri": "https://accounts.google.com/o/oauth2/auth", - "token_uri": "https://oauth2.googleapis.com/token", - "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", - 
"client_secret": "w8KIFSc0MQpmUYHed4qEzn8b", - "redirect_uris": ["urn:ietf:wg:oauth:2.0:oob", "http://localhost"] - } -} \ No newline at end of file diff --git a/src/server/credentials/google_project_credentials.json b/src/server/credentials/google_project_credentials.json new file mode 100644 index 000000000..5d9c62eb1 --- /dev/null +++ b/src/server/credentials/google_project_credentials.json @@ -0,0 +1,14 @@ +{ + "installed": { + "client_id": "1005546247619-kqpnvh42mpa803tem8556b87umi4j9r0.apps.googleusercontent.com", + "project_id": "brown-dash", + "auth_uri": "https://accounts.google.com/o/oauth2/auth", + "token_uri": "https://oauth2.googleapis.com/token", + "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", + "client_secret": "WshLb5TH9SdFVGGbQcnYj7IU", + "redirect_uris": [ + "urn:ietf:wg:oauth:2.0:oob", + "http://localhost" + ] + } +} \ No newline at end of file diff --git a/src/server/credentials/test.json b/src/server/credentials/test.json new file mode 100644 index 000000000..0a032cc2d --- /dev/null +++ b/src/server/credentials/test.json @@ -0,0 +1,14 @@ +{ + "installed": { + "client_id": "343179513178-ud6tvmh275r2fq93u9eesrnc66t6akh9.apps.googleusercontent.com", + "project_id": "quickstart-1565056383187", + "auth_uri": "https://accounts.google.com/o/oauth2/auth", + "token_uri": "https://oauth2.googleapis.com/token", + "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", + "client_secret": "w8KIFSc0MQpmUYHed4qEzn8b", + "redirect_uris": [ + "urn:ietf:wg:oauth:2.0:oob", + "http://localhost" + ] + } +} \ No newline at end of file diff --git a/src/server/index.ts b/src/server/index.ts index 25697e71f..8eb88cf8b 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -1,352 +1,75 @@ require('dotenv').config(); -import * as formidable from 'formidable'; -import * as fs from 'fs'; -import * as sharp from 'sharp'; -import * as Pdfjs from 'pdfjs-dist'; -const imageDataUri = require('image-data-uri'); +import { GoogleApiServerUtils } from "./apis/google/GoogleApiServerUtils"; import * as mobileDetect from 'mobile-detect'; import * as path from 'path'; import { Database } from './database'; import { RouteStore } from './RouteStore'; -import v4 = require('uuid/v4'); -import { createCanvas } from "canvas"; const serverPort = 4321; -import { Search } from './Search'; -import * as Archiver from 'archiver'; -var AdmZip = require('adm-zip'); -import * as YoutubeApi from "./apis/youtube/youtubeApiSample"; -import { Response } from 'express-serve-static-core'; -import { GoogleApiServerUtils } from "./apis/google/GoogleApiServerUtils"; -const probe = require("probe-image-size"); -const pdf = require('pdf-parse'); import { GooglePhotosUploadUtils } from './apis/google/GooglePhotosUploadUtils'; import { Opt } from '../new_fields/Doc'; import { DashUploadUtils } from './DashUploadUtils'; import { BatchedArray, TimeUnit } from 'array-batcher'; -import { ParsedPDF } from "./PdfTypes"; -import { reject } from 'bluebird'; import RouteSubscriber from './RouteSubscriber'; -import InitializeServer from './Initialization'; +import initializeServer from './Initialization'; import RouteManager, { Method, _success, _permission_denied, _error, _invalid, OnUnauthenticated } from './RouteManager'; import * as qs from 'query-string'; import UtilManager from './ApiManagers/UtilManager'; import SearchManager from './ApiManagers/SearchManager'; import UserManager from './ApiManagers/UserManager'; import { WebSocket } from './Websocket/Websocket'; -import 
ExportManager from './ApiManagers/ExportManager'; -import ApiManager from './ApiManagers/ApiManager'; - -export let youtubeApiKey: string; - -export interface NewMediaItem { - description: string; - simpleMediaItem: { - uploadToken: string; - }; +import DownloadManager from './ApiManagers/ExportManager'; +import { GoogleCredentialsLoader } from './credentials/CredentialsLoader'; +import DeleteManager from "./ApiManagers/DeleteManager"; +import PDFManager from "./ApiManagers/PDFManager"; +import UploadManager from "./ApiManagers/UploadManager"; + +export const publicDirectory = __dirname + RouteStore.public; +export const filesDirectory = publicDirectory + "/files/"; +export enum Partitions { + PdfText = "pdf_text" } -const pngTypes = [".png", ".PNG"]; -const jpgTypes = [".jpg", ".JPG", ".jpeg", ".JPEG"]; -export const uploadDirectory = __dirname + "/public/files/"; -const pdfDirectory = uploadDirectory + "text"; -const solrURL = "http://localhost:8983/solr/#/dash"; - -start(); - -async function start() { - await PreliminaryFunctions(); - await InitializeServer({ listenAtPort: 1050, routeSetter }); -} - -async function PreliminaryFunctions() { - await new Promise(resolve => { - YoutubeApi.readApiKey((apiKey: string) => { - youtubeApiKey = apiKey; - resolve(); - }); - }); - await GoogleApiServerUtils.loadClientSecret(); - await DashUploadUtils.createIfNotExists(pdfDirectory); +/** + * These are the functions run before the server starts + * listening. Anything that must be complete + * before clients can access the server should be run or awaited here. + */ +async function preliminaryFunctions() { + // make project credentials globally accessible + await GoogleCredentialsLoader.loadCredentials(); + // read the resulting credentials into a different namespace + GoogleApiServerUtils.processProjectCredentials(); + // divide the public directory based on type + await Promise.all(Object.keys(Partitions).map(partition => DashUploadUtils.createIfNotExists(filesDirectory + partition))); + // connect to the database await Database.tryInitializeConnection(); } +/** + * Either clustered together as an API manager + * or individually referenced below, by the completion + * of this function's execution, all routes will + * be registered on the server + * @param router the instance of the route manager + * that will manage the registration of new routes + * with the server + */ function routeSetter(router: RouteManager) { - const managers: ApiManager[] = [ - new UtilManager(), - new SearchManager(), + // initialize API Managers + [ new UserManager(), - new ExportManager() - ]; - managers.forEach(manager => manager.register(router)); + new UploadManager(), + new DownloadManager(), + new SearchManager(), + new PDFManager(), + new DeleteManager(), + new UtilManager() + ].forEach(manager => manager.register(router)); + // initialize the web socket (bidirectional communication: if a user changes + // a field on one client, that change must be broadcast to all other clients) WebSocket.initialize(serverPort, router.isRelease); - async function getDocs(id: string) { - const files = new Set(); - const docs: { [id: string]: any } = {}; - const fn = (doc: any): string[] => { - const id = doc.id; - if (typeof id === "string" && id.endsWith("Proto")) { - //Skip protos - return []; - } - const ids: string[] = []; - for (const key in doc.fields) { - if (!doc.fields.hasOwnProperty(key)) { - continue; - } - const field = doc.fields[key]; - if (field === undefined || field === null) { - continue; - } - - if (field.__type 
=== "proxy" || field.__type === "prefetch_proxy") { - ids.push(field.fieldId); - } else if (field.__type === "script" || field.__type === "computed") { - if (field.captures) { - ids.push(field.captures.fieldId); - } - } else if (field.__type === "list") { - ids.push(...fn(field)); - } else if (typeof field === "string") { - const re = /"(?:dataD|d)ocumentId"\s*:\s*"([\w\-]*)"/g; - let match: string[] | null; - while ((match = re.exec(field)) !== null) { - ids.push(match[1]); - } - } else if (field.__type === "RichTextField") { - const re = /"href"\s*:\s*"(.*?)"/g; - let match: string[] | null; - while ((match = re.exec(field.Data)) !== null) { - const urlString = match[1]; - const split = new URL(urlString).pathname.split("doc/"); - if (split.length > 1) { - ids.push(split[split.length - 1]); - } - } - const re2 = /"src"\s*:\s*"(.*?)"/g; - while ((match = re2.exec(field.Data)) !== null) { - const urlString = match[1]; - const pathname = new URL(urlString).pathname; - files.add(pathname); - } - } else if (["audio", "image", "video", "pdf", "web"].includes(field.__type)) { - const url = new URL(field.url); - const pathname = url.pathname; - files.add(pathname); - } - } - - if (doc.id) { - docs[doc.id] = doc; - } - return ids; - }; - await Database.Instance.visit([id], fn); - return { id, docs, files }; - } - - router.addSupervisedRoute({ - method: Method.GET, - subscription: new RouteSubscriber("/serializeDoc").add("docId"), - onValidation: async ({ req, res }) => { - const { docs, files } = await getDocs(req.params.docId); - res.send({ docs, files: Array.from(files) }); - } - }); - - router.addSupervisedRoute({ - method: Method.GET, - subscription: new RouteSubscriber("/downloadId").add("docId"), - onValidation: async ({ req, res }) => { - res.set('Content-disposition', `attachment;`); - res.set('Content-Type', "application/zip"); - const { id, docs, files } = await getDocs(req.params.docId); - const docString = JSON.stringify({ id, docs }); - const zip = Archiver('zip'); - zip.pipe(res); - zip.append(docString, { name: "doc.json" }); - files.forEach(val => { - zip.file(__dirname + RouteStore.public + val, { name: val.substring(1) }); - }); - zip.finalize(); - } - }); - - router.addSupervisedRoute({ - method: Method.POST, - subscription: "/uploadDoc", - onValidation: ({ req, res }) => { - let form = new formidable.IncomingForm(); - form.keepExtensions = true; - // let path = req.body.path; - const ids: { [id: string]: string } = {}; - let remap = true; - const getId = (id: string): string => { - if (!remap) return id; - if (id.endsWith("Proto")) return id; - if (id in ids) { - return ids[id]; - } else { - return ids[id] = v4(); - } - }; - const mapFn = (doc: any) => { - if (doc.id) { - doc.id = getId(doc.id); - } - for (const key in doc.fields) { - if (!doc.fields.hasOwnProperty(key)) { - continue; - } - const field = doc.fields[key]; - if (field === undefined || field === null) { - continue; - } - - if (field.__type === "proxy" || field.__type === "prefetch_proxy") { - field.fieldId = getId(field.fieldId); - } else if (field.__type === "script" || field.__type === "computed") { - if (field.captures) { - field.captures.fieldId = getId(field.captures.fieldId); - } - } else if (field.__type === "list") { - mapFn(field); - } else if (typeof field === "string") { - const re = /("(?:dataD|d)ocumentId"\s*:\s*")([\w\-]*)"/g; - doc.fields[key] = (field as any).replace(re, (match: any, p1: string, p2: string) => { - return `${p1}${getId(p2)}"`; - }); - } else if (field.__type === "RichTextField") { 
- const re = /("href"\s*:\s*")(.*?)"/g; - field.Data = field.Data.replace(re, (match: any, p1: string, p2: string) => { - return `${p1}${getId(p2)}"`; - }); - } - } - }; - return new Promise(resolve => { - form.parse(req, async (_err, fields, files) => { - remap = fields.remap !== "false"; - let id: string = ""; - try { - for (const name in files) { - const path_2 = files[name].path; - const zip = new AdmZip(path_2); - zip.getEntries().forEach((entry: any) => { - if (!entry.entryName.startsWith("files/")) return; - let dirname = path.dirname(entry.entryName) + "/"; - let extname = path.extname(entry.entryName); - let basename = path.basename(entry.entryName).split(".")[0]; - // zip.extractEntryTo(dirname + basename + "_o" + extname, __dirname + RouteStore.public, true, false); - // zip.extractEntryTo(dirname + basename + "_s" + extname, __dirname + RouteStore.public, true, false); - // zip.extractEntryTo(dirname + basename + "_m" + extname, __dirname + RouteStore.public, true, false); - // zip.extractEntryTo(dirname + basename + "_l" + extname, __dirname + RouteStore.public, true, false); - try { - zip.extractEntryTo(entry.entryName, __dirname + RouteStore.public, true, false); - dirname = "/" + dirname; - - fs.createReadStream(__dirname + RouteStore.public + dirname + basename + extname).pipe(fs.createWriteStream(__dirname + RouteStore.public + dirname + basename + "_o" + extname)); - fs.createReadStream(__dirname + RouteStore.public + dirname + basename + extname).pipe(fs.createWriteStream(__dirname + RouteStore.public + dirname + basename + "_s" + extname)); - fs.createReadStream(__dirname + RouteStore.public + dirname + basename + extname).pipe(fs.createWriteStream(__dirname + RouteStore.public + dirname + basename + "_m" + extname)); - fs.createReadStream(__dirname + RouteStore.public + dirname + basename + extname).pipe(fs.createWriteStream(__dirname + RouteStore.public + dirname + basename + "_l" + extname)); - } catch (e) { - console.log(e); - } - }); - const json = zip.getEntry("doc.json"); - let docs: any; - try { - let data = JSON.parse(json.getData().toString("utf8")); - docs = data.docs; - id = data.id; - docs = Object.keys(docs).map(key => docs[key]); - docs.forEach(mapFn); - await Promise.all(docs.map((doc: any) => new Promise(res => Database.Instance.replace(doc.id, doc, (err, r) => { - err && console.log(err); - res(); - }, true, "newDocuments")))); - } catch (e) { console.log(e); } - fs.unlink(path_2, () => { }); - } - if (id) { - res.send(JSON.stringify(getId(id))); - } else { - res.send(JSON.stringify("error")); - } - } catch (e) { console.log(e); } - resolve(); - }); - }); - } - }); - - router.addSupervisedRoute({ - method: Method.GET, - subscription: new RouteSubscriber("/thumbnail").add("filename"), - onValidation: ({ req, res }) => { - let filename = req.params.filename; - let noExt = filename.substring(0, filename.length - ".png".length); - let pagenumber = parseInt(noExt.split('-')[1]); - return new Promise(resolve => { - fs.exists(uploadDirectory + filename, (exists: boolean) => { - console.log(`${uploadDirectory + filename} ${exists ? 
"exists" : "does not exist"}`); - if (exists) { - let input = fs.createReadStream(uploadDirectory + filename); - probe(input, (err: any, result: any) => { - if (err) { - console.log(err); - console.log(`error on ${filename}`); - return; - } - res.send({ path: "/files/" + filename, width: result.width, height: result.height }); - }); - } - else { - LoadPage(uploadDirectory + filename.substring(0, filename.length - noExt.split('-')[1].length - ".PNG".length - 1) + ".pdf", pagenumber, res); - } - resolve(); - }); - }); - } - }); - - function LoadPage(file: string, pageNumber: number, res: Response) { - console.log(file); - Pdfjs.getDocument(file).promise - .then((pdf: Pdfjs.PDFDocumentProxy) => { - let factory = new NodeCanvasFactory(); - console.log(pageNumber); - pdf.getPage(pageNumber).then((page: Pdfjs.PDFPageProxy) => { - console.log("reading " + page); - let viewport = page.getViewport(1 as any); - let canvasAndContext = factory.create(viewport.width, viewport.height); - let renderContext = { - canvasContext: canvasAndContext.context, - viewport: viewport, - canvasFactory: factory - }; - console.log("read " + pageNumber); - - page.render(renderContext).promise - .then(() => { - console.log("saving " + pageNumber); - let stream = canvasAndContext.canvas.createPNGStream(); - let pngFile = `${file.substring(0, file.length - ".pdf".length)}-${pageNumber}.PNG`; - let out = fs.createWriteStream(pngFile); - stream.pipe(out); - out.on("finish", () => { - console.log(`Success! Saved to ${pngFile}`); - let name = path.basename(pngFile); - res.send({ path: "/files/" + name, width: viewport.width, height: viewport.height }); - }); - }, (reason: string) => { - console.error(reason + ` ${pageNumber}`); - }); - }); - }); - } - /** * Anyone attempting to navigate to localhost at this port will * first have to log in. @@ -357,16 +80,6 @@ function routeSetter(router: RouteManager) { onValidation: ({ res }) => res.redirect(RouteStore.home) }); - router.addSupervisedRoute({ - method: Method.GET, - subscription: RouteStore.getUsers, - onValidation: async ({ res }) => { - const cursor = await Database.Instance.query({}, { email: 1, userDocumentId: 1 }, "users"); - const results = await cursor.toArray(); - res.send(results.map(user => ({ email: user.email, userDocumentId: user.userDocumentId }))); - } - }); - const serve: OnUnauthenticated = ({ req, res }) => { let detector = new mobileDetect(req.headers['user-agent'] || ""); let filename = detector.mobile() !== null ? 
'mobile/image.html' : 'index.html'; @@ -387,19 +100,6 @@ function routeSetter(router: RouteManager) { } }); - router.addSupervisedRoute({ - method: Method.GET, - subscription: RouteStore.getUserDocumentId, - onValidation: ({ res, user }) => res.send(user.userDocumentId) - }); - - router.addSupervisedRoute({ - method: Method.GET, - subscription: RouteStore.getCurrUser, - onValidation: ({ res, user }) => res.send(JSON.stringify(user)), - onUnauthenticated: ({ res }) => res.send(JSON.stringify({ id: "__guest__", email: "" })) - }); - const ServicesApiKeyMap = new Map([ ["face", process.env.FACE], ["vision", process.env.VISION], @@ -415,152 +115,6 @@ function routeSetter(router: RouteManager) { } }); - class NodeCanvasFactory { - create = (width: number, height: number) => { - var canvas = createCanvas(width, height); - var context = canvas.getContext('2d'); - return { - canvas, - context - }; - } - - reset = (canvasAndContext: any, width: number, height: number) => { - canvasAndContext.canvas.width = width; - canvasAndContext.canvas.height = height; - } - - destroy = (canvasAndContext: any) => { - canvasAndContext.canvas.width = 0; - canvasAndContext.canvas.height = 0; - canvasAndContext.canvas = null; - canvasAndContext.context = null; - } - } - - interface ImageFileResponse { - name: string; - path: string; - type: string; - exif: Opt; - } - - router.addSupervisedRoute({ - method: Method.POST, - subscription: RouteStore.upload, - onValidation: async ({ req, res }) => { - let form = new formidable.IncomingForm(); - form.uploadDir = uploadDirectory; - form.keepExtensions = true; - return new Promise(resolve => { - form.parse(req, async (_err, _fields, files) => { - let results: ImageFileResponse[] = []; - for (const key in files) { - const { type, path: location, name } = files[key]; - const filename = path.basename(location); - let uploadInformation: Opt; - if (filename.endsWith(".pdf")) { - let dataBuffer = fs.readFileSync(uploadDirectory + filename); - const result: ParsedPDF = await pdf(dataBuffer); - await new Promise(resolve => { - const path = pdfDirectory + "/" + filename.substring(0, filename.length - ".pdf".length) + ".txt"; - fs.createWriteStream(path).write(result.text, error => { - if (!error) { - resolve(); - } else { - reject(error); - } - }); - }); - } else { - uploadInformation = await DashUploadUtils.UploadImage(uploadDirectory + filename, filename); - } - const exif = uploadInformation ? 
uploadInformation.exifData : undefined; - results.push({ name, type, path: `/files/${filename}`, exif }); - } - _success(res, results); - resolve(); - }); - }); - } - }); - - router.addSupervisedRoute({ - method: Method.POST, - subscription: RouteStore.inspectImage, - onValidation: async ({ req, res }) => { - const { source } = req.body; - if (typeof source === "string") { - const uploadInformation = await DashUploadUtils.UploadImage(source); - return res.send(await DashUploadUtils.InspectImage(uploadInformation.mediaPaths[0])); - } - res.send({}); - } - }); - - router.addSupervisedRoute({ - method: Method.POST, - subscription: RouteStore.dataUriToImage, - onValidation: ({ req, res }) => { - const uri = req.body.uri; - const filename = req.body.name; - if (!uri || !filename) { - res.status(401).send("incorrect parameters specified"); - return; - } - return imageDataUri.outputFile(uri, uploadDirectory + filename).then((savedName: string) => { - const ext = path.extname(savedName); - let resizers = [ - { resizer: sharp().resize(100, undefined, { withoutEnlargement: true }), suffix: "_s" }, - { resizer: sharp().resize(400, undefined, { withoutEnlargement: true }), suffix: "_m" }, - { resizer: sharp().resize(900, undefined, { withoutEnlargement: true }), suffix: "_l" }, - ]; - let isImage = false; - if (pngTypes.includes(ext)) { - resizers.forEach(element => { - element.resizer = element.resizer.png(); - }); - isImage = true; - } else if (jpgTypes.includes(ext)) { - resizers.forEach(element => { - element.resizer = element.resizer.jpeg(); - }); - isImage = true; - } - if (isImage) { - resizers.forEach(resizer => { - fs.createReadStream(savedName).pipe(resizer.resizer).pipe(fs.createWriteStream(uploadDirectory + filename + resizer.suffix + ext)); - }); - } - res.send("/files/" + filename + ext); - }); - } - }); - - router.addSupervisedRoute({ - method: Method.GET, - subscription: RouteStore.delete, - onValidation: async ({ res, isRelease }) => { - if (isRelease) { - return _permission_denied(res, deletionPermissionError); - } - await WebSocket.deleteFields(); - res.redirect(RouteStore.home); - } - }); - - router.addSupervisedRoute({ - method: Method.GET, - subscription: RouteStore.deleteAll, - onValidation: async ({ res, isRelease }) => { - if (isRelease) { - return _permission_denied(res, deletionPermissionError); - } - await WebSocket.deleteAll(); - res.redirect(RouteStore.home); - } - }); - const EndpointHandlerMap = new Map([ ["create", (api, params) => api.create(params)], ["retrieve", (api, params) => api.get(params)], @@ -628,7 +182,7 @@ function routeSetter(router: RouteManager) { let failed: GooglePhotosUploadFailure[] = []; const batched = BatchedArray.from(media, { batchSize: 25 }); - const newMediaItems = await batched.batchedMapPatientInterval( + const newMediaItems = await batched.batchedMapPatientInterval( { magnitude: 100, unit: TimeUnit.Milliseconds }, async (batch, collector, { completedBatches }) => { for (let index = 0; index < batch.length; index++) { @@ -668,31 +222,6 @@ function routeSetter(router: RouteManager) { const downloadError = "Encountered an error while executing downloads."; const requestError = "Unable to execute download: the body's media items were malformed."; - const deletionPermissionError = "Cannot perform specialized delete outside of the development environment!"; - - router.addSupervisedRoute({ - method: Method.GET, - subscription: "/deleteWithAux", - onValidation: async ({ res, isRelease }) => { - if (isRelease) { - return _permission_denied(res, 
deletionPermissionError); - } - await Database.Auxiliary.DeleteAll(); - res.redirect(RouteStore.delete); - } - }); - - router.addSupervisedRoute({ - method: Method.GET, - subscription: "/deleteWithGoogleCredentials", - onValidation: async ({ res, isRelease }) => { - if (isRelease) { - return _permission_denied(res, deletionPermissionError); - } - await Database.Auxiliary.GoogleAuthenticationToken.DeleteAll(); - res.redirect(RouteStore.delete); - } - }); const UploadError = (count: number) => `Unable to upload ${count} images to Dash's server`; router.addSupervisedRoute({ @@ -726,4 +255,9 @@ function routeSetter(router: RouteManager) { _invalid(res, requestError); } }); -} \ No newline at end of file +} + +(async function start() { + await preliminaryFunctions(); + await initializeServer({ listenAtPort: 1050, routeSetter }); +})(); -- cgit v1.2.3-70-g09d2 From ed0657b6b0ca1566af06038e418feda8a680419d Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Sat, 9 Nov 2019 16:23:15 -0500 Subject: added more partitions --- src/server/index.ts | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) (limited to 'src') diff --git a/src/server/index.ts b/src/server/index.ts index 8eb88cf8b..aec301a74 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -26,7 +26,9 @@ import UploadManager from "./ApiManagers/UploadManager"; export const publicDirectory = __dirname + RouteStore.public; export const filesDirectory = publicDirectory + "/files/"; export enum Partitions { - PdfText = "pdf_text" + pdf_text, + images, + videos } /** -- cgit v1.2.3-70-g09d2 From ee4910b1a90284c71ebdaa5fbd9243148ae113f6 Mon Sep 17 00:00:00 2001 From: Mohammad Amoush Date: Sat, 9 Nov 2019 16:29:20 -0500 Subject: initial --- src/server/ApiManagers/GeneralGoogleManager.ts | 48 ++++++++++ src/server/ApiManagers/GooglePhotosManager.ts | 108 ++++++++++++++++++++++ src/server/index.ts | 123 ------------------------- 3 files changed, 156 insertions(+), 123 deletions(-) create mode 100644 src/server/ApiManagers/GeneralGoogleManager.ts create mode 100644 src/server/ApiManagers/GooglePhotosManager.ts (limited to 'src') diff --git a/src/server/ApiManagers/GeneralGoogleManager.ts b/src/server/ApiManagers/GeneralGoogleManager.ts new file mode 100644 index 000000000..cb37b0dce --- /dev/null +++ b/src/server/ApiManagers/GeneralGoogleManager.ts @@ -0,0 +1,48 @@ +import ApiManager, { Registration } from "./ApiManager"; +import { Method, _permission_denied } from "../RouteManager"; +import { uploadDirectory } from ".."; +import { path } from "animejs"; +import { RouteStore } from "../RouteStore"; +import { GoogleApiServerUtils } from "../apis/google/GoogleApiServerUtils"; +import { Database } from "../database"; + +const deletionPermissionError = "Cannot perform specialized delete outside of the development environment!"; + +export default class GeneralGoogleManager extends ApiManager { + + protected initialize(register: Registration): void { + + register({ + method: Method.GET, + subscription: RouteStore.readGoogleAccessToken, + onValidation: async ({ user, res }) => { + const userId = user.id; + const token = await GoogleApiServerUtils.retrieveAccessToken(userId); + if (!token) { + return res.send(GoogleApiServerUtils.generateAuthenticationUrl()); + } + return res.send(token); + } + }); + + register({ + method: Method.POST, + subscription: RouteStore.writeGoogleAccessToken, + onValidation: async ({ user, req, res }) => { + res.send(await GoogleApiServerUtils.processNewUser(user.id, req.body.authenticationCode)); + } + }); + + register({ + 
method: Method.GET, + subscription: "/deleteWithGoogleCredentials", + onValidation: async ({ res, isRelease }) => { + if (isRelease) { + return _permission_denied(res, deletionPermissionError); + } + await Database.Auxiliary.GoogleAuthenticationToken.DeleteAll(); + res.redirect(RouteStore.delete); + } + }); + } +} \ No newline at end of file diff --git a/src/server/ApiManagers/GooglePhotosManager.ts b/src/server/ApiManagers/GooglePhotosManager.ts new file mode 100644 index 000000000..b5e9caa38 --- /dev/null +++ b/src/server/ApiManagers/GooglePhotosManager.ts @@ -0,0 +1,108 @@ +import ApiManager, { Registration } from "./ApiManager"; +import { Method, _error, _success, _invalid } from "../RouteManager"; +import { uploadDirectory, NewMediaItem } from ".."; +import { path } from "animejs"; +import { RouteStore } from "../RouteStore"; +import { GoogleApiServerUtils } from "../apis/google/GoogleApiServerUtils"; +import { BatchedArray, TimeUnit } from "array-batcher"; +import { GooglePhotosUploadUtils } from "../apis/google/GooglePhotosUploadUtils"; +import { MediaItem } from "../apis/google/SharedTypes"; +import { Opt } from "../../new_fields/Doc"; +import { DashUploadUtils } from "../DashUploadUtils"; +import { Database } from "../database"; +import { prefix } from "@fortawesome/free-solid-svg-icons"; + +const authenticationError = "Unable to authenticate Google credentials before uploading to Google Photos!"; +const mediaError = "Unable to convert all uploaded bytes to media items!"; +const UploadError = (count: number) => `Unable to upload ${count} images to Dash's server`; +const requestError = "Unable to execute download: the body's media items were malformed."; +const downloadError = "Encountered an error while executing downloads."; +interface GooglePhotosUploadFailure { + batch: number; + index: number; + url: string; + reason: string; +} + +export default class GooglePhotosManager extends ApiManager { + + protected initialize(register: Registration): void { + + register({ + method: Method.POST, + subscription: RouteStore.googlePhotosMediaUpload, + onValidation: async ({ user, req, res }) => { + const { media } = req.body; + + const token = await GoogleApiServerUtils.retrieveAccessToken(user.id); + if (!token) { + return _error(res, authenticationError); + } + + let failed: GooglePhotosUploadFailure[] = []; + const batched = BatchedArray.from(media, { batchSize: 25 }); + const newMediaItems = await batched.batchedMapPatientInterval( + { magnitude: 100, unit: TimeUnit.Milliseconds }, + async (batch, collector, { completedBatches }) => { + for (let index = 0; index < batch.length; index++) { + const { url, description } = batch[index]; + const fail = (reason: string) => failed.push({ reason, batch: completedBatches + 1, index, url }); + const uploadToken = await GooglePhotosUploadUtils.DispatchGooglePhotosUpload(token, url).catch(fail); + if (!uploadToken) { + fail(`${path.extname(url)} is not an accepted extension`); + } else { + collector.push({ + description, + simpleMediaItem: { uploadToken } + }); + } + } + } + ); + + const failedCount = failed.length; + if (failedCount) { + console.error(`Unable to upload ${failedCount} image${failedCount === 1 ? 
"" : "s"} to Google's servers`); + console.log(failed.map(({ reason, batch, index, url }) => `@${batch}.${index}: ${url} failed:\n${reason}`).join('\n\n')); + } + + return GooglePhotosUploadUtils.CreateMediaItems(token, newMediaItems, req.body.album).then( + results => _success(res, { results, failed }), + error => _error(res, mediaError, error) + ); + } + }); + + register({ + method: Method.POST, + subscription: RouteStore.googlePhotosMediaDownload, + onValidation: async ({ req, res }) => { + const contents: { mediaItems: MediaItem[] } = req.body; + let failed = 0; + if (contents) { + const completed: Opt[] = []; + for (let item of contents.mediaItems) { + const { contentSize, ...attributes } = await DashUploadUtils.InspectImage(item.baseUrl); + const found: Opt = await Database.Auxiliary.QueryUploadHistory(contentSize!); + if (!found) { + const upload = await DashUploadUtils.UploadInspectedImage({ contentSize, ...attributes }, item.filename, prefix).catch(error => _error(res, downloadError, error)); + if (upload) { + completed.push(upload); + await Database.Auxiliary.LogUpload(upload); + } else { + failed++; + } + } else { + completed.push(found); + } + } + if (failed) { + return _error(res, UploadError(failed)); + } + return _success(res, completed); + } + _invalid(res, requestError); + } + }); + } +} \ No newline at end of file diff --git a/src/server/index.ts b/src/server/index.ts index 25697e71f..9a5099d0d 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -585,89 +585,11 @@ function routeSetter(router: RouteManager) { } }); - router.addSupervisedRoute({ - method: Method.GET, - subscription: RouteStore.readGoogleAccessToken, - onValidation: async ({ user, res }) => { - const userId = user.id; - const token = await GoogleApiServerUtils.retrieveAccessToken(userId); - if (!token) { - return res.send(GoogleApiServerUtils.generateAuthenticationUrl()); - } - return res.send(token); - } - }); - - router.addSupervisedRoute({ - method: Method.POST, - subscription: RouteStore.writeGoogleAccessToken, - onValidation: async ({ user, req, res }) => { - res.send(await GoogleApiServerUtils.processNewUser(user.id, req.body.authenticationCode)); - } - }); - - const authenticationError = "Unable to authenticate Google credentials before uploading to Google Photos!"; - const mediaError = "Unable to convert all uploaded bytes to media items!"; - interface GooglePhotosUploadFailure { - batch: number; - index: number; - url: string; - reason: string; - } - - router.addSupervisedRoute({ - method: Method.POST, - subscription: RouteStore.googlePhotosMediaUpload, - onValidation: async ({ user, req, res }) => { - const { media } = req.body; - - const token = await GoogleApiServerUtils.retrieveAccessToken(user.id); - if (!token) { - return _error(res, authenticationError); - } - - let failed: GooglePhotosUploadFailure[] = []; - const batched = BatchedArray.from(media, { batchSize: 25 }); - const newMediaItems = await batched.batchedMapPatientInterval( - { magnitude: 100, unit: TimeUnit.Milliseconds }, - async (batch, collector, { completedBatches }) => { - for (let index = 0; index < batch.length; index++) { - const { url, description } = batch[index]; - const fail = (reason: string) => failed.push({ reason, batch: completedBatches + 1, index, url }); - const uploadToken = await GooglePhotosUploadUtils.DispatchGooglePhotosUpload(token, url).catch(fail); - if (!uploadToken) { - fail(`${path.extname(url)} is not an accepted extension`); - } else { - collector.push({ - description, - simpleMediaItem: { 
uploadToken } - }); - } - } - } - ); - - const failedCount = failed.length; - if (failedCount) { - console.error(`Unable to upload ${failedCount} image${failedCount === 1 ? "" : "s"} to Google's servers`); - console.log(failed.map(({ reason, batch, index, url }) => `@${batch}.${index}: ${url} failed:\n${reason}`).join('\n\n')); - } - - return GooglePhotosUploadUtils.CreateMediaItems(token, newMediaItems, req.body.album).then( - results => _success(res, { results, failed }), - error => _error(res, mediaError, error) - ); - } - }); - interface MediaItem { baseUrl: string; filename: string; } const prefix = "google_photos_"; - - const downloadError = "Encountered an error while executing downloads."; - const requestError = "Unable to execute download: the body's media items were malformed."; const deletionPermissionError = "Cannot perform specialized delete outside of the development environment!"; router.addSupervisedRoute({ @@ -681,49 +603,4 @@ function routeSetter(router: RouteManager) { res.redirect(RouteStore.delete); } }); - - router.addSupervisedRoute({ - method: Method.GET, - subscription: "/deleteWithGoogleCredentials", - onValidation: async ({ res, isRelease }) => { - if (isRelease) { - return _permission_denied(res, deletionPermissionError); - } - await Database.Auxiliary.GoogleAuthenticationToken.DeleteAll(); - res.redirect(RouteStore.delete); - } - }); - - const UploadError = (count: number) => `Unable to upload ${count} images to Dash's server`; - router.addSupervisedRoute({ - method: Method.POST, - subscription: RouteStore.googlePhotosMediaDownload, - onValidation: async ({ req, res }) => { - const contents: { mediaItems: MediaItem[] } = req.body; - let failed = 0; - if (contents) { - const completed: Opt[] = []; - for (let item of contents.mediaItems) { - const { contentSize, ...attributes } = await DashUploadUtils.InspectImage(item.baseUrl); - const found: Opt = await Database.Auxiliary.QueryUploadHistory(contentSize!); - if (!found) { - const upload = await DashUploadUtils.UploadInspectedImage({ contentSize, ...attributes }, item.filename, prefix).catch(error => _error(res, downloadError, error)); - if (upload) { - completed.push(upload); - await Database.Auxiliary.LogUpload(upload); - } else { - failed++; - } - } else { - completed.push(found); - } - } - if (failed) { - return _error(res, UploadError(failed)); - } - return _success(res, completed); - } - _invalid(res, requestError); - } - }); } \ No newline at end of file -- cgit v1.2.3-70-g09d2 From 36ad83493d2bd58dc6fe62df6002789ccc1b06a1 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Sun, 10 Nov 2019 14:56:58 -0500 Subject: no more RouteStore --- src/Utils.ts | 3 +- src/client/apis/GoogleAuthenticationManager.tsx | 5 +-- .../apis/google_docs/GoogleApiClientUtils.ts | 7 ++-- .../apis/google_docs/GooglePhotosClientUtils.ts | 5 +-- src/client/cognitive_services/CognitiveServices.ts | 3 +- src/client/util/History.ts | 3 +- .../util/Import & Export/DirectoryImportBox.tsx | 3 +- src/client/util/Import & Export/ImageUtils.ts | 7 ++-- src/client/util/SharingManager.tsx | 3 +- src/client/views/MainView.tsx | 5 +-- src/client/views/collections/CollectionSubView.tsx | 4 +- src/client/views/nodes/ImageBox.tsx | 3 +- src/client/views/nodes/VideoBox.tsx | 3 +- src/client/views/search/SearchBox.tsx | 3 +- src/mobile/ImageUpload.tsx | 3 +- src/new_fields/RichTextUtils.ts | 3 +- src/server/ApiManagers/DeleteManager.ts | 13 +++---- src/server/ApiManagers/ExportManager.ts | 5 +-- src/server/ApiManagers/PDFManager.ts | 2 +- 
src/server/ApiManagers/UploadManager.ts | 15 +++----- src/server/ApiManagers/UserManager.ts | 7 ++-- src/server/ApiManagers/UtilManager.ts | 7 ++++ src/server/Initialization.ts | 28 +++++++------- src/server/RouteManager.ts | 5 +-- src/server/RouteStore.ts | 45 ---------------------- src/server/RouteSubscriber.ts | 2 +- src/server/authentication/config/passport.ts | 5 +-- .../authentication/controllers/user_controller.ts | 32 +++++++-------- .../authentication/models/current_user_utils.ts | 7 ++-- src/server/index.ts | 26 ++++++------- 30 files changed, 96 insertions(+), 166 deletions(-) delete mode 100644 src/server/RouteStore.ts (limited to 'src') diff --git a/src/Utils.ts b/src/Utils.ts index 9a2f01f80..abff2eaba 100644 --- a/src/Utils.ts +++ b/src/Utils.ts @@ -2,7 +2,6 @@ import v4 = require('uuid/v4'); import v5 = require("uuid/v5"); import { Socket } from 'socket.io'; import { Message } from './server/Message'; -import { RouteStore } from './server/RouteStore'; export namespace Utils { @@ -46,7 +45,7 @@ export namespace Utils { } export function CorsProxy(url: string): string { - return prepend(RouteStore.corsProxy + "/") + encodeURIComponent(url); + return prepend("/corsProxy/") + encodeURIComponent(url); } export function CopyText(text: string) { diff --git a/src/client/apis/GoogleAuthenticationManager.tsx b/src/client/apis/GoogleAuthenticationManager.tsx index 1ec9d8412..ae77c4b7b 100644 --- a/src/client/apis/GoogleAuthenticationManager.tsx +++ b/src/client/apis/GoogleAuthenticationManager.tsx @@ -4,7 +4,6 @@ import * as React from "react"; import MainViewModal from "../views/MainViewModal"; import { Opt } from "../../new_fields/Doc"; import { Networking } from "../Network"; -import { RouteStore } from "../../server/RouteStore"; import "./GoogleAuthenticationManager.scss"; const AuthenticationUrl = "https://accounts.google.com/o/oauth2/v2/auth"; @@ -31,7 +30,7 @@ export default class GoogleAuthenticationManager extends React.Component<{}> { } public fetchOrGenerateAccessToken = async () => { - let response = await Networking.FetchFromServer(RouteStore.readGoogleAccessToken); + let response = await Networking.FetchFromServer("/readGoogleAccessToken"); // if this is an authentication url, activate the UI to register the new access token if (new RegExp(AuthenticationUrl).test(response)) { this.isOpen = true; @@ -44,7 +43,7 @@ export default class GoogleAuthenticationManager extends React.Component<{}> { return; } const { access_token, avatar, name } = await Networking.PostToServer( - RouteStore.writeGoogleAccessToken, + "/writeGoogleAccessToken", { authenticationCode } ); runInAction(() => { diff --git a/src/client/apis/google_docs/GoogleApiClientUtils.ts b/src/client/apis/google_docs/GoogleApiClientUtils.ts index 183679317..26c7f8d2e 100644 --- a/src/client/apis/google_docs/GoogleApiClientUtils.ts +++ b/src/client/apis/google_docs/GoogleApiClientUtils.ts @@ -1,5 +1,4 @@ import { docs_v1, slides_v1 } from "googleapis"; -import { RouteStore } from "../../../server/RouteStore"; import { Opt } from "../../../new_fields/Doc"; import { isArray } from "util"; import { EditorState } from "prosemirror-state"; @@ -77,7 +76,7 @@ export namespace GoogleApiClientUtils { * @returns the documentId of the newly generated document, or undefined if the creation process fails. 
*/ export const create = async (options: CreateOptions): Promise => { - const path = `${RouteStore.googleDocs}/Documents/${Actions.Create}`; + const path = `/googleDocs/Documents/${Actions.Create}`; const parameters = { requestBody: { title: options.title || `Dash Export (${new Date().toDateString()})` @@ -154,7 +153,7 @@ export namespace GoogleApiClientUtils { } export const retrieve = async (options: RetrieveOptions): Promise => { - const path = `${RouteStore.googleDocs}/Documents/${Actions.Retrieve}`; + const path = `/googleDocs/Documents/${Actions.Retrieve}`; try { const parameters = { documentId: options.documentId }; const schema: RetrievalResult = await Networking.PostToServer(path, parameters); @@ -165,7 +164,7 @@ export namespace GoogleApiClientUtils { }; export const update = async (options: UpdateOptions): Promise => { - const path = `${RouteStore.googleDocs}/Documents/${Actions.Update}`; + const path = `/googleDocs/Documents/${Actions.Update}`; const parameters = { documentId: options.documentId, requestBody: { diff --git a/src/client/apis/google_docs/GooglePhotosClientUtils.ts b/src/client/apis/google_docs/GooglePhotosClientUtils.ts index 402fc64b5..bf8897061 100644 --- a/src/client/apis/google_docs/GooglePhotosClientUtils.ts +++ b/src/client/apis/google_docs/GooglePhotosClientUtils.ts @@ -1,5 +1,4 @@ import { Utils } from "../../../Utils"; -import { RouteStore } from "../../../server/RouteStore"; import { ImageField } from "../../../new_fields/URLField"; import { Cast, StrCast } from "../../../new_fields/Types"; import { Doc, Opt, DocListCastAsync } from "../../../new_fields/Doc"; @@ -307,7 +306,7 @@ export namespace GooglePhotos { }; export const WriteMediaItemsToServer = async (body: { mediaItems: any[] }): Promise => { - const uploads = await Networking.PostToServer(RouteStore.googlePhotosMediaDownload, body); + const uploads = await Networking.PostToServer("/googlePhotosMediaDownload", body); return uploads; }; @@ -345,7 +344,7 @@ export namespace GooglePhotos { media.push({ url, description }); } if (media.length) { - const results = await Networking.PostToServer(RouteStore.googlePhotosMediaUpload, { media, album }); + const results = await Networking.PostToServer("/googlePhotosMediaUpload", { media, album }); return results; } }; diff --git a/src/client/cognitive_services/CognitiveServices.ts b/src/client/cognitive_services/CognitiveServices.ts index 08fcb4883..af5fb39fc 100644 --- a/src/client/cognitive_services/CognitiveServices.ts +++ b/src/client/cognitive_services/CognitiveServices.ts @@ -2,7 +2,6 @@ import * as request from "request-promise"; import { Doc, Field, Opt } from "../../new_fields/Doc"; import { Cast } from "../../new_fields/Types"; import { Docs } from "../documents/Documents"; -import { RouteStore } from "../../server/RouteStore"; import { Utils } from "../../Utils"; import { InkData } from "../../new_fields/InkField"; import { UndoManager } from "../util/UndoManager"; @@ -39,7 +38,7 @@ export enum Confidence { export namespace CognitiveServices { const ExecuteQuery = async (service: Service, manager: APIManager, data: D): Promise => { - return fetch(Utils.prepend(`${RouteStore.cognitiveServices}/${service}`)).then(async response => { + return fetch(Utils.prepend(`cognitiveServices/${service}`)).then(async response => { let apiKey = await response.text(); if (!apiKey) { console.log(`No API key found for ${service}: ensure index.ts has access to a .env file in your root directory`); diff --git a/src/client/util/History.ts b/src/client/util/History.ts 
index 899abbe40..1c51236cb 100644 --- a/src/client/util/History.ts +++ b/src/client/util/History.ts @@ -1,6 +1,5 @@ import { Doc, Opt, Field } from "../../new_fields/Doc"; import { DocServer } from "../DocServer"; -import { RouteStore } from "../../server/RouteStore"; import { MainView } from "../views/MainView"; import * as qs from 'query-string'; import { Utils, OmitKeys } from "../../Utils"; @@ -26,7 +25,7 @@ export namespace HistoryUtil { // const handlers: ((state: ParsedUrl | null) => void)[] = []; function onHistory(e: PopStateEvent) { - if (window.location.pathname !== RouteStore.home) { + if (window.location.pathname !== "/home") { const url = e.state as ParsedUrl || parseUrl(window.location); if (url) { switch (url.type) { diff --git a/src/client/util/Import & Export/DirectoryImportBox.tsx b/src/client/util/Import & Export/DirectoryImportBox.tsx index 2e0ba25eb..437e7766b 100644 --- a/src/client/util/Import & Export/DirectoryImportBox.tsx +++ b/src/client/util/Import & Export/DirectoryImportBox.tsx @@ -1,7 +1,6 @@ import "fs"; import React = require("react"); import { Doc, DocListCast, DocListCastAsync, Opt } from "../../../new_fields/Doc"; -import { RouteStore } from "../../../server/RouteStore"; import { action, observable, autorun, runInAction, computed, reaction, IReactionDisposer } from "mobx"; import { FieldViewProps, FieldView } from "../../views/nodes/FieldView"; import Measure, { ContentRect } from "react-measure"; @@ -124,7 +123,7 @@ export default class DirectoryImportBox extends React.Component formData.append(Utils.GenerateGuid(), file); }); - collector.push(...(await Networking.PostFormDataToServer(RouteStore.upload, formData))); + collector.push(...(await Networking.PostFormDataToServer("/upload", formData))); runInAction(() => this.completed += batch.length); }); diff --git a/src/client/util/Import & Export/ImageUtils.ts b/src/client/util/Import & Export/ImageUtils.ts index 914f4870a..ca80f3bca 100644 --- a/src/client/util/Import & Export/ImageUtils.ts +++ b/src/client/util/Import & Export/ImageUtils.ts @@ -1,7 +1,6 @@ -import { Doc, DocListCast, DocListCastAsync, Opt } from "../../../new_fields/Doc"; +import { Doc } from "../../../new_fields/Doc"; import { ImageField } from "../../../new_fields/URLField"; import { Cast, StrCast } from "../../../new_fields/Types"; -import { RouteStore } from "../../../server/RouteStore"; import { Docs } from "../../documents/Documents"; import { Networking } from "../../Network"; import { Id } from "../../../new_fields/FieldSymbols"; @@ -15,7 +14,7 @@ export namespace ImageUtils { return false; } const source = field.url.href; - const response = await Networking.PostToServer(RouteStore.inspectImage, { source }); + const response = await Networking.PostToServer("/inspectImage", { source }); const { error, data } = response.exifData; document.exif = error || Docs.Get.DocumentHierarchyFromJson(data); return data !== undefined; @@ -23,7 +22,7 @@ export namespace ImageUtils { export const ExportHierarchyToFileSystem = async (collection: Doc): Promise => { const a = document.createElement("a"); - a.href = Utils.prepend(`${RouteStore.imageHierarchyExport}/${collection[Id]}`); + a.href = Utils.prepend(`imageHierarchyExport/${collection[Id]}`); a.download = `Dash Export [${StrCast(collection.title)}].zip`; a.click(); }; diff --git a/src/client/util/SharingManager.tsx b/src/client/util/SharingManager.tsx index 2082d6324..cc1d628b1 100644 --- a/src/client/util/SharingManager.tsx +++ b/src/client/util/SharingManager.tsx @@ -4,7 +4,6 @@ import 
MainViewModal from "../views/MainViewModal"; import { Doc, Opt, DocCastAsync } from "../../new_fields/Doc"; import { DocServer } from "../DocServer"; import { Cast, StrCast } from "../../new_fields/Types"; -import { RouteStore } from "../../server/RouteStore"; import * as RequestPromise from "request-promise"; import { Utils } from "../../Utils"; import "./SharingManager.scss"; @@ -104,7 +103,7 @@ export default class SharingManager extends React.Component<{}> { } populateUsers = async () => { - let userList = await RequestPromise.get(Utils.prepend(RouteStore.getUsers)); + let userList = await RequestPromise.get(Utils.prepend("/getUsers")); const raw = JSON.parse(userList) as User[]; const evaluating = raw.map(async user => { let isCandidate = user.email !== Doc.CurrentUserEmail; diff --git a/src/client/views/MainView.tsx b/src/client/views/MainView.tsx index 39585113b..0c5a1003b 100644 --- a/src/client/views/MainView.tsx +++ b/src/client/views/MainView.tsx @@ -12,7 +12,6 @@ import { List } from '../../new_fields/List'; import { listSpec } from '../../new_fields/Schema'; import { Cast, FieldValue, StrCast } from '../../new_fields/Types'; import { CurrentUserUtils } from '../../server/authentication/models/current_user_utils'; -import { RouteStore } from '../../server/RouteStore'; import { emptyFunction, returnEmptyString, returnFalse, returnOne, returnTrue, Utils } from '../../Utils'; import GoogleAuthenticationManager from '../apis/GoogleAuthenticationManager'; import { DocServer } from '../DocServer'; @@ -74,7 +73,7 @@ export class MainView extends React.Component { this._urlState = HistoryUtil.parseUrl(window.location) || {} as any; // causes errors to be generated when modifying an observable outside of an action configure({ enforceActions: "observed" }); - if (window.location.pathname !== RouteStore.home) { + if (window.location.pathname !== "/home") { let pathname = window.location.pathname.substr(1).split("/"); if (pathname.length > 1) { let type = pathname[0]; @@ -395,7 +394,7 @@ export class MainView extends React.Component { zoomToScale={emptyFunction} getScale={returnOne}> - ; diff --git a/src/client/views/collections/CollectionSubView.tsx b/src/client/views/collections/CollectionSubView.tsx index 6e8e4fa12..306f8e052 100644 --- a/src/client/views/collections/CollectionSubView.tsx +++ b/src/client/views/collections/CollectionSubView.tsx @@ -8,7 +8,6 @@ import { listSpec } from "../../../new_fields/Schema"; import { ScriptField } from "../../../new_fields/ScriptField"; import { Cast } from "../../../new_fields/Types"; import { CurrentUserUtils } from "../../../server/authentication/models/current_user_utils"; -import { RouteStore } from "../../../server/RouteStore"; import { Utils } from "../../../Utils"; import { DocServer } from "../../DocServer"; import { DocumentType } from "../../documents/DocumentTypes"; @@ -243,7 +242,6 @@ export function CollectionSubView(schemaCtor: (doc: Doc) => T) { let promises: Promise[] = []; // tslint:disable-next-line:prefer-for-of for (let i = 0; i < e.dataTransfer.items.length; i++) { - const upload = window.location.origin + RouteStore.upload; let item = e.dataTransfer.items[i]; if (item.kind === "string" && item.type.indexOf("uri") !== -1) { let str: string; @@ -268,7 +266,7 @@ export function CollectionSubView(schemaCtor: (doc: Doc) => T) { } let dropFileName = file ? 
file.name : "-empty-"; - let prom = fetch(upload, { + let prom = fetch(Utils.prepend("/upload"), { method: 'POST', body: formData }).then(async (res: Response) => { diff --git a/src/client/views/nodes/ImageBox.tsx b/src/client/views/nodes/ImageBox.tsx index 9f39eccea..07fd832be 100644 --- a/src/client/views/nodes/ImageBox.tsx +++ b/src/client/views/nodes/ImageBox.tsx @@ -12,7 +12,6 @@ import { createSchema, listSpec, makeInterface } from '../../../new_fields/Schem import { ComputedField } from '../../../new_fields/ScriptField'; import { BoolCast, Cast, FieldValue, NumCast, StrCast } from '../../../new_fields/Types'; import { AudioField, ImageField } from '../../../new_fields/URLField'; -import { RouteStore } from '../../../server/RouteStore'; import { Utils, returnOne, emptyFunction } from '../../../Utils'; import { CognitiveServices, Confidence, Service, Tag } from '../../cognitive_services/CognitiveServices'; import { Docs } from '../../documents/Documents'; @@ -152,7 +151,7 @@ export class ImageBox extends DocAnnotatableComponent { if (isRelease) { return _permission_denied(res, deletionPermissionError); } await WebSocket.deleteFields(); - res.redirect(RouteStore.home); + res.redirect("/home"); } }); register({ method: Method.GET, - subscription: RouteStore.deleteAll, + subscription: "/deleteAll", onValidation: async ({ res, isRelease }) => { if (isRelease) { return _permission_denied(res, deletionPermissionError); } await WebSocket.deleteAll(); - res.redirect(RouteStore.home); + res.redirect("/home"); } }); @@ -41,7 +40,7 @@ export default class DeleteManager extends ApiManager { return _permission_denied(res, deletionPermissionError); } await Database.Auxiliary.DeleteAll(); - res.redirect(RouteStore.delete); + res.redirect("/delete"); } }); @@ -53,7 +52,7 @@ export default class DeleteManager extends ApiManager { return _permission_denied(res, deletionPermissionError); } await Database.Auxiliary.GoogleAuthenticationToken.DeleteAll(); - res.redirect(RouteStore.delete); + res.redirect("/delete"); } }); diff --git a/src/server/ApiManagers/ExportManager.ts b/src/server/ApiManagers/ExportManager.ts index d42db1056..fc6ba0d22 100644 --- a/src/server/ApiManagers/ExportManager.ts +++ b/src/server/ApiManagers/ExportManager.ts @@ -1,7 +1,6 @@ import ApiManager, { Registration } from "./ApiManager"; import { Method } from "../RouteManager"; import RouteSubscriber from "../RouteSubscriber"; -import { RouteStore } from "../RouteStore"; import * as Archiver from 'archiver'; import * as express from 'express'; import { Database } from "../database"; @@ -32,7 +31,7 @@ export default class DownloadManager extends ApiManager { */ register({ method: Method.GET, - subscription: new RouteSubscriber(RouteStore.imageHierarchyExport).add('docId'), + subscription: new RouteSubscriber("imageHierarchyExport").add('docId'), onValidation: async ({ req, res }) => { const id = req.params.docId; const hierarchy: Hierarchy = {}; @@ -43,7 +42,7 @@ export default class DownloadManager extends ApiManager { register({ method: Method.GET, - subscription: new RouteSubscriber("/downloadId").add("docId"), + subscription: new RouteSubscriber("downloadId").add("docId"), onValidation: async ({ req, res }) => { return BuildAndDispatchZip(res, async zip => { const { id, docs, files } = await getDocs(req.params.docId); diff --git a/src/server/ApiManagers/PDFManager.ts b/src/server/ApiManagers/PDFManager.ts index f328557b4..632b4965a 100644 --- a/src/server/ApiManagers/PDFManager.ts +++ b/src/server/ApiManagers/PDFManager.ts @@ 
-15,7 +15,7 @@ export default class PDFManager extends ApiManager { register({ method: Method.GET, - subscription: new RouteSubscriber("/thumbnail").add("filename"), + subscription: new RouteSubscriber("thumbnail").add("filename"), onValidation: ({ req, res }) => { let filename = req.params.filename; let noExt = filename.substring(0, filename.length - ".png".length); diff --git a/src/server/ApiManagers/UploadManager.ts b/src/server/ApiManagers/UploadManager.ts index 38635eda5..01abdab54 100644 --- a/src/server/ApiManagers/UploadManager.ts +++ b/src/server/ApiManagers/UploadManager.ts @@ -6,7 +6,6 @@ var AdmZip = require('adm-zip'); import * as path from 'path'; import { createReadStream, createWriteStream, unlink, readFileSync } from "fs"; import { publicDirectory, filesDirectory, Partitions } from ".."; -import { RouteStore } from "../RouteStore"; import { Database } from "../database"; import { DashUploadUtils } from "../DashUploadUtils"; import { Opt } from "../../new_fields/Doc"; @@ -85,12 +84,8 @@ export default class UploadManager extends ApiManager { let dirname = path.dirname(entry.entryName) + "/"; let extname = path.extname(entry.entryName); let basename = path.basename(entry.entryName).split(".")[0]; - // zip.extractEntryTo(dirname + basename + "_o" + extname, __dirname + RouteStore.public, true, false); - // zip.extractEntryTo(dirname + basename + "_s" + extname, __dirname + RouteStore.public, true, false); - // zip.extractEntryTo(dirname + basename + "_m" + extname, __dirname + RouteStore.public, true, false); - // zip.extractEntryTo(dirname + basename + "_l" + extname, __dirname + RouteStore.public, true, false); try { - zip.extractEntryTo(entry.entryName, __dirname + RouteStore.public, true, false); + zip.extractEntryTo(entry.entryName, publicDirectory, true, false); dirname = "/" + dirname; createReadStream(publicDirectory + dirname + basename + extname).pipe(createWriteStream(publicDirectory + dirname + basename + "_o" + extname)); @@ -131,7 +126,7 @@ export default class UploadManager extends ApiManager { register({ method: Method.POST, - subscription: RouteStore.upload, + subscription: "/upload", onValidation: async ({ req, res }) => { let form = new formidable.IncomingForm(); form.uploadDir = filesDirectory; @@ -147,7 +142,7 @@ export default class UploadManager extends ApiManager { let dataBuffer = readFileSync(filesDirectory + filename); const result: ParsedPDF = await pdf(dataBuffer); await new Promise((resolve, reject) => { - const path = filesDirectory + Partitions.PdfText + "/" + filename.substring(0, filename.length - ".pdf".length) + ".txt"; + const path = filesDirectory + Partitions.pdf_text + "/" + filename.substring(0, filename.length - ".pdf".length) + ".txt"; createWriteStream(path).write(result.text, error => { if (!error) { resolve(); @@ -171,7 +166,7 @@ export default class UploadManager extends ApiManager { register({ method: Method.POST, - subscription: RouteStore.inspectImage, + subscription: "/inspectImage", onValidation: async ({ req, res }) => { const { source } = req.body; if (typeof source === "string") { @@ -184,7 +179,7 @@ export default class UploadManager extends ApiManager { register({ method: Method.POST, - subscription: RouteStore.dataUriToImage, + subscription: "/uploadURI", onValidation: ({ req, res }) => { const uri = req.body.uri; const filename = req.body.name; diff --git a/src/server/ApiManagers/UserManager.ts b/src/server/ApiManagers/UserManager.ts index fe1ce7f2b..51a434fcf 100644 --- a/src/server/ApiManagers/UserManager.ts +++ 
b/src/server/ApiManagers/UserManager.ts @@ -1,7 +1,6 @@ import ApiManager, { Registration } from "./ApiManager"; import { Method } from "../RouteManager"; import { WebSocket } from "../Websocket/Websocket"; -import { RouteStore } from "../RouteStore"; import { Database } from "../database"; export default class UserManager extends ApiManager { @@ -10,7 +9,7 @@ export default class UserManager extends ApiManager { register({ method: Method.GET, - subscription: RouteStore.getUsers, + subscription: "/getUsers", onValidation: async ({ res }) => { const cursor = await Database.Instance.query({}, { email: 1, userDocumentId: 1 }, "users"); const results = await cursor.toArray(); @@ -20,13 +19,13 @@ export default class UserManager extends ApiManager { register({ method: Method.GET, - subscription: RouteStore.getUserDocumentId, + subscription: "/getUserDocumentId", onValidation: ({ res, user }) => res.send(user.userDocumentId) }); register({ method: Method.GET, - subscription: RouteStore.getCurrUser, + subscription: "/getCurrentUser", onValidation: ({ res, user }) => res.send(JSON.stringify(user)), onUnauthenticated: ({ res }) => res.send(JSON.stringify({ id: "__guest__", email: "" })) }); diff --git a/src/server/ApiManagers/UtilManager.ts b/src/server/ApiManagers/UtilManager.ts index 61cda2e9b..c1234be6c 100644 --- a/src/server/ApiManagers/UtilManager.ts +++ b/src/server/ApiManagers/UtilManager.ts @@ -2,11 +2,18 @@ import ApiManager, { Registration } from "./ApiManager"; import { Method } from "../RouteManager"; import { exec } from 'child_process'; import { command_line } from "../ActionUtilities"; +import RouteSubscriber from "../RouteSubscriber"; export default class UtilManager extends ApiManager { protected initialize(register: Registration): void { + register({ + method: Method.GET, + subscription: new RouteSubscriber("environment").add("key"), + onValidation: ({ req, res }) => res.send(process.env[req.params.key]) + }); + register({ method: Method.GET, subscription: "/pull", diff --git a/src/server/Initialization.ts b/src/server/Initialization.ts index fbb5ae7a6..306058d81 100644 --- a/src/server/Initialization.ts +++ b/src/server/Initialization.ts @@ -9,7 +9,6 @@ import flash = require('connect-flash'); import { Database } from './database'; import { getForgot, getLogin, getLogout, getReset, getSignup, postForgot, postLogin, postReset, postSignup } from './authentication/controllers/user_controller'; const MongoStore = require('connect-mongo')(session); -import { RouteStore } from './RouteStore'; import RouteManager from './RouteManager'; import * as webpack from 'webpack'; const config = require('../../webpack.config'); @@ -18,6 +17,8 @@ import * as wdm from 'webpack-dev-middleware'; import * as whm from 'webpack-hot-middleware'; import * as fs from 'fs'; import * as request from 'request'; +import RouteSubscriber from './RouteSubscriber'; +import { publicDirectory } from '.'; export type RouteSetter = (server: RouteManager) => void; export interface InitializationOptions { @@ -29,8 +30,8 @@ export default async function InitializeServer(options: InitializationOptions) { const { listenAtPort, routeSetter } = options; const server = buildWithMiddleware(express()); - server.use(express.static(__dirname + RouteStore.public)); - server.use(RouteStore.images, express.static(__dirname + RouteStore.public)); + server.use(express.static(publicDirectory)); + server.use("/images", express.static(publicDirectory)); server.use(wdm(compiler, { publicPath: config.output.publicPath })); 
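/*
 * Illustrative sketch, not part of the patch above: the UtilManager route registered earlier in
 * this hunk is built from new RouteSubscriber("environment").add("key"). Given the RouteSubscriber
 * change shown later in this same commit (the constructor now prepends a slash), that should
 * resolve to the Express path "/environment/:key", answered with process.env[key]. A minimal,
 * hypothetical client-side call might look like this; the helper name readEnvVar is an assumption.
 */
async function readEnvVar(key: string): Promise<string | undefined> {
    // e.g. readEnvVar("RELEASE") issues GET /environment/RELEASE against the Dash server
    const response = await fetch(`/environment/${key}`);
    const text = await response.text();
    // an unset variable comes back as an empty body, so normalize that to undefined
    return text.length > 0 ? text : undefined;
}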
server.use(whm(compiler)); @@ -87,24 +88,25 @@ function determineEnvironment() { } function registerAuthenticationRoutes(server: express.Express) { - server.get(RouteStore.signup, getSignup); - server.post(RouteStore.signup, postSignup); + server.get("/signup", getSignup); + server.post("/signup", postSignup); - server.get(RouteStore.login, getLogin); - server.post(RouteStore.login, postLogin); + server.get("/login", getLogin); + server.post("/login", postLogin); - server.get(RouteStore.logout, getLogout); + server.get("/logout", getLogout); - server.get(RouteStore.forgot, getForgot); - server.post(RouteStore.forgot, postForgot); + server.get("/forgotPassword", getForgot); + server.post("/forgotPassword", postForgot); - server.get(RouteStore.reset, getReset); - server.post(RouteStore.reset, postReset); + const reset = new RouteSubscriber("resetPassword").add("token").build; + server.get(reset, getReset); + server.post(reset, postReset); } function registerCorsProxy(server: express.Express) { const headerCharRegex = /[^\t\x20-\x7e\x80-\xff]/; - server.use(RouteStore.corsProxy, (req, res) => { + server.use("/corsProxy", (req, res) => { req.pipe(request(decodeURIComponent(req.url.substring(1)))).on("response", res => { const headers = Object.keys(res.headers); headers.forEach(headerName => { diff --git a/src/server/RouteManager.ts b/src/server/RouteManager.ts index c1d38327f..3aae5734a 100644 --- a/src/server/RouteManager.ts +++ b/src/server/RouteManager.ts @@ -1,5 +1,4 @@ import RouteSubscriber from "./RouteSubscriber"; -import { RouteStore } from "./RouteStore"; import { DashUserModel } from "./authentication/models/user_model"; import * as express from 'express'; @@ -67,10 +66,10 @@ export default class RouteManager { if (onUnauthenticated) { await tryExecute(onUnauthenticated, core); if (!res.headersSent) { - res.redirect(RouteStore.login); + res.redirect("/login"); } } else { - res.redirect(RouteStore.login); + res.redirect("/login"); } } setTimeout(() => { diff --git a/src/server/RouteStore.ts b/src/server/RouteStore.ts deleted file mode 100644 index a310d0c95..000000000 --- a/src/server/RouteStore.ts +++ /dev/null @@ -1,45 +0,0 @@ -// PREPEND ALL ROUTES WITH FORWARD SLASHES! 
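/*
 * Illustrative sketch, not part of the patch above: the "/corsProxy" middleware registered in this
 * hunk recovers its target with decodeURIComponent(req.url.substring(1)), so the client must
 * URI-encode the proxied URL into the path (the existing Utils.CorsProxy helper, visible later in
 * this series, does exactly that with encodeURIComponent). A hypothetical standalone version of
 * that client-side construction; corsProxiedUrl and the example origin are assumptions.
 */
function corsProxiedUrl(target: string, origin: string = window.location.origin): string {
    // corsProxiedUrl("https://en.wikipedia.org/wiki/Dash") yields something like
    // "http://localhost:1050/corsProxy/https%3A%2F%2Fen.wikipedia.org%2Fwiki%2FDash";
    // inside the mounted handler, req.url is then "/<encoded target>", which substring(1)
    // plus decodeURIComponent turns back into the original URL before piping the request.
    return `${origin}/corsProxy/${encodeURIComponent(target)}`;
}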
- -export enum RouteStore { - // GENERAL - root = "/", - home = "/home", - corsProxy = "/corsProxy", - delete = "/delete", - deleteAll = "/deleteAll", - pull = "/pull", - - // UPLOAD AND STATIC FILE SERVING - public = "/public", - upload = "/upload", - dataUriToImage = "/uploadURI", - images = "/images", - inspectImage = "/inspectImage", - imageHierarchyExport = "/imageHierarchyExport", - - // USER AND WORKSPACES - getCurrUser = "/getCurrentUser", - getUsers = "/getUsers", - getUserDocumentId = "/getUserDocumentId", - updateCursor = "/updateCursor", - - openDocumentWithId = "/doc/:docId", - - // AUTHENTICATION - signup = "/signup", - login = "/login", - logout = "/logout", - forgot = "/forgotpassword", - reset = "/reset/:token", - - // APIS - cognitiveServices = "/cognitiveservices", - googleDocs = "/googleDocs", - readGoogleAccessToken = "/readGoogleAccessToken", - writeGoogleAccessToken = "/writeGoogleAccessToken", - googlePhotosMediaUpload = "/googlePhotosMediaUpload", - googlePhotosMediaDownload = "/googlePhotosMediaDownload", - googleDocsGet = "/googleDocsGet", - checkGoogle = "/checkGoogleAuthentication" - -} \ No newline at end of file diff --git a/src/server/RouteSubscriber.ts b/src/server/RouteSubscriber.ts index e49be8af5..a1cf7c1c4 100644 --- a/src/server/RouteSubscriber.ts +++ b/src/server/RouteSubscriber.ts @@ -3,7 +3,7 @@ export default class RouteSubscriber { private requestParameters: string[] = []; constructor(root: string) { - this._root = root; + this._root = `/${root}`; } add(...parameters: string[]) { diff --git a/src/server/authentication/config/passport.ts b/src/server/authentication/config/passport.ts index 8915a4abf..0b15c3a36 100644 --- a/src/server/authentication/config/passport.ts +++ b/src/server/authentication/config/passport.ts @@ -3,7 +3,6 @@ import * as passportLocal from 'passport-local'; import _ from "lodash"; import { default as User } from '../models/user_model'; import { Request, Response, NextFunction } from "express"; -import { RouteStore } from '../../RouteStore'; const LocalStrategy = passportLocal.Strategy; @@ -35,13 +34,13 @@ export let isAuthenticated = (req: Request, res: Response, next: NextFunction) = if (req.isAuthenticated()) { return next(); } - return res.redirect(RouteStore.login); + return res.redirect("/login"); }; export let isAuthorized = (req: Request, res: Response, next: NextFunction) => { const provider = req.path.split("/").slice(-1)[0]; - if (_.find((req.user as any).tokens, { kind: provider })) { + if (_.find((req.user).tokens, { kind: provider })) { next(); } else { res.redirect(`/auth/${provider}`); diff --git a/src/server/authentication/controllers/user_controller.ts b/src/server/authentication/controllers/user_controller.ts index f5c6e1610..b2b9d33f6 100644 --- a/src/server/authentication/controllers/user_controller.ts +++ b/src/server/authentication/controllers/user_controller.ts @@ -10,10 +10,7 @@ import * as pug from 'pug'; import * as async from 'async'; import * as nodemailer from 'nodemailer'; import c = require("crypto"); -import { RouteStore } from "../../RouteStore"; import { Utils } from "../../../Utils"; -import { Schema } from "mongoose"; -import { Opt } from "../../../new_fields/Doc"; import { MailOptions } from "nodemailer/lib/stream-transport"; /** @@ -23,8 +20,7 @@ import { MailOptions } from "nodemailer/lib/stream-transport"; */ export let getSignup = (req: Request, res: Response) => { if (req.user) { - let user = req.user; - return res.redirect(RouteStore.home); + return res.redirect("/home"); } 
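/*
 * Illustrative sketch, not part of the patch above: with the RouteSubscriber constructor change in
 * this commit, callers now pass bare route names ("thumbnail", "downloadId", "resetPassword") and
 * the subscriber supplies the leading slash itself. The build getter is not shown in this series,
 * but judging from how its output is handed to Express ("/resetPassword/:token" for the reset
 * routes), it plausibly joins the root with ":"-prefixed parameters, roughly like this
 * self-contained stand-in:
 */
class RouteSubscriberSketch {
    private root: string;
    private requestParameters: string[] = [];
    constructor(root: string) { this.root = `/${root}`; }
    add(...parameters: string[]) { this.requestParameters.push(...parameters); return this; }
    get build() { return [this.root, ...this.requestParameters.map(p => `:${p}`)].join("/"); }
}
// new RouteSubscriberSketch("resetPassword").add("token").build === "/resetPassword/:token"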
res.render("signup.pug", { title: "Sign Up", @@ -45,7 +41,7 @@ export let postSignup = (req: Request, res: Response, next: NextFunction) => { const errors = req.validationErrors(); if (errors) { - return res.redirect(RouteStore.signup); + return res.redirect("/signup"); } const email = req.body.email as String; @@ -62,7 +58,7 @@ export let postSignup = (req: Request, res: Response, next: NextFunction) => { User.findOne({ email }, (err, existingUser) => { if (err) { return next(err); } if (existingUser) { - return res.redirect(RouteStore.login); + return res.redirect("/login"); } user.save((err: any) => { if (err) { return next(err); } @@ -81,7 +77,7 @@ let tryRedirectToTarget = (req: Request, res: Response) => { req.session.target = undefined; res.redirect(target); } else { - res.redirect(RouteStore.home); + res.redirect("/home"); } }; @@ -93,7 +89,7 @@ let tryRedirectToTarget = (req: Request, res: Response) => { export let getLogin = (req: Request, res: Response) => { if (req.user) { req.session!.target = undefined; - return res.redirect(RouteStore.home); + return res.redirect("/home"); } res.render("login.pug", { title: "Log In", @@ -115,13 +111,13 @@ export let postLogin = (req: Request, res: Response, next: NextFunction) => { if (errors) { req.flash("errors", "Unable to login at this time. Please try again."); - return res.redirect(RouteStore.signup); + return res.redirect("/signup"); } passport.authenticate("local", (err: Error, user: DashUserModel, info: IVerifyOptions) => { if (err) { next(err); return; } if (!user) { - return res.redirect(RouteStore.signup); + return res.redirect("/signup"); } req.logIn(user, (err) => { if (err) { next(err); return; } @@ -141,7 +137,7 @@ export let getLogout = (req: Request, res: Response) => { if (sess) { sess.destroy((err) => { if (err) { console.log(err); } }); } - res.redirect(RouteStore.login); + res.redirect("/login"); }; export let getForgot = function (req: Request, res: Response) { @@ -168,7 +164,7 @@ export let postForgot = function (req: Request, res: Response, next: NextFunctio User.findOne({ email }, function (err, user: DashUserModel) { if (!user) { // NO ACCOUNT WITH SUBMITTED EMAIL - res.redirect(RouteStore.forgot); + res.redirect("/forgotPassword"); return; } user.passwordResetToken = token; @@ -192,7 +188,7 @@ export let postForgot = function (req: Request, res: Response, next: NextFunctio subject: 'Dash Password Reset', text: 'You are receiving this because you (or someone else) have requested the reset of the password for your account.\n\n' + 'Please click on the following link, or paste this into your browser to complete the process:\n\n' + - 'http://' + req.headers.host + '/reset/' + token + '\n\n' + + 'http://' + req.headers.host + '/resetPassword/' + token + '\n\n' + 'If you did not request this, please ignore this email and your password will remain unchanged.\n' } as MailOptions; smtpTransport.sendMail(mailOptions, function (err: Error | null) { @@ -202,14 +198,14 @@ export let postForgot = function (req: Request, res: Response, next: NextFunctio } ], function (err) { if (err) return next(err); - res.redirect(RouteStore.forgot); + res.redirect("/forgotPassword"); }); }; export let getReset = function (req: Request, res: Response) { User.findOne({ passwordResetToken: req.params.token, passwordResetExpires: { $gt: Date.now() } }, function (err, user: DashUserModel) { if (!user || err) { - return res.redirect(RouteStore.forgot); + return res.redirect("/forgotPassword"); } res.render("reset.pug", { title: "Reset Password", @@ 
-239,7 +235,7 @@ export let postReset = function (req: Request, res: Response) { user.save(function (err) { if (err) { - res.redirect(RouteStore.login); + res.redirect("/login"); return; } req.logIn(user, function (err) { @@ -271,6 +267,6 @@ export let postReset = function (req: Request, res: Response) { }); } ], function (err) { - res.redirect(RouteStore.login); + res.redirect("/login"); }); }; \ No newline at end of file diff --git a/src/server/authentication/models/current_user_utils.ts b/src/server/authentication/models/current_user_utils.ts index 73cac879e..5a8815983 100644 --- a/src/server/authentication/models/current_user_utils.ts +++ b/src/server/authentication/models/current_user_utils.ts @@ -11,7 +11,6 @@ import { List } from "../../../new_fields/List"; import { listSpec } from "../../../new_fields/Schema"; import { Cast, StrCast, PromiseValue } from "../../../new_fields/Types"; import { Utils } from "../../../Utils"; -import { RouteStore } from "../../RouteStore"; import { ScriptField } from "../../../new_fields/ScriptField"; import { ButtonBox } from "../../../client/views/nodes/ButtonBox"; import { UndoManager } from "../../../client/util/UndoManager"; @@ -198,8 +197,8 @@ export class CurrentUserUtils { return doc; } - public static loadCurrentUser() { - return rp.get(Utils.prepend(RouteStore.getCurrUser)).then(response => { + public static async loadCurrentUser() { + return rp.get(Utils.prepend("/getCurrentUser")).then(response => { if (response) { const result: { id: string, email: string } = JSON.parse(response); return result; @@ -212,7 +211,7 @@ export class CurrentUserUtils { public static async loadUserDocument({ id, email }: { id: string, email: string }) { this.curr_id = id; Doc.CurrentUserEmail = email; - await rp.get(Utils.prepend(RouteStore.getUserDocumentId)).then(id => { + await rp.get(Utils.prepend("/getUserDocumentId")).then(id => { if (id && id !== "guest") { return DocServer.GetRefField(id).then(async field => { if (field instanceof Doc) { diff --git a/src/server/index.ts b/src/server/index.ts index aec301a74..8fc402cc9 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -3,7 +3,6 @@ import { GoogleApiServerUtils } from "./apis/google/GoogleApiServerUtils"; import * as mobileDetect from 'mobile-detect'; import * as path from 'path'; import { Database } from './database'; -import { RouteStore } from './RouteStore'; const serverPort = 4321; import { GooglePhotosUploadUtils } from './apis/google/GooglePhotosUploadUtils'; import { Opt } from '../new_fields/Doc'; @@ -23,7 +22,7 @@ import DeleteManager from "./ApiManagers/DeleteManager"; import PDFManager from "./ApiManagers/PDFManager"; import UploadManager from "./ApiManagers/UploadManager"; -export const publicDirectory = __dirname + RouteStore.public; +export const publicDirectory = __dirname + "/public"; export const filesDirectory = publicDirectory + "/files/"; export enum Partitions { pdf_text, @@ -73,13 +72,12 @@ function routeSetter(router: RouteManager) { WebSocket.initialize(serverPort, router.isRelease); /** - * Anyone attempting to navigate to localhost at this port will - * first have to log in. 
+ * Accessing root index redirects to home */ router.addSupervisedRoute({ method: Method.GET, - subscription: RouteStore.root, - onValidation: ({ res }) => res.redirect(RouteStore.home) + subscription: "/", + onValidation: ({ res }) => res.redirect("/home") }); const serve: OnUnauthenticated = ({ req, res }) => { @@ -90,7 +88,7 @@ function routeSetter(router: RouteManager) { router.addSupervisedRoute({ method: Method.GET, - subscription: [RouteStore.home, new RouteSubscriber("/doc").add("docId")], + subscription: ["/home", new RouteSubscriber("doc").add("docId")], onValidation: serve, onUnauthenticated: ({ req, ...remaining }) => { const { originalUrl: target } = req; @@ -110,9 +108,9 @@ function routeSetter(router: RouteManager) { router.addSupervisedRoute({ method: Method.GET, - subscription: new RouteSubscriber(RouteStore.cognitiveServices).add('requestedservice'), + subscription: new RouteSubscriber("cognitiveServices").add('requestedService'), onValidation: ({ req, res }) => { - let service = req.params.requestedservice; + let service = req.params.requestedService; res.send(ServicesApiKeyMap.get(service)); } }); @@ -125,7 +123,7 @@ function routeSetter(router: RouteManager) { router.addSupervisedRoute({ method: Method.POST, - subscription: new RouteSubscriber(RouteStore.googleDocs).add("sector", "action"), + subscription: new RouteSubscriber("googleDocs").add("sector", "action"), onValidation: async ({ req, res, user }) => { let sector: GoogleApiServerUtils.Service = req.params.sector as GoogleApiServerUtils.Service; let action: GoogleApiServerUtils.Action = req.params.action as GoogleApiServerUtils.Action; @@ -143,7 +141,7 @@ function routeSetter(router: RouteManager) { router.addSupervisedRoute({ method: Method.GET, - subscription: RouteStore.readGoogleAccessToken, + subscription: "/readGoogleAccessToken", onValidation: async ({ user, res }) => { const userId = user.id; const token = await GoogleApiServerUtils.retrieveAccessToken(userId); @@ -156,7 +154,7 @@ function routeSetter(router: RouteManager) { router.addSupervisedRoute({ method: Method.POST, - subscription: RouteStore.writeGoogleAccessToken, + subscription: "/writeGoogleAccessToken", onValidation: async ({ user, req, res }) => { res.send(await GoogleApiServerUtils.processNewUser(user.id, req.body.authenticationCode)); } @@ -173,7 +171,7 @@ function routeSetter(router: RouteManager) { router.addSupervisedRoute({ method: Method.POST, - subscription: RouteStore.googlePhotosMediaUpload, + subscription: "/googlePhotosMediaUpload", onValidation: async ({ user, req, res }) => { const { media } = req.body; @@ -228,7 +226,7 @@ function routeSetter(router: RouteManager) { const UploadError = (count: number) => `Unable to upload ${count} images to Dash's server`; router.addSupervisedRoute({ method: Method.POST, - subscription: RouteStore.googlePhotosMediaDownload, + subscription: "/googlePhotosMediaDownload", onValidation: async ({ req, res }) => { const contents: { mediaItems: MediaItem[] } = req.body; let failed = 0; -- cgit v1.2.3-70-g09d2 From 63ea273820eb5ab8388e2744fc7988f10746f25f Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Sun, 10 Nov 2019 14:59:49 -0500 Subject: now using environment variable route --- src/client/cognitive_services/CognitiveServices.ts | 2 +- src/server/index.ts | 15 --------------- 2 files changed, 1 insertion(+), 16 deletions(-) (limited to 'src') diff --git a/src/client/cognitive_services/CognitiveServices.ts b/src/client/cognitive_services/CognitiveServices.ts index af5fb39fc..e74aef998 100644 --- 
a/src/client/cognitive_services/CognitiveServices.ts +++ b/src/client/cognitive_services/CognitiveServices.ts @@ -38,7 +38,7 @@ export enum Confidence { export namespace CognitiveServices { const ExecuteQuery = async (service: Service, manager: APIManager, data: D): Promise => { - return fetch(Utils.prepend(`cognitiveServices/${service}`)).then(async response => { + return fetch(Utils.prepend(`environment/${service}`)).then(async response => { let apiKey = await response.text(); if (!apiKey) { console.log(`No API key found for ${service}: ensure index.ts has access to a .env file in your root directory`); diff --git a/src/server/index.ts b/src/server/index.ts index 8fc402cc9..d68e9faa1 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -100,21 +100,6 @@ function routeSetter(router: RouteManager) { } }); - const ServicesApiKeyMap = new Map([ - ["face", process.env.FACE], - ["vision", process.env.VISION], - ["handwriting", process.env.HANDWRITING] - ]); - - router.addSupervisedRoute({ - method: Method.GET, - subscription: new RouteSubscriber("cognitiveServices").add('requestedService'), - onValidation: ({ req, res }) => { - let service = req.params.requestedService; - res.send(ServicesApiKeyMap.get(service)); - } - }); - const EndpointHandlerMap = new Map([ ["create", (api, params) => api.create(params)], ["retrieve", (api, params) => api.get(params)], -- cgit v1.2.3-70-g09d2 From 06a9b3477dfef93af3c2715f5512d0d883191b58 Mon Sep 17 00:00:00 2001 From: Mohammad Amoush Date: Tue, 12 Nov 2019 17:26:58 -0500 Subject: fixed everything except for async --- src/server/ApiManagers/GeneralGoogleManager.ts | 47 ++++++++++++++++++++--- src/server/ApiManagers/GooglePhotosManager.ts | 27 +++++++------ src/server/apis/google/GooglePhotosUploadUtils.ts | 2 +- src/server/index.ts | 9 +++-- 4 files changed, 62 insertions(+), 23 deletions(-) (limited to 'src') diff --git a/src/server/ApiManagers/GeneralGoogleManager.ts b/src/server/ApiManagers/GeneralGoogleManager.ts index cb37b0dce..89efebf78 100644 --- a/src/server/ApiManagers/GeneralGoogleManager.ts +++ b/src/server/ApiManagers/GeneralGoogleManager.ts @@ -1,12 +1,20 @@ import ApiManager, { Registration } from "./ApiManager"; import { Method, _permission_denied } from "../RouteManager"; -import { uploadDirectory } from ".."; -import { path } from "animejs"; -import { RouteStore } from "../RouteStore"; import { GoogleApiServerUtils } from "../apis/google/GoogleApiServerUtils"; import { Database } from "../database"; +import RouteSubscriber from "../RouteSubscriber"; const deletionPermissionError = "Cannot perform specialized delete outside of the development environment!"; +const ServicesApiKeyMap = new Map([ + ["face", process.env.FACE], + ["vision", process.env.VISION], + ["handwriting", process.env.HANDWRITING] +]); +const EndpointHandlerMap = new Map([ + ["create", (api, params) => api.create(params)], + ["retrieve", (api, params) => api.get(params)], + ["update", (api, params) => api.batchUpdate(params)], +]); export default class GeneralGoogleManager extends ApiManager { @@ -14,7 +22,7 @@ export default class GeneralGoogleManager extends ApiManager { register({ method: Method.GET, - subscription: RouteStore.readGoogleAccessToken, + subscription: "/readGoogleAccessToken", onValidation: async ({ user, res }) => { const userId = user.id; const token = await GoogleApiServerUtils.retrieveAccessToken(userId); @@ -27,7 +35,7 @@ export default class GeneralGoogleManager extends ApiManager { register({ method: Method.POST, - subscription: 
RouteStore.writeGoogleAccessToken, + subscription: "/writeGoogleAccessToken", onValidation: async ({ user, req, res }) => { res.send(await GoogleApiServerUtils.processNewUser(user.id, req.body.authenticationCode)); } @@ -41,7 +49,34 @@ export default class GeneralGoogleManager extends ApiManager { return _permission_denied(res, deletionPermissionError); } await Database.Auxiliary.GoogleAuthenticationToken.DeleteAll(); - res.redirect(RouteStore.delete); + res.redirect("/delete"); + } + }); + + register({ + method: Method.GET, + subscription: new RouteSubscriber("/cognitiveServices").add('requestedservice'), + onValidation: ({ req, res }) => { + let service = req.params.requestedservice; + res.send(ServicesApiKeyMap.get(service)); + } + }); + + register({ + method: Method.POST, + subscription: new RouteSubscriber("/googleDocs").add("sector", "action"), + onValidation: async ({ req, res, user }) => { + let sector: GoogleApiServerUtils.Service = req.params.sector as GoogleApiServerUtils.Service; + let action: GoogleApiServerUtils.Action = req.params.action as GoogleApiServerUtils.Action; + const endpoint = await GoogleApiServerUtils.GetEndpoint(GoogleApiServerUtils.Service[sector], user.id); + let handler = EndpointHandlerMap.get(action); + if (endpoint && handler) { + handler(endpoint, req.body) + .then(response => res.send(response.data)) + .catch(exception => res.send(exception)); + return; + } + res.send(undefined); } }); } diff --git a/src/server/ApiManagers/GooglePhotosManager.ts b/src/server/ApiManagers/GooglePhotosManager.ts index b5e9caa38..1f6051c28 100644 --- a/src/server/ApiManagers/GooglePhotosManager.ts +++ b/src/server/ApiManagers/GooglePhotosManager.ts @@ -1,16 +1,12 @@ import ApiManager, { Registration } from "./ApiManager"; import { Method, _error, _success, _invalid } from "../RouteManager"; -import { uploadDirectory, NewMediaItem } from ".."; -import { path } from "animejs"; -import { RouteStore } from "../RouteStore"; +import * as path from "path"; import { GoogleApiServerUtils } from "../apis/google/GoogleApiServerUtils"; import { BatchedArray, TimeUnit } from "array-batcher"; import { GooglePhotosUploadUtils } from "../apis/google/GooglePhotosUploadUtils"; -import { MediaItem } from "../apis/google/SharedTypes"; import { Opt } from "../../new_fields/Doc"; import { DashUploadUtils } from "../DashUploadUtils"; import { Database } from "../database"; -import { prefix } from "@fortawesome/free-solid-svg-icons"; const authenticationError = "Unable to authenticate Google credentials before uploading to Google Photos!"; const mediaError = "Unable to convert all uploaded bytes to media items!"; @@ -23,6 +19,17 @@ interface GooglePhotosUploadFailure { url: string; reason: string; } +interface MediaItem { + baseUrl: string; + filename: string; +} +interface NewMediaItem { + description: string; + simpleMediaItem: { + uploadToken: string; + }; +} +const prefix = "google_photos_"; export default class GooglePhotosManager extends ApiManager { @@ -30,20 +37,18 @@ export default class GooglePhotosManager extends ApiManager { register({ method: Method.POST, - subscription: RouteStore.googlePhotosMediaUpload, + subscription: "/googlePhotosMediaUpload", onValidation: async ({ user, req, res }) => { const { media } = req.body; - const token = await GoogleApiServerUtils.retrieveAccessToken(user.id); if (!token) { return _error(res, authenticationError); } - let failed: GooglePhotosUploadFailure[] = []; const batched = BatchedArray.from(media, { batchSize: 25 }); const newMediaItems = await 
batched.batchedMapPatientInterval( { magnitude: 100, unit: TimeUnit.Milliseconds }, - async (batch, collector, { completedBatches }) => { + async (batch: any, collector: any, { completedBatches }: any) => { for (let index = 0; index < batch.length; index++) { const { url, description } = batch[index]; const fail = (reason: string) => failed.push({ reason, batch: completedBatches + 1, index, url }); @@ -59,13 +64,11 @@ export default class GooglePhotosManager extends ApiManager { } } ); - const failedCount = failed.length; if (failedCount) { console.error(`Unable to upload ${failedCount} image${failedCount === 1 ? "" : "s"} to Google's servers`); console.log(failed.map(({ reason, batch, index, url }) => `@${batch}.${index}: ${url} failed:\n${reason}`).join('\n\n')); } - return GooglePhotosUploadUtils.CreateMediaItems(token, newMediaItems, req.body.album).then( results => _success(res, { results, failed }), error => _error(res, mediaError, error) @@ -75,7 +78,7 @@ export default class GooglePhotosManager extends ApiManager { register({ method: Method.POST, - subscription: RouteStore.googlePhotosMediaDownload, + subscription: "/googlePhotosMediaDownload", onValidation: async ({ req, res }) => { const contents: { mediaItems: MediaItem[] } = req.body; let failed = 0; diff --git a/src/server/apis/google/GooglePhotosUploadUtils.ts b/src/server/apis/google/GooglePhotosUploadUtils.ts index 0abed3f1d..27532d7f0 100644 --- a/src/server/apis/google/GooglePhotosUploadUtils.ts +++ b/src/server/apis/google/GooglePhotosUploadUtils.ts @@ -122,7 +122,7 @@ export namespace GooglePhotosUploadUtils { // ...so we execute them in delayed batches and await the entire execution return batched.batchedMapPatientInterval( { magnitude: 100, unit: TimeUnit.Milliseconds }, - async (batch: NewMediaItem[], collector) => { + async (batch: NewMediaItem[], collector: any) => { const parameters = { method: 'POST', headers: headers('json', bearerToken), diff --git a/src/server/index.ts b/src/server/index.ts index 59752d6de..773b84403 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -4,10 +4,7 @@ import * as mobileDetect from 'mobile-detect'; import * as path from 'path'; import { Database } from './database'; const serverPort = 4321; -import { GooglePhotosUploadUtils } from './apis/google/GooglePhotosUploadUtils'; -import { Opt } from '../new_fields/Doc'; import { DashUploadUtils } from './DashUploadUtils'; -import { BatchedArray, TimeUnit } from 'array-batcher'; import RouteSubscriber from './RouteSubscriber'; import initializeServer from './Initialization'; import RouteManager, { Method, _success, _permission_denied, _error, _invalid, OnUnauthenticated } from './RouteManager'; @@ -21,6 +18,8 @@ import { GoogleCredentialsLoader } from './credentials/CredentialsLoader'; import DeleteManager from "./ApiManagers/DeleteManager"; import PDFManager from "./ApiManagers/PDFManager"; import UploadManager from "./ApiManagers/UploadManager"; +import GeneralGoogleManager from "./ApiManagers/GeneralGoogleManager"; +import GooglePhotosManager from "./ApiManagers/GooglePhotosManager"; export const publicDirectory = __dirname + "/public"; export const filesDirectory = publicDirectory + "/files/"; @@ -64,7 +63,9 @@ function routeSetter(router: RouteManager) { new SearchManager(), new PDFManager(), new DeleteManager(), - new UtilManager() + new UtilManager(), + new GeneralGoogleManager(), + new GooglePhotosManager(), ].forEach(manager => manager.register(router)); // initialize the web socket (bidirectional communication: if a 
user changes -- cgit v1.2.3-70-g09d2 From ab285371f6fb2a4f1e64888bafbc84b602f23416 Mon Sep 17 00:00:00 2001 From: Mohammad Amoush Date: Tue, 12 Nov 2019 18:04:41 -0500 Subject: only one more problem... --- src/server/apis/google/GooglePhotosUploadUtils.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'src') diff --git a/src/server/apis/google/GooglePhotosUploadUtils.ts b/src/server/apis/google/GooglePhotosUploadUtils.ts index 27532d7f0..3862fedc8 100644 --- a/src/server/apis/google/GooglePhotosUploadUtils.ts +++ b/src/server/apis/google/GooglePhotosUploadUtils.ts @@ -122,7 +122,7 @@ export namespace GooglePhotosUploadUtils { // ...so we execute them in delayed batches and await the entire execution return batched.batchedMapPatientInterval( { magnitude: 100, unit: TimeUnit.Milliseconds }, - async (batch: NewMediaItem[], collector: any) => { + async (batch: NewMediaItem[], collector: any): Promise => { const parameters = { method: 'POST', headers: headers('json', bearerToken), -- cgit v1.2.3-70-g09d2 From 8441282d3d07d90536464e7b084b278ea9957cf8 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Mon, 18 Nov 2019 16:59:26 -0500 Subject: log --- src/server/ActionUtilities.ts | 8 +++++++- src/server/index.ts | 5 +++-- 2 files changed, 10 insertions(+), 3 deletions(-) (limited to 'src') diff --git a/src/server/ActionUtilities.ts b/src/server/ActionUtilities.ts index 9a009791b..b418772a6 100644 --- a/src/server/ActionUtilities.ts +++ b/src/server/ActionUtilities.ts @@ -25,4 +25,10 @@ export const write_text_file = (relativePath: string, contents: any) => { return new Promise((resolve, reject) => { fs.writeFile(target, contents, (err) => err ? reject(err) : resolve()); }); -}; \ No newline at end of file +}; + +export async function log_execution(startMessage: string, endMessage: string, contents: () => void | Promise) { + console.log('\x1b[36m%s\x1b[0m', `${startMessage}...`); + await contents(); + console.log(endMessage); +} \ No newline at end of file diff --git a/src/server/index.ts b/src/server/index.ts index d68e9faa1..fb031a254 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -21,6 +21,7 @@ import { GoogleCredentialsLoader } from './credentials/CredentialsLoader'; import DeleteManager from "./ApiManagers/DeleteManager"; import PDFManager from "./ApiManagers/PDFManager"; import UploadManager from "./ApiManagers/UploadManager"; +import { log_execution } from "./ActionUtilities"; export const publicDirectory = __dirname + "/public"; export const filesDirectory = publicDirectory + "/files/"; @@ -43,7 +44,7 @@ async function preliminaryFunctions() { // divide the public directory based on type await Promise.all(Object.keys(Partitions).map(partition => DashUploadUtils.createIfNotExists(filesDirectory + partition))); // connect to the database - await Database.tryInitializeConnection(); + await log_execution("attempting to initialize database connection", "connected", Database.tryInitializeConnection); } /** @@ -243,6 +244,6 @@ function routeSetter(router: RouteManager) { } (async function start() { - await preliminaryFunctions(); + await log_execution("starting execution of preliminary functions", "completed preliminary functions", preliminaryFunctions); await initializeServer({ listenAtPort: 1050, routeSetter }); })(); -- cgit v1.2.3-70-g09d2 From 00633c834c725bab78cef5bd7b9c4ff2b1449ccf Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Tue, 19 Nov 2019 11:18:08 -0500 Subject: api key route switched to environment, added client side util --- src/Utils.ts | 7 +++++- 
src/client/cognitive_services/CognitiveServices.ts | 28 ++++++++++------------ src/server/ApiManagers/GeneralGoogleManager.ts | 16 ++----------- src/server/ApiManagers/GooglePhotosManager.ts | 1 + 4 files changed, 22 insertions(+), 30 deletions(-) (limited to 'src') diff --git a/src/Utils.ts b/src/Utils.ts index abff2eaba..91fa459c6 100644 --- a/src/Utils.ts +++ b/src/Utils.ts @@ -48,6 +48,11 @@ export namespace Utils { return prepend("/corsProxy/") + encodeURIComponent(url); } + export async function getApiKey(target: string): Promise { + const response = await fetch(prepend(`environment/${target.toUpperCase()}`)); + return response.text(); + } + export function CopyText(text: string) { var textArea = document.createElement("textarea"); textArea.value = text; @@ -174,7 +179,7 @@ export namespace Utils { } let idString = (message.id || "").padStart(36, ' '); prefix = prefix.padEnd(16, ' '); - console.log(`${prefix}: ${idString}, ${receiving ? 'receiving' : 'sending'} ${messageName} with data ${JSON.stringify(message)}`); + console.log(`${prefix}: ${idString}, ${receiving ? 'receiving' : 'sending'} ${messageName} with data ${JSON.stringify(message)} `); } function loggingCallback(prefix: string, func: (args: any) => any, messageName: string) { diff --git a/src/client/cognitive_services/CognitiveServices.ts b/src/client/cognitive_services/CognitiveServices.ts index e74aef998..5a7f5e991 100644 --- a/src/client/cognitive_services/CognitiveServices.ts +++ b/src/client/cognitive_services/CognitiveServices.ts @@ -38,21 +38,19 @@ export enum Confidence { export namespace CognitiveServices { const ExecuteQuery = async (service: Service, manager: APIManager, data: D): Promise => { - return fetch(Utils.prepend(`environment/${service}`)).then(async response => { - let apiKey = await response.text(); - if (!apiKey) { - console.log(`No API key found for ${service}: ensure index.ts has access to a .env file in your root directory`); - return undefined; - } - - let results: any; - try { - results = await manager.requester(apiKey, manager.converter(data), service).then(json => JSON.parse(json)); - } catch { - results = undefined; - } - return results; - }); + const apiKey = await Utils.getApiKey(service); + if (!apiKey) { + console.log(`No API key found for ${service}: ensure index.ts has access to a .env file in your root directory.`); + return undefined; + } + + let results: any; + try { + results = await manager.requester(apiKey, manager.converter(data), service).then(json => JSON.parse(json)); + } catch { + results = undefined; + } + return results; }; export namespace Image { diff --git a/src/server/ApiManagers/GeneralGoogleManager.ts b/src/server/ApiManagers/GeneralGoogleManager.ts index 89efebf78..171912185 100644 --- a/src/server/ApiManagers/GeneralGoogleManager.ts +++ b/src/server/ApiManagers/GeneralGoogleManager.ts @@ -5,11 +5,7 @@ import { Database } from "../database"; import RouteSubscriber from "../RouteSubscriber"; const deletionPermissionError = "Cannot perform specialized delete outside of the development environment!"; -const ServicesApiKeyMap = new Map([ - ["face", process.env.FACE], - ["vision", process.env.VISION], - ["handwriting", process.env.HANDWRITING] -]); + const EndpointHandlerMap = new Map([ ["create", (api, params) => api.create(params)], ["retrieve", (api, params) => api.get(params)], @@ -53,15 +49,6 @@ export default class GeneralGoogleManager extends ApiManager { } }); - register({ - method: Method.GET, - subscription: new 
RouteSubscriber("/cognitiveServices").add('requestedservice'), - onValidation: ({ req, res }) => { - let service = req.params.requestedservice; - res.send(ServicesApiKeyMap.get(service)); - } - }); - register({ method: Method.POST, subscription: new RouteSubscriber("/googleDocs").add("sector", "action"), @@ -79,5 +66,6 @@ export default class GeneralGoogleManager extends ApiManager { res.send(undefined); } }); + } } \ No newline at end of file diff --git a/src/server/ApiManagers/GooglePhotosManager.ts b/src/server/ApiManagers/GooglePhotosManager.ts index 1f6051c28..1138dede1 100644 --- a/src/server/ApiManagers/GooglePhotosManager.ts +++ b/src/server/ApiManagers/GooglePhotosManager.ts @@ -107,5 +107,6 @@ export default class GooglePhotosManager extends ApiManager { _invalid(res, requestError); } }); + } } \ No newline at end of file -- cgit v1.2.3-70-g09d2 From 611bb858265b6667f2b7db858d183cea16f273aa Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Tue, 19 Nov 2019 11:23:06 -0500 Subject: rename --- src/server/ApiManagers/DownloadManager.ts | 267 ++++++++++++++++++++++++++++++ src/server/ApiManagers/ExportManager.ts | 267 ------------------------------ src/server/index.ts | 2 +- 3 files changed, 268 insertions(+), 268 deletions(-) create mode 100644 src/server/ApiManagers/DownloadManager.ts delete mode 100644 src/server/ApiManagers/ExportManager.ts (limited to 'src') diff --git a/src/server/ApiManagers/DownloadManager.ts b/src/server/ApiManagers/DownloadManager.ts new file mode 100644 index 000000000..fc6ba0d22 --- /dev/null +++ b/src/server/ApiManagers/DownloadManager.ts @@ -0,0 +1,267 @@ +import ApiManager, { Registration } from "./ApiManager"; +import { Method } from "../RouteManager"; +import RouteSubscriber from "../RouteSubscriber"; +import * as Archiver from 'archiver'; +import * as express from 'express'; +import { Database } from "../database"; +import * as path from "path"; +import { DashUploadUtils } from "../DashUploadUtils"; +import { publicDirectory } from ".."; + +export type Hierarchy = { [id: string]: string | Hierarchy }; +export type ZipMutator = (file: Archiver.Archiver) => void | Promise; +export interface DocumentElements { + data: string | any[]; + title: string; +} + +export default class DownloadManager extends ApiManager { + + protected initialize(register: Registration): void { + + /** + * Let's say someone's using Dash to organize images in collections. + * This lets them export the hierarchy they've built to their + * own file system in a useful format. + * + * This handler starts with a single document id (interesting only + * if it's that of a collection). It traverses the database, captures + * the nesting of only nested images or collections, writes + * that to a zip file and returns it to the client for download. 
+ */ + register({ + method: Method.GET, + subscription: new RouteSubscriber("imageHierarchyExport").add('docId'), + onValidation: async ({ req, res }) => { + const id = req.params.docId; + const hierarchy: Hierarchy = {}; + await buildHierarchyRecursive(id, hierarchy); + return BuildAndDispatchZip(res, zip => writeHierarchyRecursive(zip, hierarchy)); + } + }); + + register({ + method: Method.GET, + subscription: new RouteSubscriber("downloadId").add("docId"), + onValidation: async ({ req, res }) => { + return BuildAndDispatchZip(res, async zip => { + const { id, docs, files } = await getDocs(req.params.docId); + const docString = JSON.stringify({ id, docs }); + zip.append(docString, { name: "doc.json" }); + files.forEach(val => { + zip.file(publicDirectory + val, { name: val.substring(1) }); + }); + }); + } + }); + + register({ + method: Method.GET, + subscription: new RouteSubscriber("/serializeDoc").add("docId"), + onValidation: async ({ req, res }) => { + const { docs, files } = await getDocs(req.params.docId); + res.send({ docs, files: Array.from(files) }); + } + }); + + + } + +} + +async function getDocs(id: string) { + const files = new Set(); + const docs: { [id: string]: any } = {}; + const fn = (doc: any): string[] => { + const id = doc.id; + if (typeof id === "string" && id.endsWith("Proto")) { + //Skip protos + return []; + } + const ids: string[] = []; + for (const key in doc.fields) { + if (!doc.fields.hasOwnProperty(key)) { + continue; + } + const field = doc.fields[key]; + if (field === undefined || field === null) { + continue; + } + + if (field.__type === "proxy" || field.__type === "prefetch_proxy") { + ids.push(field.fieldId); + } else if (field.__type === "script" || field.__type === "computed") { + if (field.captures) { + ids.push(field.captures.fieldId); + } + } else if (field.__type === "list") { + ids.push(...fn(field)); + } else if (typeof field === "string") { + const re = /"(?:dataD|d)ocumentId"\s*:\s*"([\w\-]*)"/g; + let match: string[] | null; + while ((match = re.exec(field)) !== null) { + ids.push(match[1]); + } + } else if (field.__type === "RichTextField") { + const re = /"href"\s*:\s*"(.*?)"/g; + let match: string[] | null; + while ((match = re.exec(field.Data)) !== null) { + const urlString = match[1]; + const split = new URL(urlString).pathname.split("doc/"); + if (split.length > 1) { + ids.push(split[split.length - 1]); + } + } + const re2 = /"src"\s*:\s*"(.*?)"/g; + while ((match = re2.exec(field.Data)) !== null) { + const urlString = match[1]; + const pathname = new URL(urlString).pathname; + files.add(pathname); + } + } else if (["audio", "image", "video", "pdf", "web"].includes(field.__type)) { + const url = new URL(field.url); + const pathname = url.pathname; + files.add(pathname); + } + } + + if (doc.id) { + docs[doc.id] = doc; + } + return ids; + }; + await Database.Instance.visit([id], fn); + return { id, docs, files }; +} + +/** + * This utility function factors out the process + * of creating a zip file and sending it back to the client + * by piping it into a response. + * + * Learn more about piping and readable / writable streams here! 
+ * https://www.freecodecamp.org/news/node-js-streams-everything-you-need-to-know-c9141306be93/ + * + * @param res the writable stream response object that will transfer the generated zip file + * @param mutator the callback function used to actually modify and insert information into the zip instance + */ +export async function BuildAndDispatchZip(res: express.Response, mutator: ZipMutator): Promise { + res.set('Content-disposition', `attachment;`); + res.set('Content-Type', "application/zip"); + const zip = Archiver('zip'); + zip.pipe(res); + await mutator(zip); + return zip.finalize(); +} + +/** + * This function starts with a single document id as a seed, + * typically that of a collection, and then descends the entire tree + * of image or collection documents that are reachable from that seed. + * @param seedId the id of the root of the subtree we're trying to capture, interesting only if it's a collection + * @param hierarchy the data structure we're going to use to record the nesting of the collections and images as we descend + */ + +/* +Below is an example of the JSON hierarchy built from two images contained inside a collection titled 'a nested collection', +following the general recursive structure shown immediately below +{ + "parent folder name":{ + "first child's fild name":"first child's url" + ... + "nth child's fild name":"nth child's url" + } +} +{ + "a nested collection (865c4734-c036-4d67-a588-c71bb43d1440)":{ + "an image of a cat (ace99ffd-8ed8-4026-a5d5-a353fff57bdd).jpg":"https://upload.wikimedia.org/wikipedia/commons/thumb/3/3a/Cat03.jpg/1200px-Cat03.jpg", + "1*SGJw31T5Q9Zfsk24l2yirg.gif (9321cc9b-9b3e-4cb6-b99c-b7e667340f05).gif":"https://cdn-media-1.freecodecamp.org/images/1*SGJw31T5Q9Zfsk24l2yirg.gif" + } +} +*/ +async function buildHierarchyRecursive(seedId: string, hierarchy: Hierarchy): Promise { + const { title, data } = await getData(seedId); + const label = `${title} (${seedId})`; + // is the document a collection? + if (Array.isArray(data)) { + // recurse over all documents in the collection. + const local: Hierarchy = {}; // create a child hierarchy for this level, which will get passed in as the parent of the recursive call + hierarchy[label] = local; // store it at the index in the parent, so we'll end up with a map of maps of maps + await Promise.all(data.map(proxy => buildHierarchyRecursive(proxy.fieldId, local))); + } else { + // now, data can only be a string, namely the url of the image + const filename = label + path.extname(data); // this is the file name under which the output image will be stored + hierarchy[filename] = data; + } +} + +/** + * This is a very specific utility method to help traverse the database + * to parse data and titles out of images and collections alone. + * + * We don't know if the document id given to is corresponds to a view document or a data + * document. If it's a data document, the response from the database will have + * a data field. 
If not, call recursively on the proto, and resolve with *its* data + * + * @param targetId the id of the Dash document whose data is being requests + * @returns the data of the document, as well as its title + */ +async function getData(targetId: string): Promise { + return new Promise((resolve, reject) => { + Database.Instance.getDocument(targetId, async (result: any) => { + const { data, proto, title } = result.fields; + if (data) { + if (data.url) { + resolve({ data: data.url, title }); + } else if (data.fields) { + resolve({ data: data.fields, title }); + } else { + reject(); + } + } else if (proto) { + getData(proto.fieldId).then(resolve, reject); + } else { + reject(); + } + }); + }); +} + +/** + * + * @param file the zip file to which we write the files + * @param hierarchy the data structure from which we read, defining the nesting of the documents in the zip + * @param prefix lets us create nested folders in the zip file by continually appending to the end + * of the prefix with each layer of recursion. + * + * Function Call #1 => "Dash Export" + * Function Call #2 => "Dash Export/a nested collection" + * Function Call #3 => "Dash Export/a nested collection/lowest level collection" + * ... + */ +async function writeHierarchyRecursive(file: Archiver.Archiver, hierarchy: Hierarchy, prefix = "Dash Export"): Promise { + for (const documentTitle of Object.keys(hierarchy)) { + const result = hierarchy[documentTitle]; + // base case or leaf node, we've hit a url (image) + if (typeof result === "string") { + let path: string; + let matches: RegExpExecArray | null; + if ((matches = /\:1050\/files\/(upload\_[\da-z]{32}.*)/g.exec(result)) !== null) { + // image already exists on our server + path = `${__dirname}/public/files/${matches[1]}`; + } else { + // the image doesn't already exist on our server (may have been dragged + // and dropped in the browser and thus hosted remotely) so we upload it + // to our server and point the zip file to it, so it can bundle up the bytes + const information = await DashUploadUtils.UploadImage(result); + path = information.mediaPaths[0]; + } + // write the file specified by the path to the directory in the + // zip file given by the prefix. + file.file(path, { name: documentTitle, prefix }); + } else { + // we've hit a collection, so we have to recurse + await writeHierarchyRecursive(file, result, `${prefix}/${documentTitle}`); + } + } +} \ No newline at end of file diff --git a/src/server/ApiManagers/ExportManager.ts b/src/server/ApiManagers/ExportManager.ts deleted file mode 100644 index fc6ba0d22..000000000 --- a/src/server/ApiManagers/ExportManager.ts +++ /dev/null @@ -1,267 +0,0 @@ -import ApiManager, { Registration } from "./ApiManager"; -import { Method } from "../RouteManager"; -import RouteSubscriber from "../RouteSubscriber"; -import * as Archiver from 'archiver'; -import * as express from 'express'; -import { Database } from "../database"; -import * as path from "path"; -import { DashUploadUtils } from "../DashUploadUtils"; -import { publicDirectory } from ".."; - -export type Hierarchy = { [id: string]: string | Hierarchy }; -export type ZipMutator = (file: Archiver.Archiver) => void | Promise; -export interface DocumentElements { - data: string | any[]; - title: string; -} - -export default class DownloadManager extends ApiManager { - - protected initialize(register: Registration): void { - - /** - * Let's say someone's using Dash to organize images in collections. 
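/*
 * Illustrative sketch, not part of the patch above: writeHierarchyRecursive relies on Archiver's
 * prefix option to recreate the collection nesting inside the zip, appending "/<collection title>"
 * to the prefix at each level of recursion. A standalone toy version of that idea is below; the
 * image paths and output location are invented for the example and would need to exist on disk.
 */
import * as Archiver from 'archiver';
import { createWriteStream } from 'fs';

const zip = Archiver('zip');
zip.pipe(createWriteStream('/tmp/dash-export-example.zip')); // hypothetical output file
// entries land at "Dash Export/cat.jpg" and "Dash Export/a nested collection/dog.jpg"
zip.file('./cat.jpg', { name: 'cat.jpg', prefix: 'Dash Export' });
zip.file('./dog.jpg', { name: 'dog.jpg', prefix: 'Dash Export/a nested collection' });
zip.finalize(); // resolves once the archive has been fully flushed to the stream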
- * This lets them export the hierarchy they've built to their - * own file system in a useful format. - * - * This handler starts with a single document id (interesting only - * if it's that of a collection). It traverses the database, captures - * the nesting of only nested images or collections, writes - * that to a zip file and returns it to the client for download. - */ - register({ - method: Method.GET, - subscription: new RouteSubscriber("imageHierarchyExport").add('docId'), - onValidation: async ({ req, res }) => { - const id = req.params.docId; - const hierarchy: Hierarchy = {}; - await buildHierarchyRecursive(id, hierarchy); - return BuildAndDispatchZip(res, zip => writeHierarchyRecursive(zip, hierarchy)); - } - }); - - register({ - method: Method.GET, - subscription: new RouteSubscriber("downloadId").add("docId"), - onValidation: async ({ req, res }) => { - return BuildAndDispatchZip(res, async zip => { - const { id, docs, files } = await getDocs(req.params.docId); - const docString = JSON.stringify({ id, docs }); - zip.append(docString, { name: "doc.json" }); - files.forEach(val => { - zip.file(publicDirectory + val, { name: val.substring(1) }); - }); - }); - } - }); - - register({ - method: Method.GET, - subscription: new RouteSubscriber("/serializeDoc").add("docId"), - onValidation: async ({ req, res }) => { - const { docs, files } = await getDocs(req.params.docId); - res.send({ docs, files: Array.from(files) }); - } - }); - - - } - -} - -async function getDocs(id: string) { - const files = new Set(); - const docs: { [id: string]: any } = {}; - const fn = (doc: any): string[] => { - const id = doc.id; - if (typeof id === "string" && id.endsWith("Proto")) { - //Skip protos - return []; - } - const ids: string[] = []; - for (const key in doc.fields) { - if (!doc.fields.hasOwnProperty(key)) { - continue; - } - const field = doc.fields[key]; - if (field === undefined || field === null) { - continue; - } - - if (field.__type === "proxy" || field.__type === "prefetch_proxy") { - ids.push(field.fieldId); - } else if (field.__type === "script" || field.__type === "computed") { - if (field.captures) { - ids.push(field.captures.fieldId); - } - } else if (field.__type === "list") { - ids.push(...fn(field)); - } else if (typeof field === "string") { - const re = /"(?:dataD|d)ocumentId"\s*:\s*"([\w\-]*)"/g; - let match: string[] | null; - while ((match = re.exec(field)) !== null) { - ids.push(match[1]); - } - } else if (field.__type === "RichTextField") { - const re = /"href"\s*:\s*"(.*?)"/g; - let match: string[] | null; - while ((match = re.exec(field.Data)) !== null) { - const urlString = match[1]; - const split = new URL(urlString).pathname.split("doc/"); - if (split.length > 1) { - ids.push(split[split.length - 1]); - } - } - const re2 = /"src"\s*:\s*"(.*?)"/g; - while ((match = re2.exec(field.Data)) !== null) { - const urlString = match[1]; - const pathname = new URL(urlString).pathname; - files.add(pathname); - } - } else if (["audio", "image", "video", "pdf", "web"].includes(field.__type)) { - const url = new URL(field.url); - const pathname = url.pathname; - files.add(pathname); - } - } - - if (doc.id) { - docs[doc.id] = doc; - } - return ids; - }; - await Database.Instance.visit([id], fn); - return { id, docs, files }; -} - -/** - * This utility function factors out the process - * of creating a zip file and sending it back to the client - * by piping it into a response. - * - * Learn more about piping and readable / writable streams here! 
- * https://www.freecodecamp.org/news/node-js-streams-everything-you-need-to-know-c9141306be93/ - * - * @param res the writable stream response object that will transfer the generated zip file - * @param mutator the callback function used to actually modify and insert information into the zip instance - */ -export async function BuildAndDispatchZip(res: express.Response, mutator: ZipMutator): Promise { - res.set('Content-disposition', `attachment;`); - res.set('Content-Type', "application/zip"); - const zip = Archiver('zip'); - zip.pipe(res); - await mutator(zip); - return zip.finalize(); -} - -/** - * This function starts with a single document id as a seed, - * typically that of a collection, and then descends the entire tree - * of image or collection documents that are reachable from that seed. - * @param seedId the id of the root of the subtree we're trying to capture, interesting only if it's a collection - * @param hierarchy the data structure we're going to use to record the nesting of the collections and images as we descend - */ - -/* -Below is an example of the JSON hierarchy built from two images contained inside a collection titled 'a nested collection', -following the general recursive structure shown immediately below -{ - "parent folder name":{ - "first child's fild name":"first child's url" - ... - "nth child's fild name":"nth child's url" - } -} -{ - "a nested collection (865c4734-c036-4d67-a588-c71bb43d1440)":{ - "an image of a cat (ace99ffd-8ed8-4026-a5d5-a353fff57bdd).jpg":"https://upload.wikimedia.org/wikipedia/commons/thumb/3/3a/Cat03.jpg/1200px-Cat03.jpg", - "1*SGJw31T5Q9Zfsk24l2yirg.gif (9321cc9b-9b3e-4cb6-b99c-b7e667340f05).gif":"https://cdn-media-1.freecodecamp.org/images/1*SGJw31T5Q9Zfsk24l2yirg.gif" - } -} -*/ -async function buildHierarchyRecursive(seedId: string, hierarchy: Hierarchy): Promise { - const { title, data } = await getData(seedId); - const label = `${title} (${seedId})`; - // is the document a collection? - if (Array.isArray(data)) { - // recurse over all documents in the collection. - const local: Hierarchy = {}; // create a child hierarchy for this level, which will get passed in as the parent of the recursive call - hierarchy[label] = local; // store it at the index in the parent, so we'll end up with a map of maps of maps - await Promise.all(data.map(proxy => buildHierarchyRecursive(proxy.fieldId, local))); - } else { - // now, data can only be a string, namely the url of the image - const filename = label + path.extname(data); // this is the file name under which the output image will be stored - hierarchy[filename] = data; - } -} - -/** - * This is a very specific utility method to help traverse the database - * to parse data and titles out of images and collections alone. - * - * We don't know if the document id given to is corresponds to a view document or a data - * document. If it's a data document, the response from the database will have - * a data field. 
If not, call recursively on the proto, and resolve with *its* data - * - * @param targetId the id of the Dash document whose data is being requests - * @returns the data of the document, as well as its title - */ -async function getData(targetId: string): Promise { - return new Promise((resolve, reject) => { - Database.Instance.getDocument(targetId, async (result: any) => { - const { data, proto, title } = result.fields; - if (data) { - if (data.url) { - resolve({ data: data.url, title }); - } else if (data.fields) { - resolve({ data: data.fields, title }); - } else { - reject(); - } - } else if (proto) { - getData(proto.fieldId).then(resolve, reject); - } else { - reject(); - } - }); - }); -} - -/** - * - * @param file the zip file to which we write the files - * @param hierarchy the data structure from which we read, defining the nesting of the documents in the zip - * @param prefix lets us create nested folders in the zip file by continually appending to the end - * of the prefix with each layer of recursion. - * - * Function Call #1 => "Dash Export" - * Function Call #2 => "Dash Export/a nested collection" - * Function Call #3 => "Dash Export/a nested collection/lowest level collection" - * ... - */ -async function writeHierarchyRecursive(file: Archiver.Archiver, hierarchy: Hierarchy, prefix = "Dash Export"): Promise { - for (const documentTitle of Object.keys(hierarchy)) { - const result = hierarchy[documentTitle]; - // base case or leaf node, we've hit a url (image) - if (typeof result === "string") { - let path: string; - let matches: RegExpExecArray | null; - if ((matches = /\:1050\/files\/(upload\_[\da-z]{32}.*)/g.exec(result)) !== null) { - // image already exists on our server - path = `${__dirname}/public/files/${matches[1]}`; - } else { - // the image doesn't already exist on our server (may have been dragged - // and dropped in the browser and thus hosted remotely) so we upload it - // to our server and point the zip file to it, so it can bundle up the bytes - const information = await DashUploadUtils.UploadImage(result); - path = information.mediaPaths[0]; - } - // write the file specified by the path to the directory in the - // zip file given by the prefix. 
- file.file(path, { name: documentTitle, prefix }); - } else { - // we've hit a collection, so we have to recurse - await writeHierarchyRecursive(file, result, `${prefix}/${documentTitle}`); - } - } -} \ No newline at end of file diff --git a/src/server/index.ts b/src/server/index.ts index 569f2e139..57c66bc28 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -13,7 +13,7 @@ import UtilManager from './ApiManagers/UtilManager'; import SearchManager from './ApiManagers/SearchManager'; import UserManager from './ApiManagers/UserManager'; import { WebSocket } from './Websocket/Websocket'; -import DownloadManager from './ApiManagers/ExportManager'; +import DownloadManager from './ApiManagers/DownloadManager'; import { GoogleCredentialsLoader } from './credentials/CredentialsLoader'; import DeleteManager from "./ApiManagers/DeleteManager"; import PDFManager from "./ApiManagers/PDFManager"; -- cgit v1.2.3-70-g09d2 From 025a25a7fbf88c53d153cec19b8f93097116210e Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Tue, 19 Nov 2019 13:30:27 -0500 Subject: fixed authorization --- src/client/views/nodes/AudioBox.tsx | 3 +-- src/server/apis/google/GoogleApiServerUtils.ts | 12 +++++++----- src/server/credentials/google_project_credentials.json | 11 ++++------- 3 files changed, 12 insertions(+), 14 deletions(-) (limited to 'src') diff --git a/src/client/views/nodes/AudioBox.tsx b/src/client/views/nodes/AudioBox.tsx index 86bd23b67..77b10e395 100644 --- a/src/client/views/nodes/AudioBox.tsx +++ b/src/client/views/nodes/AudioBox.tsx @@ -8,7 +8,6 @@ import { DocExtendableComponent } from "../DocComponent"; import { makeInterface, createSchema } from "../../../new_fields/Schema"; import { documentSchema } from "../../../new_fields/documentSchemas"; import { Utils, returnTrue, emptyFunction, returnOne, returnTransparent } from "../../../Utils"; -import { RouteStore } from "../../../server/RouteStore"; import { runInAction, observable, reaction, IReactionDisposer, computed, action } from "mobx"; import { DateField } from "../../../new_fields/DateField"; import { SelectionManager } from "../../util/SelectionManager"; @@ -140,7 +139,7 @@ export class AudioBox extends DocExtendableComponent { let headerParameters = { headers: { 'Content-Type': 'application/x-www-form-urlencoded' } }; + const { client_id, client_secret } = GoogleCredentialsLoader.ProjectCredentials; let url = `https://oauth2.googleapis.com/token?${qs.stringify({ refreshToken: credentials.refresh_token, - grant_type: "refresh_token", - ...installed + client_id, + client_secret, + grant_type: "refresh_token" })}`; const { access_token, expires_in } = await new Promise(async resolve => { const response = await request.post(url, headerParameters); diff --git a/src/server/credentials/google_project_credentials.json b/src/server/credentials/google_project_credentials.json index 5d9c62eb1..955c5a3c1 100644 --- a/src/server/credentials/google_project_credentials.json +++ b/src/server/credentials/google_project_credentials.json @@ -1,14 +1,11 @@ { "installed": { - "client_id": "1005546247619-kqpnvh42mpa803tem8556b87umi4j9r0.apps.googleusercontent.com", - "project_id": "brown-dash", + "client_id": "343179513178-ud6tvmh275r2fq93u9eesrnc66t6akh9.apps.googleusercontent.com", + "project_id": "quickstart-1565056383187", "auth_uri": "https://accounts.google.com/o/oauth2/auth", "token_uri": "https://oauth2.googleapis.com/token", "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", - "client_secret": "WshLb5TH9SdFVGGbQcnYj7IU", - 
"redirect_uris": [ - "urn:ietf:wg:oauth:2.0:oob", - "http://localhost" - ] + "client_secret": "w8KIFSc0MQpmUYHed4qEzn8b", + "redirect_uris": ["urn:ietf:wg:oauth:2.0:oob", "http://localhost"] } } \ No newline at end of file -- cgit v1.2.3-70-g09d2 From cabb1c986be9a15cfb650fd365283557c1347737 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Tue, 19 Nov 2019 14:28:27 -0500 Subject: commented out traces --- .../views/collections/collectionFreeForm/CollectionFreeFormView.tsx | 2 +- src/client/views/nodes/CollectionFreeFormDocumentView.tsx | 2 +- src/client/views/nodes/DocumentView.tsx | 6 +++--- src/client/views/nodes/FormattedTextBox.tsx | 2 +- src/client/views/nodes/ImageBox.tsx | 2 +- src/client/views/pdf/PDFViewer.tsx | 4 ++-- 6 files changed, 9 insertions(+), 9 deletions(-) (limited to 'src') diff --git a/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx b/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx index 3b313c34a..59858df8d 100644 --- a/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx +++ b/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx @@ -858,7 +858,7 @@ export class CollectionFreeFormView extends CollectionSubView(PanZoomDocument) { return eles; } render() { - trace(); + // trace(); // update the actual dimensions of the collection so that they can inquired (e.g., by a minimap) this.Document.fitX = this.contentBounds && this.contentBounds.x; this.Document.fitY = this.contentBounds && this.contentBounds.y; diff --git a/src/client/views/nodes/CollectionFreeFormDocumentView.tsx b/src/client/views/nodes/CollectionFreeFormDocumentView.tsx index d0e1d1922..bbd9859a8 100644 --- a/src/client/views/nodes/CollectionFreeFormDocumentView.tsx +++ b/src/client/views/nodes/CollectionFreeFormDocumentView.tsx @@ -87,7 +87,7 @@ export class CollectionFreeFormDocumentView extends DocComponent this.dataProvider ? this.dataProvider.height : this.panelHeight(); render() { - trace(); + // trace(); return
(Docu @computed get finalLayoutKey() { return this.props.layoutKey || "layout" } childScaling = () => (this.layoutDoc.fitWidth ? this.props.PanelWidth() / this.nativeWidth : this.props.ContentScaling()); @computed get contents() { - trace(); + // trace(); return ((Docu } @computed get innards() { - trace(); + // trace(); const showOverlays = this.props.showOverlays ? this.props.showOverlays(this.Document) : undefined; const showTitle = showOverlays && "title" in showOverlays ? showOverlays.title : this.getLayoutPropStr("showTitle"); const showCaption = showOverlays && "caption" in showOverlays ? showOverlays.caption : this.getLayoutPropStr("showCaption"); @@ -640,7 +640,7 @@ export class DocumentView extends DocComponent(Docu } render() { if (!this.props.Document) return (null); - trace(); + // trace(); const ruleColor = this.props.ruleProvider ? StrCast(this.props.ruleProvider["ruleColor_" + this.Document.heading]) : undefined; const ruleRounding = this.props.ruleProvider ? StrCast(this.props.ruleProvider["ruleRounding_" + this.Document.heading]) : undefined; const colorSet = this.setsLayoutProp("backgroundColor"); diff --git a/src/client/views/nodes/FormattedTextBox.tsx b/src/client/views/nodes/FormattedTextBox.tsx index 4a79a44d4..ef54a387c 100644 --- a/src/client/views/nodes/FormattedTextBox.tsx +++ b/src/client/views/nodes/FormattedTextBox.tsx @@ -1015,7 +1015,7 @@ export class FormattedTextBox extends DocAnnotatableComponent<(FieldViewProps & @computed get sidebarWidth() { return Number(this.sidebarWidthPercent.substring(0, this.sidebarWidthPercent.length - 1)) / 100 * this.props.PanelWidth(); } @computed get annotationsKey() { return "annotations"; } render() { - trace(); + // trace(); let rounded = StrCast(this.layoutDoc.borderRounding) === "100%" ? "-rounded" : ""; let interactive = InkingControl.Instance.selectedTool || this.layoutDoc.isBackground; if (this.props.isSelected()) { diff --git a/src/client/views/nodes/ImageBox.tsx b/src/client/views/nodes/ImageBox.tsx index 4ed350d2b..7a9ebc00a 100644 --- a/src/client/views/nodes/ImageBox.tsx +++ b/src/client/views/nodes/ImageBox.tsx @@ -267,7 +267,7 @@ export class ImageBox extends DocAnnotatableComponent {this.nonDocAnnotations.sort((a, b) => NumCast(a.y) - NumCast(b.y)).map((anno, index) => )} @@ -671,7 +671,7 @@ export class PDFViewer extends DocAnnotatableComponent this.props.PanelHeight() / this.props.ContentScaling() * 72 / 96; contentZoom = () => this._zoomed; render() { - trace(); + // trace(); return !this.extensionDoc ? (null) :
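
The hunks above silence MobX's trace() call in each render method by commenting it out. A hedged alternative sketch (not from this repository; the DEBUG_TRACE flag and debugTrace helper are invented for illustration) keeps the calls but gates them behind a single switch, so re-render diagnostics can be turned back on without editing every component:

    import { trace } from 'mobx';

    // hypothetical global switch; flip to true when hunting excess re-renders
    const DEBUG_TRACE = false;

    export function debugTrace() {
        // trace() reports why the currently running render/computed was re-evaluated
        DEBUG_TRACE && trace();
    }
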
Date: Tue, 19 Nov 2019 16:34:20 -0500 Subject: passport fix --- src/server/authentication/config/passport.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'src') diff --git a/src/server/authentication/config/passport.ts b/src/server/authentication/config/passport.ts index 0b15c3a36..726df7fd7 100644 --- a/src/server/authentication/config/passport.ts +++ b/src/server/authentication/config/passport.ts @@ -40,7 +40,7 @@ export let isAuthenticated = (req: Request, res: Response, next: NextFunction) = export let isAuthorized = (req: Request, res: Response, next: NextFunction) => { const provider = req.path.split("/").slice(-1)[0]; - if (_.find((req.user).tokens, { kind: provider })) { + if (_.find((req.user as any).tokens!, { kind: provider })) { next(); } else { res.redirect(`/auth/${provider}`); -- cgit v1.2.3-70-g09d2 From 8b6f70cd0ddac6d2669c3b0624d59a866737497c Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Tue, 19 Nov 2019 17:23:55 -0500 Subject: small --- src/server/index.ts | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) (limited to 'src') diff --git a/src/server/index.ts b/src/server/index.ts index 57c66bc28..ad9c5886c 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -56,8 +56,7 @@ async function preliminaryFunctions() { * with the server */ function routeSetter(router: RouteManager) { - // initialize API Managers - [ + const managers = [ new UserManager(), new UploadManager(), new DownloadManager(), @@ -67,7 +66,10 @@ function routeSetter(router: RouteManager) { new UtilManager(), new GeneralGoogleManager(), new GooglePhotosManager(), - ].forEach(manager => manager.register(router)); + ]; + + // initialize API Managers + managers.forEach(manager => manager.register(router)); // initialize the web socket (bidirectional communication: if a user changes // a field on one client, that change must be broadcast to all other clients) -- cgit v1.2.3-70-g09d2 From f194abe2a54158bb041e1e0e7cfa6e22c669629f Mon Sep 17 00:00:00 2001 From: eeng5 Date: Tue, 19 Nov 2019 17:47:28 -0500 Subject: one line --- src/server/ApiManagers/GooglePhotosManager.ts | 3 +++ 1 file changed, 3 insertions(+) (limited to 'src') diff --git a/src/server/ApiManagers/GooglePhotosManager.ts b/src/server/ApiManagers/GooglePhotosManager.ts index 1f6051c28..67eb92a18 100644 --- a/src/server/ApiManagers/GooglePhotosManager.ts +++ b/src/server/ApiManagers/GooglePhotosManager.ts @@ -31,6 +31,9 @@ interface NewMediaItem { } const prefix = "google_photos_"; +/** + * This manager handles the creation of routes for google photos functionality. 
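
A manager like the one declared below hooks into the server through the pattern visible in the index.ts hunk above: each ApiManager subclass implements initialize(register), and routeSetter invokes register on every manager in the list. A minimal sketch of such a subclass — the EchoManager name and /echo route are invented for illustration, not part of this repository:

    import ApiManager, { Registration } from "./ApiManager";
    import { Method } from "../RouteManager";

    export default class EchoManager extends ApiManager {
        protected initialize(register: Registration): void {
            // hypothetical route that simply confirms the server is reachable
            register({
                method: Method.GET,
                subscription: "/echo",
                onValidation: ({ res }) => res.send("echo")
            });
        }
    }
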
+ */ export default class GooglePhotosManager extends ApiManager { protected initialize(register: Registration): void { -- cgit v1.2.3-70-g09d2 From 9a72d65baaa293fa84592de9bc43d555b5cae319 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Tue, 19 Nov 2019 19:52:56 -0500 Subject: logging --- src/Utils.ts | 5 +++++ src/server/ActionUtilities.ts | 31 +++++++++++++++++++++++++++---- src/server/Initialization.ts | 8 +++++++- src/server/Websocket/Websocket.ts | 12 ++++++------ src/server/index.ts | 12 ++++++++++-- 5 files changed, 55 insertions(+), 13 deletions(-) (limited to 'src') diff --git a/src/Utils.ts b/src/Utils.ts index 13d1be767..891a3826b 100644 --- a/src/Utils.ts +++ b/src/Utils.ts @@ -2,6 +2,7 @@ import v4 = require('uuid/v4'); import v5 = require("uuid/v5"); import { Socket } from 'socket.io'; import { Message } from './server/Message'; +import { EventEmitter } from 'events'; export namespace Utils { @@ -238,6 +239,10 @@ export namespace Utils { handler([arg, loggingCallback('S sending', fn, message.Name)]); }); } + + export function InjectLogger() { + + } } export function OmitKeys(obj: any, keys: string[], addKeyFunc?: (dup: any) => void): { omit: any, extract: any } { diff --git a/src/server/ActionUtilities.ts b/src/server/ActionUtilities.ts index b418772a6..5e88ea460 100644 --- a/src/server/ActionUtilities.ts +++ b/src/server/ActionUtilities.ts @@ -27,8 +27,31 @@ export const write_text_file = (relativePath: string, contents: any) => { }); }; -export async function log_execution(startMessage: string, endMessage: string, contents: () => void | Promise) { - console.log('\x1b[36m%s\x1b[0m', `${startMessage}...`); - await contents(); - console.log(endMessage); +export interface LogData { + startMessage: string; + endMessage: string; + action: () => void | Promise; +} + +export async function log_execution({ startMessage, endMessage, action }: LogData) { + const color = `\x1b[${30 + Math.ceil(Math.random() * 6)}m%s\x1b[0m`; + console.log(color, `${startMessage}...`); + await action(); + console.log(color, endMessage); +} + +export enum ConsoleColors { + Black = `\x1b[30m%s\x1b[0m`, + Red = `\x1b[31m%s\x1b[0m`, + Green = `\x1b[32m%s\x1b[0m`, + Yellow = `\x1b[33m%s\x1b[0m`, + Blue = `\x1b[34m%s\x1b[0m`, + Magenta = `\x1b[35m%s\x1b[0m`, + Cyan = `\x1b[36m%s\x1b[0m`, + White = `\x1b[37m%s\x1b[0m` +} + +export function logPort(listener: string, port: number) { + process.stdout.write(`${listener} listening on port `); + console.log(ConsoleColors.Yellow, port); } \ No newline at end of file diff --git a/src/server/Initialization.ts b/src/server/Initialization.ts index 6ac0787c5..357d7837e 100644 --- a/src/server/Initialization.ts +++ b/src/server/Initialization.ts @@ -19,6 +19,7 @@ import * as fs from 'fs'; import * as request from 'request'; import RouteSubscriber from './RouteSubscriber'; import { publicDirectory } from '.'; +import { ConsoleColors, logPort } from './ActionUtilities'; /* RouteSetter is a wrapper around the server that prevents the server from being exposed. 
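
The color constants introduced in ActionUtilities.ts above are plain ANSI escape sequences: \x1b[31m through \x1b[37m select a foreground color, \x1b[0m resets it, and %s is the console.log format placeholder the message is substituted into. A small self-contained sketch of the same idea, not taken from this codebase:

    // prints the message in cyan, then resets the terminal color
    const cyan = "\x1b[36m%s\x1b[0m";
    console.log(cyan, "server ready");

    // log_execution-style timing wrapper: same color for the start and end lines
    async function timed(message: string, action: () => void | Promise<any>) {
        const color = `\x1b[${31 + Math.floor(Math.random() * 6)}m%s\x1b[0m`;
        console.log(color, `${message}...`);
        await action();
        console.log(color, `${message} complete`);
    }
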
*/ @@ -35,6 +36,11 @@ export default async function InitializeServer(options: InitializationOptions) { server.use(express.static(publicDirectory)); server.use("/images", express.static(publicDirectory)); + server.use("*", (req, _res, next) => { + console.log(ConsoleColors.Cyan, req.originalUrl, req.user.id); + next(); + }); + server.use(wdm(compiler, { publicPath: config.output.publicPath })); server.use(whm(compiler)); @@ -44,7 +50,7 @@ export default async function InitializeServer(options: InitializationOptions) { const isRelease = determineEnvironment(); //vs. dev mode routeSetter(new RouteManager(server, isRelease)); - server.listen(listenAtPort, () => console.log(`server started at http://localhost:${listenAtPort}`)); + server.listen(listenAtPort, () => logPort("server", listenAtPort)); return isRelease; } diff --git a/src/server/Websocket/Websocket.ts b/src/server/Websocket/Websocket.ts index f6a6c8718..810b3410c 100644 --- a/src/server/Websocket/Websocket.ts +++ b/src/server/Websocket/Websocket.ts @@ -6,9 +6,9 @@ import { Database } from "../database"; import { Search } from "../Search"; import * as io from 'socket.io'; import YoutubeApi from "../apis/youtube/youtubeApiSample"; -import { readFile } from "fs"; -import { Credentials } from "google-auth-library"; import { GoogleCredentialsLoader } from "../credentials/CredentialsLoader"; +import { ConsoleColors, logPort } from "../ActionUtilities"; +import { EventEmitter } from "events"; export namespace WebSocket { @@ -28,10 +28,10 @@ export namespace WebSocket { async function preliminaryFunctions() { } - export function initialize(serverPort: number, isRelease: boolean) { + export function initialize(socketPort: number, isRelease: boolean) { const endpoint = io(); - endpoint.listen(serverPort); - console.log(`listening on port ${serverPort}`); + endpoint.listen(socketPort); + logPort("websocket", socketPort); endpoint.on("connection", function (socket: Socket) { socket.use((_packet, next) => { @@ -92,7 +92,7 @@ export namespace WebSocket { function barReceived(socket: SocketIO.Socket, guid: string) { clients[guid] = new Client(guid.toString()); - console.log(`User ${guid} has connected`); + console.log(ConsoleColors.Green, `User ${guid} has connected`); socketMap.set(socket, guid); } diff --git a/src/server/index.ts b/src/server/index.ts index 01ebf0ac1..618940c1a 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -43,7 +43,11 @@ async function preliminaryFunctions() { // divide the public directory based on type await Promise.all(Object.keys(Partitions).map(partition => DashUploadUtils.createIfNotExists(filesDirectory + partition))); // connect to the database - await Database.tryInitializeConnection(); + await log_execution({ + startMessage: "attempting to initialize mongodb connection", + endMessage: "connection outcome determined", + action: Database.tryInitializeConnection + }); } /** @@ -106,6 +110,10 @@ function routeSetter(router: RouteManager) { } (async function start() { - await log_execution("starting execution of preliminary functions", "completed preliminary functions", preliminaryFunctions); + await log_execution({ + startMessage: "starting execution of preliminary functions", + endMessage: "completed preliminary functions", + action: preliminaryFunctions + }); await initializeServer({ listenAtPort: 1050, routeSetter }); })(); -- cgit v1.2.3-70-g09d2 From cc96b05f8695f84c547438fed7300db0ac6d4266 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Tue, 19 Nov 2019 19:53:39 -0500 Subject: missed --- 
src/Utils.ts | 2 +- src/server/Websocket/Websocket.ts | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) (limited to 'src') diff --git a/src/Utils.ts b/src/Utils.ts index 891a3826b..2c9e50808 100644 --- a/src/Utils.ts +++ b/src/Utils.ts @@ -240,7 +240,7 @@ export namespace Utils { }); } - export function InjectLogger() { + export function InjectLogger(color: string) { } } diff --git a/src/server/Websocket/Websocket.ts b/src/server/Websocket/Websocket.ts index 810b3410c..9f0959e65 100644 --- a/src/server/Websocket/Websocket.ts +++ b/src/server/Websocket/Websocket.ts @@ -59,6 +59,8 @@ export namespace WebSocket { Utils.AddServerHandler(socket, MessageStore.DeleteFields, ids => DeleteFields(socket, ids)); Utils.AddServerHandlerCallback(socket, MessageStore.GetRefField, GetRefField); Utils.AddServerHandlerCallback(socket, MessageStore.GetRefFields, GetRefFields); + + Utils.InjectLogger(ConsoleColors.Magenta); }); } -- cgit v1.2.3-70-g09d2 From 23e72d25506a1cb87b15a34dbf7d85b61bfe063b Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Tue, 19 Nov 2019 20:05:12 -0500 Subject: inject logger, not working --- src/Utils.ts | 10 ++++++++-- src/server/Websocket/Websocket.ts | 2 +- 2 files changed, 9 insertions(+), 3 deletions(-) (limited to 'src') diff --git a/src/Utils.ts b/src/Utils.ts index 2c9e50808..1e69cfbfe 100644 --- a/src/Utils.ts +++ b/src/Utils.ts @@ -3,6 +3,7 @@ import v5 = require("uuid/v5"); import { Socket } from 'socket.io'; import { Message } from './server/Message'; import { EventEmitter } from 'events'; +import { ConsoleColors } from './server/ActionUtilities'; export namespace Utils { @@ -240,8 +241,13 @@ export namespace Utils { }); } - export function InjectLogger(color: string) { - + export function InjectLogger(socket: Socket, color: string) { + const on = socket.on; + socket.on = function (event: string | symbol, listener: (...args: any[]) => void) { + console.log(color, event); + on(event, listener); + return socket; + }; } } diff --git a/src/server/Websocket/Websocket.ts b/src/server/Websocket/Websocket.ts index 9f0959e65..308b32988 100644 --- a/src/server/Websocket/Websocket.ts +++ b/src/server/Websocket/Websocket.ts @@ -60,7 +60,7 @@ export namespace WebSocket { Utils.AddServerHandlerCallback(socket, MessageStore.GetRefField, GetRefField); Utils.AddServerHandlerCallback(socket, MessageStore.GetRefFields, GetRefFields); - Utils.InjectLogger(ConsoleColors.Magenta); + Utils.InjectLogger(socket, ConsoleColors.Yellow); }); } -- cgit v1.2.3-70-g09d2 From 6ddb23d48a8c97cbe12f841791825567c3a48e69 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Tue, 19 Nov 2019 22:17:34 -0500 Subject: removed call --- src/server/Websocket/Websocket.ts | 2 -- 1 file changed, 2 deletions(-) (limited to 'src') diff --git a/src/server/Websocket/Websocket.ts b/src/server/Websocket/Websocket.ts index 308b32988..810b3410c 100644 --- a/src/server/Websocket/Websocket.ts +++ b/src/server/Websocket/Websocket.ts @@ -59,8 +59,6 @@ export namespace WebSocket { Utils.AddServerHandler(socket, MessageStore.DeleteFields, ids => DeleteFields(socket, ids)); Utils.AddServerHandlerCallback(socket, MessageStore.GetRefField, GetRefField); Utils.AddServerHandlerCallback(socket, MessageStore.GetRefFields, GetRefFields); - - Utils.InjectLogger(socket, ConsoleColors.Yellow); }); } -- cgit v1.2.3-70-g09d2 From 1722788bf7d672043b1d51026b553426459bbd22 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Wed, 20 Nov 2019 19:17:48 -0500 Subject: small --- src/Utils.ts | 9 --------- src/server/Initialization.ts | 2 +- 
src/server/Websocket/Websocket.ts | 9 +++------ 3 files changed, 4 insertions(+), 16 deletions(-) (limited to 'src') diff --git a/src/Utils.ts b/src/Utils.ts index 1e69cfbfe..fc12c1e14 100644 --- a/src/Utils.ts +++ b/src/Utils.ts @@ -240,15 +240,6 @@ export namespace Utils { handler([arg, loggingCallback('S sending', fn, message.Name)]); }); } - - export function InjectLogger(socket: Socket, color: string) { - const on = socket.on; - socket.on = function (event: string | symbol, listener: (...args: any[]) => void) { - console.log(color, event); - on(event, listener); - return socket; - }; - } } export function OmitKeys(obj: any, keys: string[], addKeyFunc?: (dup: any) => void): { omit: any, extract: any } { diff --git a/src/server/Initialization.ts b/src/server/Initialization.ts index 357d7837e..08b476822 100644 --- a/src/server/Initialization.ts +++ b/src/server/Initialization.ts @@ -37,7 +37,7 @@ export default async function InitializeServer(options: InitializationOptions) { server.use("/images", express.static(publicDirectory)); server.use("*", (req, _res, next) => { - console.log(ConsoleColors.Cyan, req.originalUrl, req.user.id); + console.log(ConsoleColors.Cyan, req.originalUrl, req.user.email); next(); }); diff --git a/src/server/Websocket/Websocket.ts b/src/server/Websocket/Websocket.ts index 810b3410c..74a6b4263 100644 --- a/src/server/Websocket/Websocket.ts +++ b/src/server/Websocket/Websocket.ts @@ -8,7 +8,6 @@ import * as io from 'socket.io'; import YoutubeApi from "../apis/youtube/youtubeApiSample"; import { GoogleCredentialsLoader } from "../credentials/CredentialsLoader"; import { ConsoleColors, logPort } from "../ActionUtilities"; -import { EventEmitter } from "events"; export namespace WebSocket { @@ -30,9 +29,6 @@ export namespace WebSocket { export function initialize(socketPort: number, isRelease: boolean) { const endpoint = io(); - endpoint.listen(socketPort); - logPort("websocket", socketPort); - endpoint.on("connection", function (socket: Socket) { socket.use((_packet, next) => { let id = socketMap.get(socket); @@ -60,7 +56,8 @@ export namespace WebSocket { Utils.AddServerHandlerCallback(socket, MessageStore.GetRefField, GetRefField); Utils.AddServerHandlerCallback(socket, MessageStore.GetRefFields, GetRefFields); }); - + endpoint.listen(socketPort); + logPort("websocket", socketPort); } function HandleYoutubeQuery([query, callback]: [YoutubeQueryInput, (result?: any[]) => void]) { @@ -92,7 +89,7 @@ export namespace WebSocket { function barReceived(socket: SocketIO.Socket, guid: string) { clients[guid] = new Client(guid.toString()); - console.log(ConsoleColors.Green, `User ${guid} has connected`); + console.log(ConsoleColors.Green, `user ${guid} has connected to the web socket`); socketMap.set(socket, guid); } -- cgit v1.2.3-70-g09d2 From 39bac937106e77679b2dc76078b812a6b6b11a94 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Thu, 21 Nov 2019 18:20:01 -0500 Subject: last cleanup --- src/server/ActionUtilities.ts | 3 ++- src/server/ApiManagers/ApiManager.ts | 4 ++-- src/server/ApiManagers/UploadManager.ts | 4 ++-- src/server/DashUploadUtils.ts | 12 ++++++++++++ src/server/index.ts | 25 ++++++++----------------- 5 files changed, 26 insertions(+), 22 deletions(-) (limited to 'src') diff --git a/src/server/ActionUtilities.ts b/src/server/ActionUtilities.ts index 5e88ea460..a5f33833d 100644 --- a/src/server/ActionUtilities.ts +++ b/src/server/ActionUtilities.ts @@ -33,8 +33,9 @@ export interface LogData { action: () => void | Promise; } +let current = 
Math.ceil(Math.random() * 20); export async function log_execution({ startMessage, endMessage, action }: LogData) { - const color = `\x1b[${30 + Math.ceil(Math.random() * 6)}m%s\x1b[0m`; + const color = `\x1b[${31 + current++ % 6}m%s\x1b[0m`; console.log(color, `${startMessage}...`); await action(); console.log(color, endMessage); diff --git a/src/server/ApiManagers/ApiManager.ts b/src/server/ApiManagers/ApiManager.ts index 9fd726060..e2b01d585 100644 --- a/src/server/ApiManagers/ApiManager.ts +++ b/src/server/ApiManagers/ApiManager.ts @@ -5,7 +5,7 @@ export type Registration = (initializer: RouteInitializer) => void; export default abstract class ApiManager { protected abstract initialize(register: Registration): void; - public register(router: RouteManager) { - this.initialize(router.addSupervisedRoute); + public register(register: Registration) { + this.initialize(register); } } \ No newline at end of file diff --git a/src/server/ApiManagers/UploadManager.ts b/src/server/ApiManagers/UploadManager.ts index 01abdab54..aca63a918 100644 --- a/src/server/ApiManagers/UploadManager.ts +++ b/src/server/ApiManagers/UploadManager.ts @@ -5,7 +5,7 @@ import v4 = require('uuid/v4'); var AdmZip = require('adm-zip'); import * as path from 'path'; import { createReadStream, createWriteStream, unlink, readFileSync } from "fs"; -import { publicDirectory, filesDirectory, Partitions } from ".."; +import { publicDirectory, filesDirectory } from ".."; import { Database } from "../database"; import { DashUploadUtils } from "../DashUploadUtils"; import { Opt } from "../../new_fields/Doc"; @@ -142,7 +142,7 @@ export default class UploadManager extends ApiManager { let dataBuffer = readFileSync(filesDirectory + filename); const result: ParsedPDF = await pdf(dataBuffer); await new Promise((resolve, reject) => { - const path = filesDirectory + Partitions.pdf_text + "/" + filename.substring(0, filename.length - ".pdf".length) + ".txt"; + const path = filesDirectory + DashUploadUtils.Partitions.pdf_text + "/" + filename.substring(0, filename.length - ".pdf".length) + ".txt"; createWriteStream(path).write(result.text, error => { if (!error) { resolve(); diff --git a/src/server/DashUploadUtils.ts b/src/server/DashUploadUtils.ts index 8f5b0e1a8..8a429b81b 100644 --- a/src/server/DashUploadUtils.ts +++ b/src/server/DashUploadUtils.ts @@ -6,6 +6,7 @@ import request = require('request-promise'); import { ExifData, ExifImage } from 'exif'; import { Opt } from '../new_fields/Doc'; import { SharedMediaTypes } from './SharedMediaTypes'; +import { filesDirectory } from '.'; const uploadDirectory = path.join(__dirname, './public/files/'); @@ -89,6 +90,17 @@ export namespace DashUploadUtils { error?: string; } + export enum Partitions { + pdf_text, + images, + videos + } + + export async function buildFilePartitions() { + const pending = Object.keys(Partitions).map(sub => createIfNotExists(filesDirectory + sub)); + return Promise.all(pending); + } + /** * Based on the url's classification as local or remote, gleans * as much information as possible about the specified image diff --git a/src/server/index.ts b/src/server/index.ts index 618940c1a..9c48aca45 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -22,13 +22,8 @@ import { log_execution } from "./ActionUtilities"; import GeneralGoogleManager from "./ApiManagers/GeneralGoogleManager"; import GooglePhotosManager from "./ApiManagers/GooglePhotosManager"; -export const publicDirectory = __dirname + "/public"; -export const filesDirectory = publicDirectory + 
"/files/"; -export enum Partitions { - pdf_text, - images, - videos -} +export const publicDirectory = path.resolve(__dirname, "public"); +export const filesDirectory = path.resolve(publicDirectory, "files") + "/"; /** * These are the functions run before the server starts @@ -36,13 +31,9 @@ export enum Partitions { * before clients can access the server should be run or awaited here. */ async function preliminaryFunctions() { - // make project credentials globally accessible await GoogleCredentialsLoader.loadCredentials(); - // read the resulting credentials into a different namespace GoogleApiServerUtils.processProjectCredentials(); - // divide the public directory based on type - await Promise.all(Object.keys(Partitions).map(partition => DashUploadUtils.createIfNotExists(filesDirectory + partition))); - // connect to the database + await DashUploadUtils.buildFilePartitions(); await log_execution({ startMessage: "attempting to initialize mongodb connection", endMessage: "connection outcome determined", @@ -59,7 +50,7 @@ async function preliminaryFunctions() { * that will manage the registration of new routes * with the server */ -function routeSetter(router: RouteManager) { +function routeSetter({ isRelease, addSupervisedRoute }: RouteManager) { const managers = [ new UserManager(), new UploadManager(), @@ -73,16 +64,16 @@ function routeSetter(router: RouteManager) { ]; // initialize API Managers - managers.forEach(manager => manager.register(router)); + managers.forEach(manager => manager.register(addSupervisedRoute)); // initialize the web socket (bidirectional communication: if a user changes // a field on one client, that change must be broadcast to all other clients) - WebSocket.initialize(serverPort, router.isRelease); + WebSocket.initialize(serverPort, isRelease); /** * Accessing root index redirects to home */ - router.addSupervisedRoute({ + addSupervisedRoute({ method: Method.GET, subscription: "/", onValidation: ({ res }) => res.redirect("/home") @@ -94,7 +85,7 @@ function routeSetter(router: RouteManager) { res.sendFile(path.join(__dirname, '../../deploy/' + filename)); }; - router.addSupervisedRoute({ + addSupervisedRoute({ method: Method.GET, subscription: ["/home", new RouteSubscriber("doc").add("docId")], onValidation: serve, -- cgit v1.2.3-70-g09d2 From b831be86743e329cce441b3d7ae2aa5321e7fb9c Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Sat, 23 Nov 2019 17:09:13 -0500 Subject: improved user activity log --- src/server/ActionUtilities.ts | 13 ++++ src/server/ApiManagers/UserManager.ts | 46 ++++++------ src/server/Initialization.ts | 8 ++- src/server/Websocket/Websocket.ts | 22 +++--- .../authentication/controllers/user_controller.ts | 3 - views/login.pug | 2 - views/stylesheets/authentication.css | 81 ++++++++++++++++++++++ views/user_activity.pug | 19 +++++ 8 files changed, 155 insertions(+), 39 deletions(-) create mode 100644 views/user_activity.pug (limited to 'src') diff --git a/src/server/ActionUtilities.ts b/src/server/ActionUtilities.ts index a5f33833d..7f493dd70 100644 --- a/src/server/ActionUtilities.ts +++ b/src/server/ActionUtilities.ts @@ -55,4 +55,17 @@ export enum ConsoleColors { export function logPort(listener: string, port: number) { process.stdout.write(`${listener} listening on port `); console.log(ConsoleColors.Yellow, port); +} + +export function msToTime(duration: number) { + let milliseconds = Math.floor((duration % 1000) / 100), + seconds = Math.floor((duration / 1000) % 60), + minutes = Math.floor((duration / (1000 * 60)) % 60), + hours = 
Math.floor((duration / (1000 * 60 * 60)) % 24); + + let hoursS = (hours < 10) ? "0" + hours : hours; + let minutesS = (minutes < 10) ? "0" + minutes : minutes; + let secondsS = (seconds < 10) ? "0" + seconds : seconds; + + return hoursS + ":" + minutesS + ":" + secondsS + "." + milliseconds; } \ No newline at end of file diff --git a/src/server/ApiManagers/UserManager.ts b/src/server/ApiManagers/UserManager.ts index 51a434fcf..4ee5a2b85 100644 --- a/src/server/ApiManagers/UserManager.ts +++ b/src/server/ApiManagers/UserManager.ts @@ -1,7 +1,14 @@ import ApiManager, { Registration } from "./ApiManager"; import { Method } from "../RouteManager"; -import { WebSocket } from "../Websocket/Websocket"; import { Database } from "../database"; +import { msToTime } from "../ActionUtilities"; + +export const timeMap: { [id: string]: number } = {}; +interface ActivityUnit { + user: string; + duration: number; +} + export default class UserManager extends ApiManager { @@ -32,35 +39,34 @@ export default class UserManager extends ApiManager { register({ method: Method.GET, - subscription: "/whosOnline", + subscription: "/activity", onValidation: ({ res }) => { - let users: any = { active: {}, inactive: {} }; const now = Date.now(); - const { timeMap } = WebSocket; + const activeTimes: ActivityUnit[] = []; + const inactiveTimes: ActivityUnit[] = []; + for (const user in timeMap) { const time = timeMap[user]; - const key = ((now - time) / 1000) < (60 * 5) ? "active" : "inactive"; - users[key][user] = `Last active ${msToTime(now - time)} ago`; + const duration = now - time; + const target = (duration / 1000) < (60 * 5) ? activeTimes : inactiveTimes; + target.push({ user, duration }); } - res.send(users); + const process = (target: { user: string, duration: number }[]) => { + const comparator = (first: ActivityUnit, second: ActivityUnit) => first.duration - second.duration; + const sorted = target.sort(comparator); + return sorted.map(({ user, duration }) => `${user} (last active ${msToTime(duration)} ago)`); + }; + + res.render("user_activity.pug", { + title: "User Activity", + active: process(activeTimes), + inactive: process(inactiveTimes) + }); } }); } -} - -function msToTime(duration: number) { - let milliseconds = Math.floor((duration % 1000) / 100), - seconds = Math.floor((duration / 1000) % 60), - minutes = Math.floor((duration / (1000 * 60)) % 60), - hours = Math.floor((duration / (1000 * 60 * 60)) % 24); - - let hoursS = (hours < 10) ? "0" + hours : hours; - let minutesS = (minutes < 10) ? "0" + minutes : minutes; - let secondsS = (seconds < 10) ? "0" + seconds : seconds; - - return hoursS + ":" + minutesS + ":" + secondsS + "." + milliseconds; } \ No newline at end of file diff --git a/src/server/Initialization.ts b/src/server/Initialization.ts index 08b476822..7fad5556d 100644 --- a/src/server/Initialization.ts +++ b/src/server/Initialization.ts @@ -20,6 +20,8 @@ import * as request from 'request'; import RouteSubscriber from './RouteSubscriber'; import { publicDirectory } from '.'; import { ConsoleColors, logPort } from './ActionUtilities'; +import { WebSocket } from './Websocket/Websocket'; +import { timeMap } from './ApiManagers/UserManager'; /* RouteSetter is a wrapper around the server that prevents the server from being exposed. 
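
Putting the pieces above together: every authenticated request stamps timeMap[email] with Date.now() (the middleware added in the Initialization.ts hunk that follows), and the /activity page only measures how long ago each stamp was, using the same five-minute cutoff shown above. A toy sketch of that bookkeeping, with the recordActivity and partition helpers invented for illustration:

    const timeMap: { [email: string]: number } = {};

    // called from request middleware for each signed-in user
    function recordActivity(email: string) {
        timeMap[email] = Date.now();
    }

    // active = seen within the last five minutes; everyone else is inactive
    function partition(now = Date.now()) {
        const active: string[] = [];
        const inactive: string[] = [];
        for (const email in timeMap) {
            ((now - timeMap[email]) / 1000 < 60 * 5 ? active : inactive).push(email);
        }
        return { active, inactive };
    }
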
*/ @@ -37,7 +39,11 @@ export default async function InitializeServer(options: InitializationOptions) { server.use("/images", express.static(publicDirectory)); server.use("*", (req, _res, next) => { - console.log(ConsoleColors.Cyan, req.originalUrl, req.user.email); + const userEmail = req.user?.email; + console.log(ConsoleColors.Cyan, req.originalUrl, userEmail ?? ""); + if (userEmail) { + timeMap[userEmail] = Date.now(); + } next(); }); diff --git a/src/server/Websocket/Websocket.ts b/src/server/Websocket/Websocket.ts index 74a6b4263..fbf71f707 100644 --- a/src/server/Websocket/Websocket.ts +++ b/src/server/Websocket/Websocket.ts @@ -8,16 +8,12 @@ import * as io from 'socket.io'; import YoutubeApi from "../apis/youtube/youtubeApiSample"; import { GoogleCredentialsLoader } from "../credentials/CredentialsLoader"; import { ConsoleColors, logPort } from "../ActionUtilities"; +import { timeMap } from "../ApiManagers/UserManager"; export namespace WebSocket { - interface Map { - [key: string]: Client; - } - let clients: Map = {}; - + let clients: { [key: string]: Client } = {}; export const socketMap = new Map(); - export const timeMap: { [id: string]: number } = {}; export async function start(serverPort: number, isRelease: boolean) { await preliminaryFunctions(); @@ -31,9 +27,9 @@ export namespace WebSocket { const endpoint = io(); endpoint.on("connection", function (socket: Socket) { socket.use((_packet, next) => { - let id = socketMap.get(socket); - if (id) { - timeMap[id] = Date.now(); + let userEmail = socketMap.get(socket); + if (userEmail) { + timeMap[userEmail] = Date.now(); } next(); }); @@ -87,10 +83,10 @@ export namespace WebSocket { await Search.Instance.clear(); } - function barReceived(socket: SocketIO.Socket, guid: string) { - clients[guid] = new Client(guid.toString()); - console.log(ConsoleColors.Green, `user ${guid} has connected to the web socket`); - socketMap.set(socket, guid); + function barReceived(socket: SocketIO.Socket, userEmail: string) { + clients[userEmail] = new Client(userEmail.toString()); + console.log(ConsoleColors.Green, `user ${userEmail} has connected to the web socket`); + socketMap.set(socket, userEmail); } function getField([id, callback]: [string, (result?: Transferable) => void]) { diff --git a/src/server/authentication/controllers/user_controller.ts b/src/server/authentication/controllers/user_controller.ts index b2b9d33f6..517353479 100644 --- a/src/server/authentication/controllers/user_controller.ts +++ b/src/server/authentication/controllers/user_controller.ts @@ -3,10 +3,7 @@ import { Request, Response, NextFunction } from "express"; import * as passport from "passport"; import { IVerifyOptions } from "passport-local"; import "../config/passport"; -import * as request from "express-validator"; import flash = require("express-flash"); -import * as session from "express-session"; -import * as pug from 'pug'; import * as async from 'async'; import * as nodemailer from 'nodemailer'; import c = require("crypto"); diff --git a/views/login.pug b/views/login.pug index 9bc40a495..26da5e29e 100644 --- a/views/login.pug +++ b/views/login.pug @@ -14,11 +14,9 @@ block content .inner.login h3.auth_header Log In .form-group - //- label.col-sm-3.control-label(for='email', id='email_label') Email .col-sm-7 input.form-control(type='email', name='email', id='email', placeholder='Email', autofocus, required) .form-group - //- label.col-sm-3.control-label(for='password') Password .col-sm-7 input.form-control(type='password', name='password', id='password', 
placeholder='Password', required) .form-group diff --git a/views/stylesheets/authentication.css b/views/stylesheets/authentication.css index 36bb880af..ff1f4aace 100644 --- a/views/stylesheets/authentication.css +++ b/views/stylesheets/authentication.css @@ -139,4 +139,85 @@ body { padding-right: 10px; font-family: Arial, Helvetica, sans-serif; font-size: 16px; +} + +.outermost, .online-container { + display: flex; + flex-direction: row; + height: 98vh; + justify-content: center; +} + +.online-container { + background: white; + display: flex; + flex-direction: row; + height: 80%; + width: 80%; + align-self: center; + justify-content: center; + border-radius: 8px; + box-shadow: 10px 10px 10px #00000099; +} + +.partition { + width: 50%; + display: flex; + flex-direction: column; + border: 1px solid black; +} + +.inner-activity { + display: flex; + flex-direction: column; + justify-content: center; + height: 100%; + border-top: 2px solid black; + background: white; + padding: 20px; + overflow: scroll; +} + +ol { + align-self: center; +} + +li { + font-family: Arial, Helvetica, sans-serif; + border: 1px solid black; + padding: 10px; + border-radius: 5px; + margin-bottom: 5px; +} + +.duration { + font-style: italic; +} + +span.user-type { + align-self: center; + font-family: Arial, Helvetica, sans-serif; + font-weight: bold; + font-size: 20px; + margin: 50px; +} + +#active-partition { + background: green; + border-top-left-radius: 8px; + border-bottom-left-radius: 8px; +} + +#active-inner { + border-bottom-left-radius: 8px; +} + +#inactive-partition { + background: red; + border-top-right-radius: 8px; + border-bottom-right-radius: 8px; +} + +#inactive-inner { + border-bottom-right-radius: 8px; } \ No newline at end of file diff --git a/views/user_activity.pug b/views/user_activity.pug new file mode 100644 index 000000000..68e42140d --- /dev/null +++ b/views/user_activity.pug @@ -0,0 +1,19 @@ +extends ./layout + +block content + style + include ./stylesheets/authentication.css + .outermost + .online-container + .partition(id="active-partition") + span.user-type Active Users + .inner-activity(id="active-inner") + ol + each val in active + li= val + .partition(id="inactive-partition") + span.user-type Inactive Users + .inner-activity(id="inactive-inner") + ol + each val in inactive + li= val \ No newline at end of file -- cgit v1.2.3-70-g09d2 From e324248724a130a84b459a072dc846f500f8d9b0 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Sat, 23 Nov 2019 18:19:27 -0500 Subject: heartbeats --- src/client/DocServer.ts | 2 ++ src/client/util/ClientDiagnostics.ts | 29 +++++++++++++++++++++++++++++ src/client/views/Main.tsx | 2 ++ src/client/views/MainView.tsx | 6 ++++-- src/server/ApiManagers/DiagnosticManager.ts | 26 ++++++++++++++++++++++++++ src/server/ApiManagers/UserManager.ts | 2 +- src/server/Initialization.ts | 13 +++++++------ src/server/index.ts | 2 ++ 8 files changed, 73 insertions(+), 9 deletions(-) create mode 100644 src/client/util/ClientDiagnostics.ts create mode 100644 src/server/ApiManagers/DiagnosticManager.ts (limited to 'src') diff --git a/src/client/DocServer.ts b/src/client/DocServer.ts index 2cec1046b..14479694c 100644 --- a/src/client/DocServer.ts +++ b/src/client/DocServer.ts @@ -64,6 +64,8 @@ export namespace DocServer { } } + let connection_error = false; + export function init(protocol: string, hostname: string, port: number, identifier: string) { _cache = {}; GUID = identifier; diff --git a/src/client/util/ClientDiagnostics.ts b/src/client/util/ClientDiagnostics.ts new file 
mode 100644 index 000000000..e454cdecb --- /dev/null +++ b/src/client/util/ClientDiagnostics.ts @@ -0,0 +1,29 @@ +import { observable, runInAction } from "mobx"; +import { MainView } from "../views/MainView"; + +export namespace ClientDiagnostics { + + export function start() { + + let serverPolls = 0; + const serverHandle = setInterval(async () => { + if (++serverPolls === 20) { + alert("Your connection to the server has been terminated."); + clearInterval(serverHandle); + } + await fetch("/serverHeartbeat"); + serverPolls--; + }, 100); + + + const solrHandle = setInterval(async () => { + const response = await fetch("/solrHeartbeat"); + if (!response) { + alert("Looks like SOLR is not running on your machine."); + clearInterval(solrHandle); + } + }, 100); + + } + +} \ No newline at end of file diff --git a/src/client/views/Main.tsx b/src/client/views/Main.tsx index b21eb9c8f..dec4a24e4 100644 --- a/src/client/views/Main.tsx +++ b/src/client/views/Main.tsx @@ -5,10 +5,12 @@ import * as ReactDOM from 'react-dom'; import * as React from 'react'; import { DocServer } from "../DocServer"; import { AssignAllExtensions } from "../../extensions/General/Extensions"; +import { ClientDiagnostics } from "../util/ClientDiagnostics"; AssignAllExtensions(); (async () => { + ClientDiagnostics.start(); const info = await CurrentUserUtils.loadCurrentUser(); DocServer.init(window.location.protocol, window.location.hostname, 4321, info.email); await Docs.Prototypes.initialize(); diff --git a/src/client/views/MainView.tsx b/src/client/views/MainView.tsx index 031926604..d352ad776 100644 --- a/src/client/views/MainView.tsx +++ b/src/client/views/MainView.tsx @@ -4,7 +4,7 @@ import { faMusic, faObjectGroup, faPause, faMousePointer, faPenNib, faFileAudio, faPen, faEraser, faPlay, faPortrait, faRedoAlt, faThumbtack, faTree, faTv, faUndoAlt, faHighlighter, faMicrophone, faCompressArrowsAlt } from '@fortawesome/free-solid-svg-icons'; import { FontAwesomeIcon } from '@fortawesome/react-fontawesome'; -import { action, computed, configure, observable, reaction, runInAction } from 'mobx'; +import { action, computed, configure, observable, reaction, runInAction, autorun } from 'mobx'; import { observer } from 'mobx-react'; import "normalize.css"; import * as React from 'react'; @@ -414,7 +414,8 @@ export class MainView extends React.Component { -
; + + ; } @computed get mainContent() { @@ -422,6 +423,7 @@ export class MainView extends React.Component { return !this.userDoc || !(sidebar instanceof Doc) ? (null) : (
+
HEY!
diff --git a/src/server/ApiManagers/DiagnosticManager.ts b/src/server/ApiManagers/DiagnosticManager.ts new file mode 100644 index 000000000..b775167b6 --- /dev/null +++ b/src/server/ApiManagers/DiagnosticManager.ts @@ -0,0 +1,26 @@ +import ApiManager, { Registration } from "./ApiManager"; +import { Method } from "../RouteManager"; +import request = require('request-promise'); + +export default class DiagnosticManager extends ApiManager { + + protected initialize(register: Registration): void { + + register({ + method: Method.GET, + subscription: "/serverHeartbeat", + onValidation: ({ res }) => res.send(true) + }); + + register({ + method: Method.GET, + subscription: "/solrHeartbeat", + onValidation: async ({ res }) => { + const response = await request("http://localhost:8983"); + res.send(response !== undefined); + } + }); + + } + +} \ No newline at end of file diff --git a/src/server/ApiManagers/UserManager.ts b/src/server/ApiManagers/UserManager.ts index 4ee5a2b85..8edeab16d 100644 --- a/src/server/ApiManagers/UserManager.ts +++ b/src/server/ApiManagers/UserManager.ts @@ -56,7 +56,7 @@ export default class UserManager extends ApiManager { const process = (target: { user: string, duration: number }[]) => { const comparator = (first: ActivityUnit, second: ActivityUnit) => first.duration - second.duration; const sorted = target.sort(comparator); - return sorted.map(({ user, duration }) => `${user} (last active ${msToTime(duration)} ago)`); + return sorted.map(({ user, duration }) => `${user} (${msToTime(duration)})`); }; res.render("user_activity.pug", { diff --git a/src/server/Initialization.ts b/src/server/Initialization.ts index 7fad5556d..76acb4363 100644 --- a/src/server/Initialization.ts +++ b/src/server/Initialization.ts @@ -20,7 +20,6 @@ import * as request from 'request'; import RouteSubscriber from './RouteSubscriber'; import { publicDirectory } from '.'; import { ConsoleColors, logPort } from './ActionUtilities'; -import { WebSocket } from './Websocket/Websocket'; import { timeMap } from './ApiManagers/UserManager'; /* RouteSetter is a wrapper around the server that prevents the server @@ -38,11 +37,13 @@ export default async function InitializeServer(options: InitializationOptions) { server.use(express.static(publicDirectory)); server.use("/images", express.static(publicDirectory)); - server.use("*", (req, _res, next) => { - const userEmail = req.user?.email; - console.log(ConsoleColors.Cyan, req.originalUrl, userEmail ?? ""); - if (userEmail) { - timeMap[userEmail] = Date.now(); + server.use("*", ({ user, originalUrl }, _res, next) => { + if (!originalUrl.includes("Heartbeat")) { + const userEmail = user?.email; + console.log(ConsoleColors.Cyan, originalUrl, userEmail ?? 
""); + if (userEmail) { + timeMap[userEmail] = Date.now(); + } } next(); }); diff --git a/src/server/index.ts b/src/server/index.ts index 9c48aca45..d02a6005e 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -21,6 +21,7 @@ import UploadManager from "./ApiManagers/UploadManager"; import { log_execution } from "./ActionUtilities"; import GeneralGoogleManager from "./ApiManagers/GeneralGoogleManager"; import GooglePhotosManager from "./ApiManagers/GooglePhotosManager"; +import DiagnosticManager from "./ApiManagers/DiagnosticManager"; export const publicDirectory = path.resolve(__dirname, "public"); export const filesDirectory = path.resolve(publicDirectory, "files") + "/"; @@ -55,6 +56,7 @@ function routeSetter({ isRelease, addSupervisedRoute }: RouteManager) { new UserManager(), new UploadManager(), new DownloadManager(), + new DiagnosticManager(), new SearchManager(), new PDFManager(), new DeleteManager(), -- cgit v1.2.3-70-g09d2 From 2a84a002b06bdff969483f19390bf5a6d416d3a9 Mon Sep 17 00:00:00 2001 From: Bob Zeleznik Date: Sun, 24 Nov 2019 23:39:44 -0500 Subject: lots of fixes for full screen image/pdf/video views. fixes to image box fade image. --- src/client/documents/Documents.ts | 3 +- .../views/collections/CollectionDockingView.tsx | 4 +- .../views/nodes/ContentFittingDocumentView.tsx | 2 +- src/client/views/nodes/ImageBox.scss | 39 +-- src/client/views/nodes/ImageBox.tsx | 33 ++- src/client/views/nodes/PDFBox.scss | 306 +++++++++++---------- src/client/views/nodes/PDFBox.tsx | 25 +- src/client/views/nodes/VideoBox.scss | 5 +- src/client/views/nodes/VideoBox.tsx | 58 ++-- src/client/views/pdf/PDFViewer.scss | 4 +- src/client/views/pdf/PDFViewer.tsx | 9 +- 11 files changed, 256 insertions(+), 232 deletions(-) (limited to 'src') diff --git a/src/client/documents/Documents.ts b/src/client/documents/Documents.ts index c5bf109a1..3f223c281 100644 --- a/src/client/documents/Documents.ts +++ b/src/client/documents/Documents.ts @@ -215,7 +215,8 @@ export namespace Docs { layout: { view: PresElementBox, dataField: data } }], [DocumentType.INK, { - layout: { view: InkingStroke, dataField: data } + layout: { view: InkingStroke, dataField: data }, + options: { backgroundColor: "transparent" } }] ]); diff --git a/src/client/views/collections/CollectionDockingView.tsx b/src/client/views/collections/CollectionDockingView.tsx index 75d92105b..a99688017 100644 --- a/src/client/views/collections/CollectionDockingView.tsx +++ b/src/client/views/collections/CollectionDockingView.tsx @@ -649,6 +649,7 @@ export class DockedFrameRenderer extends React.Component { return Transform.Identity(); } get previewPanelCenteringOffset() { return this.nativeWidth() && !this.layoutDoc!.ignoreAspect ? (this._panelWidth - this.nativeWidth() * this.contentScaling()) / 2 : 0; } + get widthpercent() { return this.nativeWidth() && !this.layoutDoc!.ignoreAspect ? `${(this.nativeWidth() * this.contentScaling()) / this.panelWidth() * 100}%` : undefined; } addDocTab = (doc: Doc, dataDoc: Opt, location: string) => { SelectionManager.DeselectAll(); @@ -697,7 +698,8 @@ export class DockedFrameRenderer extends React.Component { (
this._mainCont = ref} style={{ transform: `translate(${this.previewPanelCenteringOffset}px, 0px)`, - height: this.layoutDoc && this.layoutDoc.fitWidth ? undefined : "100%" + height: this.layoutDoc && this.layoutDoc.fitWidth ? undefined : "100%", + width: this.widthpercent }}> {this.docView}
); diff --git a/src/client/views/nodes/ContentFittingDocumentView.tsx b/src/client/views/nodes/ContentFittingDocumentView.tsx index 573a55710..a5f96d2de 100644 --- a/src/client/views/nodes/ContentFittingDocumentView.tsx +++ b/src/client/views/nodes/ContentFittingDocumentView.tsx @@ -82,7 +82,7 @@ export class ContentFittingDocumentView extends React.Component -
- +
+ - {fadepath === srcpath ? (null) :
} + {fadepath === srcpath ? (null) :
+
}
[this.content]; render() { - return (
+ return (
this.gotoPage(Number(e.currentTarget.value))} - style={{ left: 5, top: 5, height: "30px", width: "30px", position: "absolute", pointerEvents: "all" }} + style={{ left: 5, top: 5, height: "20px", width: "20px", position: "absolute", pointerEvents: "all" }} onClick={action(() => this._pageControls = !this._pageControls)} /> {this._pageControls ? pageBtns : (null)}
e.stopPropagation()}>
@@ -193,10 +193,15 @@ export class PDFBox extends DocAnnotatableComponent ContextMenu.Instance.addItem({ description: "Pdf Funcs...", subitems: funcs, icon: "asterisk" }); } + @computed get contentScaling() { return this.props.ContentScaling(); } @computed get renderTitleBox() { - let classname = "pdfBox-cont" + (this.active() ? "-interactive" : ""); - return
-
+ let classname = "pdfBox" + (this.active() ? "-interactive" : ""); + return
+
{this.props.Document.title}
; @@ -205,7 +210,7 @@ export class PDFBox extends DocAnnotatableComponent isChildActive = (outsideReaction?: boolean) => this._isChildActive; @computed get renderPdfView() { const pdfUrl = Cast(this.dataDoc[this.props.fieldKey], PdfField); - return
+ return
+ return ([
{"" + Math.round(curTime)} {" " + Math.round((curTime - Math.trunc(curTime)) * 100)}
, -
+
, VideoBox._showControls ? (null) : [ -
+
, -
+
F
]]); @@ -335,30 +335,32 @@ export class VideoBox extends DocAnnotatableComponent [this.youtubeVideoId ? this.youtubeContent : this.content]; render() { - return (
- - {this.contentFunc} - +
+ + {this.contentFunc} + +
{this.uIButtons}
); } diff --git a/src/client/views/pdf/PDFViewer.scss b/src/client/views/pdf/PDFViewer.scss index ac018aa0e..4f81c6f70 100644 --- a/src/client/views/pdf/PDFViewer.scss +++ b/src/client/views/pdf/PDFViewer.scss @@ -1,5 +1,5 @@ -.pdfViewer-viewer, .pdfViewer-viewer-zoomed { +.pdfViewer, .pdfViewer-zoomed { pointer-events: all; width: 100%; height: 100%; @@ -91,7 +91,7 @@ z-index: 10; } } -.pdfViewer-viewer-zoomed { +.pdfViewer-zoomed { overflow-x: scroll; } \ No newline at end of file diff --git a/src/client/views/pdf/PDFViewer.tsx b/src/client/views/pdf/PDFViewer.tsx index f1c500391..b737ce221 100644 --- a/src/client/views/pdf/PDFViewer.tsx +++ b/src/client/views/pdf/PDFViewer.tsx @@ -660,6 +660,7 @@ export class PDFViewer extends DocAnnotatableComponent; } + @computed get contentScaling() { return this.props.ContentScaling(); } @computed get standinViews() { return <> {this._showCover ? this.getCoverImage() : (null)} @@ -673,16 +674,16 @@ export class PDFViewer extends DocAnnotatableComponent this._marqueeing; visibleHeight = () => this.props.PanelHeight() / this.props.ContentScaling() * 72 / 96; contentZoom = () => this._zoomed; - @computed get contentScaling() { return this.props.ContentScaling(); } render() { TraceMobx(); return !this.extensionDoc ? (null) : -
+ transform: `scale(${this.props.ContentScaling()})` + }} > {this.pdfViewerDiv} {this.overlayLayer} {this.annotationLayer} -- cgit v1.2.3-70-g09d2 From eb58759c7304bb9bdce2e1c4804a2e164eb25bcc Mon Sep 17 00:00:00 2001 From: Bob Zeleznik Date: Mon, 25 Nov 2019 13:59:54 -0500 Subject: fixes for docking views to reliable show their panes in tree view. fixes for search to fit available space and update properly when scrolled. fixed ymargins on stacking views with titles. --- src/client/documents/Documents.ts | 4 ++- .../views/collections/CollectionDockingView.tsx | 37 ++++++++-------------- .../views/collections/CollectionStackingView.tsx | 2 +- src/client/views/nodes/DocumentView.tsx | 2 +- src/client/views/search/SearchBox.scss | 4 --- src/client/views/search/SearchBox.tsx | 13 ++++---- 6 files changed, 25 insertions(+), 37 deletions(-) (limited to 'src') diff --git a/src/client/documents/Documents.ts b/src/client/documents/Documents.ts index 3f223c281..dea057b93 100644 --- a/src/client/documents/Documents.ts +++ b/src/client/documents/Documents.ts @@ -524,7 +524,9 @@ export namespace Docs { } export function DockDocument(documents: Array, config: string, options: DocumentOptions, id?: string) { - return InstanceFromProto(Prototypes.get(DocumentType.COL), new List(documents), { ...options, viewType: CollectionViewType.Docking, dockingConfig: config }, id); + let inst = InstanceFromProto(Prototypes.get(DocumentType.COL), new List(documents), { ...options, viewType: CollectionViewType.Docking, dockingConfig: config }, id); + Doc.GetProto(inst).data = new List(documents); + return inst; } export function DirectoryImportDocument(options: DocumentOptions = {}) { diff --git a/src/client/views/collections/CollectionDockingView.tsx b/src/client/views/collections/CollectionDockingView.tsx index a99688017..3040e74b0 100644 --- a/src/client/views/collections/CollectionDockingView.tsx +++ b/src/client/views/collections/CollectionDockingView.tsx @@ -146,8 +146,6 @@ export class CollectionDockingView extends React.Component { - let docs = Cast(this.props.Document.data, listSpec(Doc)); - if (!docs) { - return false; - } - return docs.includes(document); - } - // // Creates a vertical split on the right side of the docking view, and then adds the Document to that split // @@ -187,10 +177,6 @@ export class CollectionDockingView extends React.Component { Doc.GetProto(document).lastOpened = new DateField; - let docs = Cast(this.props.Document.data, listSpec(Doc)); - if (docs) { - docs.push(document); - } let docContentConfig = CollectionDockingView.makeDocumentConfig(document, dataDocument); if (stack === undefined) { let stack: any = this._goldenLayout.root; @@ -369,15 +351,22 @@ export class CollectionDockingView extends React.Component { + let matches = json.match(/\"documentId\":\"[a-z0-9-]+\"/g); + let docids = matches?.map(m => m.replace("\"documentId\":\"", "").replace("\"", "")); + + if (docids) { + let docs = (await Promise.all(docids.map(id => DocServer.GetRefField(id)))).filter(f => f).map(f => f as Doc); + Doc.GetProto(this.props.Document)[this.props.fieldKey] = new List(docs); + } + } + @undoBatch stateChanged = () => { - let docs = Cast(CollectionDockingView.Instance.props.Document.data, listSpec(Doc)); - CollectionDockingView.Instance._removedDocs.map(theDoc => - docs && docs.indexOf(theDoc) !== -1 && - docs.splice(docs.indexOf(theDoc), 1)); - CollectionDockingView.Instance._removedDocs.length = 0; var json = JSON.stringify(this._goldenLayout.toConfig()); this.props.Document.dockingConfig = 
json; + this.updateDataField(json); + if (this.undohack && !this.hack) { this.undohack.end(); this.undohack = undefined; @@ -704,4 +693,4 @@ export class DockedFrameRenderer extends React.Component { {this.docView}
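// A minimal sketch, not part of the patch, of the id-extraction step that the new updateDataField
// above relies on, assuming each golden-layout pane is serialized with a "documentId" property as
// the regex there expects. The helper name is hypothetical; the patch inlines this logic and then
// resolves the ids with DocServer.GetRefField before rewriting the collection's data list.
function extractDocumentIds(dockingConfigJson: string): string[] {
    const matches = dockingConfigJson.match(/"documentId":"[a-z0-9-]+"/g) ?? [];
    return matches.map(m => m.replace('"documentId":"', "").replace('"', ""));
}
// extractDocumentIds('{"documentId":"ab-12","width":50}') returns ["ab-12"]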
); } -} \ No newline at end of file +} diff --git a/src/client/views/collections/CollectionStackingView.tsx b/src/client/views/collections/CollectionStackingView.tsx index be3bfca0a..7d83d5ff3 100644 --- a/src/client/views/collections/CollectionStackingView.tsx +++ b/src/client/views/collections/CollectionStackingView.tsx @@ -40,7 +40,7 @@ export class CollectionStackingView extends CollectionSubView(doc => doc) { @computed get sectionFilter() { return StrCast(this.props.Document.sectionFilter); } @computed get filteredChildren() { return this.childDocs.filter(d => !d.isMinimized); } @computed get xMargin() { return NumCast(this.props.Document.xMargin, 2 * this.gridGap); } - @computed get yMargin() { return NumCast(this.props.Document.yMargin, 2 * this.gridGap); } + @computed get yMargin() { return Math.max(this.props.Document.showTitle ? 30 : 0, NumCast(this.props.Document.yMargin, 2 * this.gridGap)); } @computed get gridGap() { return NumCast(this.props.Document.gridGap, 10); } @computed get isStackingView() { return BoolCast(this.props.Document.singleColumn, true); } @computed get numGroupColumns() { return this.isStackingView ? Math.max(1, this.Sections.size + (this.showAddAGroup ? 1 : 0)) : 1; } diff --git a/src/client/views/nodes/DocumentView.tsx b/src/client/views/nodes/DocumentView.tsx index 39a68f51e..8486c0f34 100644 --- a/src/client/views/nodes/DocumentView.tsx +++ b/src/client/views/nodes/DocumentView.tsx @@ -658,7 +658,7 @@ export class DocumentView extends DocComponent(Docu let oldPoint2 = this.prevPoints.get(pt2.identifier); let pinching = InteractionUtils.Pinning(pt1, pt2, oldPoint1!, oldPoint2!); if (pinching !== 0) { - let newWidth = Math.max(Math.abs(oldPoint1!.clientX - oldPoint2!.clientX), Math.abs(pt1.clientX - pt2.clientX)) + let newWidth = Math.max(Math.abs(oldPoint1!.clientX - oldPoint2!.clientX), Math.abs(pt1.clientX - pt2.clientX)); this.props.Document.width = newWidth; } } diff --git a/src/client/views/search/SearchBox.scss b/src/client/views/search/SearchBox.scss index bc11604a5..4eb992d36 100644 --- a/src/client/views/search/SearchBox.scss +++ b/src/client/views/search/SearchBox.scss @@ -69,11 +69,7 @@ top: 300px; display: flex; flex-direction: column; - // height: 560px; height: 100%; - // overflow: hidden; - // overflow-y: auto; - max-height: 560px; overflow: hidden; overflow-y: auto; diff --git a/src/client/views/search/SearchBox.tsx b/src/client/views/search/SearchBox.tsx index 899a35f48..5c1bd8ef9 100644 --- a/src/client/views/search/SearchBox.tsx +++ b/src/client/views/search/SearchBox.tsx @@ -145,6 +145,7 @@ export class SearchBox extends React.Component { } + private NumResults = 25; private lockPromise?: Promise; getResults = async (query: string) => { if (this.lockPromise) { @@ -152,7 +153,7 @@ export class SearchBox extends React.Component { } this.lockPromise = new Promise(async res => { while (this._results.length <= this._endIndex && (this._numTotalResults === -1 || this._maxSearchIndex < this._numTotalResults)) { - this._curRequest = SearchUtil.Search(query, true, { fq: this.filterQuery, start: this._maxSearchIndex, rows: 10, hl: true, "hl.fl": "*" }).then(action(async (res: SearchUtil.DocSearchResult) => { + this._curRequest = SearchUtil.Search(query, true, { fq: this.filterQuery, start: this._maxSearchIndex, rows: this.NumResults, hl: true, "hl.fl": "*" }).then(action(async (res: SearchUtil.DocSearchResult) => { // happens at the beginning if (res.numFound !== this._numTotalResults && this._numTotalResults === -1) { @@ -186,7 +187,7 @@ export 
class SearchBox extends React.Component { this._curRequest = undefined; })); - this._maxSearchIndex += 10; + this._maxSearchIndex += this.NumResults; await this._curRequest; } @@ -267,9 +268,9 @@ export class SearchBox extends React.Component { @action resultsScrolled = (e?: React.UIEvent) => { let scrollY = e ? e.currentTarget.scrollTop : this.resultsRef.current ? this.resultsRef.current.scrollTop : 0; - let buffer = 4; - let startIndex = Math.floor(Math.max(0, scrollY / 70 - buffer)); - let endIndex = Math.ceil(Math.min(this._numTotalResults - 1, startIndex + (560 / 70) + buffer)); + let itemHght = 53; + let startIndex = Math.floor(Math.max(0, scrollY / itemHght)); + let endIndex = Math.ceil(Math.min(this._numTotalResults - 1, startIndex + (this.resultsRef.current!.getBoundingClientRect().height / itemHght))); this._endIndex = endIndex === -1 ? 12 : endIndex; @@ -353,7 +354,7 @@ export class SearchBox extends React.Component {
)}
{this._visibleElements}
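// A minimal sketch, not part of the patch, of the result-window arithmetic the SearchBox hunks
// above switch to: a fixed per-row height (53px in the patch) plus the live height of the results
// container replace the old hard-coded 560px/70px figures, so the visible index range follows the
// actual layout. The function name and signature are illustrative only.
function visibleResultWindow(scrollTop: number, containerHeight: number, numTotalResults: number, itemHeight = 53) {
    const startIndex = Math.floor(Math.max(0, scrollTop / itemHeight));
    const endIndex = Math.ceil(Math.min(numTotalResults - 1, startIndex + containerHeight / itemHeight));
    return { startIndex, endIndex };
}
// e.g. visibleResultWindow(106, 560, 100) returns { startIndex: 2, endIndex: 13 },
// roughly one screenful of 53px rows starting at the third result.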
-- cgit v1.2.3-70-g09d2 From bbf482ed6446899bd29e005184ecc779470658c7 Mon Sep 17 00:00:00 2001 From: Bob Zeleznik Date: Mon, 25 Nov 2019 15:12:55 -0500 Subject: title template fixes for tree/stacking views. --- src/client/views/collections/CollectionStackingView.tsx | 6 ++++-- src/client/views/collections/CollectionTreeView.tsx | 13 ++++++++----- src/client/views/collections/CollectionView.tsx | 2 +- 3 files changed, 13 insertions(+), 8 deletions(-) (limited to 'src') diff --git a/src/client/views/collections/CollectionStackingView.tsx b/src/client/views/collections/CollectionStackingView.tsx index 7d83d5ff3..04f57df67 100644 --- a/src/client/views/collections/CollectionStackingView.tsx +++ b/src/client/views/collections/CollectionStackingView.tsx @@ -41,6 +41,7 @@ export class CollectionStackingView extends CollectionSubView(doc => doc) { @computed get filteredChildren() { return this.childDocs.filter(d => !d.isMinimized); } @computed get xMargin() { return NumCast(this.props.Document.xMargin, 2 * this.gridGap); } @computed get yMargin() { return Math.max(this.props.Document.showTitle ? 30 : 0, NumCast(this.props.Document.yMargin, 2 * this.gridGap)); } + @computed get titleSpacing() { return this.props.Document.showTitle ? Math.max(0, NumCast(this.props.Document.yMargin, 2 * this.gridGap) - this.gridGap) : 0; } @computed get gridGap() { return NumCast(this.props.Document.gridGap, 10); } @computed get isStackingView() { return BoolCast(this.props.Document.singleColumn, true); } @computed get numGroupColumns() { return this.isStackingView ? Math.max(1, this.Sections.size + (this.showAddAGroup ? 1 : 0)) : 1; } @@ -229,7 +230,8 @@ export class CollectionStackingView extends CollectionSubView(doc => doc) { } @computed get columnDragger() { - return
+ return
; } @@ -317,7 +319,7 @@ export class CollectionStackingView extends CollectionSubView(doc => doc) { let outerXf = Utils.GetScreenTransform(this._masonryGridRef!); let offset = this.props.ScreenToLocalTransform().transformDirection(outerXf.translateX - translateX, outerXf.translateY - translateY); return this.props.ScreenToLocalTransform(). - translate(offset[0], offset[1] + (this.props.ChromeHeight ? this.props.ChromeHeight() : 0)). + translate(offset[0], offset[1] + (this.props.ChromeHeight && this.props.ChromeHeight() < 0 ? this.props.ChromeHeight() : 0)). scale(NumCast(doc.width, 1) / this.columnWidth); } diff --git a/src/client/views/collections/CollectionTreeView.tsx b/src/client/views/collections/CollectionTreeView.tsx index 8b993820b..261354ba8 100644 --- a/src/client/views/collections/CollectionTreeView.tsx +++ b/src/client/views/collections/CollectionTreeView.tsx @@ -43,6 +43,7 @@ export interface TreeViewProps { pinToPres: (document: Doc) => void; panelWidth: () => number; panelHeight: () => number; + ChromeHeight: undefined | (() => number); addDocument: (doc: Doc, relativeTo?: Doc, before?: boolean) => boolean; indentDocument?: () => void; ScreenToLocalTransform: () => Transform; @@ -233,8 +234,8 @@ class TreeView extends React.Component { docTransform = () => { let { scale, translateX, translateY } = Utils.GetScreenTransform(this._dref.current!); let outerXf = this.props.outerXf(); - let offset = this.props.ScreenToLocalTransform().transformDirection(outerXf.translateX - translateX, outerXf.translateY - translateY); - let finalXf = this.props.ScreenToLocalTransform().translate(offset[0], offset[1]); + let offset = this.props.ScreenToLocalTransform().transformDirection(outerXf.translateX - translateX, outerXf.translateY - translateY ); + let finalXf = this.props.ScreenToLocalTransform().translate(offset[0], offset[1]+ (this.props.ChromeHeight && this.props.ChromeHeight() < 0 ? this.props.ChromeHeight() : 0)); return finalXf; } docWidth = () => { @@ -272,7 +273,7 @@ class TreeView extends React.Component { contentElement = TreeView.GetChildElements(contents instanceof Doc ? 
[contents] : DocListCast(contents), this.props.treeViewId, doc, undefined, key, addDoc, remDoc, this.move, this.props.dropAction, this.props.addDocTab, this.props.pinToPres, this.props.ScreenToLocalTransform, this.props.outerXf, this.props.active, - this.props.panelWidth, this.props.renderDepth, this.props.showHeaderFields, this.props.preventTreeViewOpen, + this.props.panelWidth, this.props.ChromeHeight, this.props.renderDepth, this.props.showHeaderFields, this.props.preventTreeViewOpen, [...this.props.renderedIds, doc[Id]]); } else { contentElement = { TreeView.GetChildElements(docs, this.props.treeViewId, Doc.Layout(this.props.document), this.templateDataDoc, expandKey, addDoc, remDoc, this.move, this.props.dropAction, this.props.addDocTab, this.props.pinToPres, this.props.ScreenToLocalTransform, - this.props.outerXf, this.props.active, this.props.panelWidth, this.props.renderDepth, this.props.showHeaderFields, this.props.preventTreeViewOpen, + this.props.outerXf, this.props.active, this.props.panelWidth, this.props.ChromeHeight, this.props.renderDepth, this.props.showHeaderFields, this.props.preventTreeViewOpen, [...this.props.renderedIds, this.props.document[Id]])} ; } else if (this.treeViewExpandedView === "fields") { @@ -414,6 +415,7 @@ class TreeView extends React.Component { outerXf: () => { translateX: number, translateY: number }, active: (outsideReaction?: boolean) => boolean, panelWidth: () => number, + ChromeHeight: undefined| (() => number), renderDepth: number, showHeaderFields: () => boolean, preventTreeViewOpen: boolean, @@ -487,6 +489,7 @@ class TreeView extends React.Component { addDocument={addDocument} panelWidth={rowWidth} panelHeight={rowHeight} + ChromeHeight={ChromeHeight} moveDocument={move} dropAction={dropAction} addDocTab={addDocTab} @@ -591,7 +594,7 @@ export class CollectionTreeView extends CollectionSubView(Document) { { TreeView.GetChildElements(this.childDocs, this.props.Document[Id], this.props.Document, this.props.DataDoc, this.props.fieldKey, addDoc, this.remove, moveDoc, dropAction, this.props.addDocTab, this.props.pinToPres, this.props.ScreenToLocalTransform, - this.outerXf, this.props.active, this.props.PanelWidth, this.props.renderDepth, () => !this.props.Document.hideHeaderFields, + this.outerXf, this.props.active, this.props.PanelWidth, this.props.ChromeHeight, this.props.renderDepth, () => !this.props.Document.hideHeaderFields, BoolCast(this.props.Document.preventTreeViewOpen), []) } diff --git a/src/client/views/collections/CollectionView.tsx b/src/client/views/collections/CollectionView.tsx index 8387e95df..4c49054d2 100644 --- a/src/client/views/collections/CollectionView.tsx +++ b/src/client/views/collections/CollectionView.tsx @@ -111,7 +111,7 @@ export class CollectionView extends Touchable { componentWillUnmount = () => this._reactionDisposer && this._reactionDisposer(); // bcz: Argh? What's the height of the collection chromes?? - chromeHeight = () => (this.props.ChromeHeight ? this.props.ChromeHeight() : 0) + (this.props.Document.chromeStatus === "enabled" ? -60 : 0); + chromeHeight = () => (this.props.Document.chromeStatus === "enabled" ? 
-60 : 0); active = (outsideReaction?: boolean) => this.props.isSelected(outsideReaction) || BoolCast(this.props.Document.forceActive) || this._isChildActive || this.props.renderDepth === 0; -- cgit v1.2.3-70-g09d2 From ef4093854ac9e3b2dadc63ac183792fa49d98f7b Mon Sep 17 00:00:00 2001 From: Bob Zeleznik Date: Mon, 25 Nov 2019 16:21:26 -0500 Subject: cleanup of stacking/masonry and fix so that masonry layout updates interactive as docs are resized. --- .../collections/CollectionMasonryViewFieldRow.tsx | 129 ++++++++++----------- .../views/collections/CollectionStackingView.scss | 5 +- .../views/collections/CollectionStackingView.tsx | 5 +- 3 files changed, 69 insertions(+), 70 deletions(-) (limited to 'src') diff --git a/src/client/views/collections/CollectionMasonryViewFieldRow.tsx b/src/client/views/collections/CollectionMasonryViewFieldRow.tsx index 52ebfafd3..df4b00b3a 100644 --- a/src/client/views/collections/CollectionMasonryViewFieldRow.tsx +++ b/src/client/views/collections/CollectionMasonryViewFieldRow.tsx @@ -2,7 +2,7 @@ import React = require("react"); import { library } from '@fortawesome/fontawesome-svg-core'; import { faPalette } from '@fortawesome/free-solid-svg-icons'; import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; -import { action, observable } from "mobx"; +import { action, observable, computed } from "mobx"; import { observer } from "mobx-react"; import Measure from "react-measure"; import { Doc } from "../../../new_fields/Doc"; @@ -20,7 +20,6 @@ import { anchorPoints, Flyout } from "../DocumentDecorations"; import { EditableView } from "../EditableView"; import { CollectionStackingView } from "./CollectionStackingView"; import "./CollectionStackingView.scss"; -import { undo } from "prosemirror-history"; library.add(faPalette); @@ -258,12 +257,44 @@ export class CollectionMasonryViewFieldRow extends React.Component "", + SetValue: this.addDocument, + contents: "+ NEW", + HeadingObject: this.props.headingObject, + HeadingsHack: this._headingsHack, + toggle: this.toggleVisibility, + color: this._color + }; + return collapsed ? (null) : +
+
list + ` ${this.props.parent.columnWidth}px`, ""), + }}> + {this.props.parent.children(this.props.docList)} + {this.props.parent.columnDragger} +
+ {(chromeStatus !== 'view-mode' && chromeStatus !== 'disabled') ? +
+ +
: null + } +
; + } + + @computed get headingView() { let heading = this._heading; - let style = this.props.parent; + let key = StrCast(this.props.parent.props.Document.sectionFilter); let evContents = heading ? heading : this.props.type && this.props.type === "number" ? "0" : `NO ${key.toUpperCase()} VALUE`; let headerEditableViewProps = { GetValue: () => evContents, @@ -275,30 +306,17 @@ export class CollectionMasonryViewFieldRow extends React.Component "", - SetValue: this.addDocument, - contents: "+ NEW", - HeadingObject: this.props.headingObject, - HeadingsHack: this._headingsHack, - toggle: this.toggleVisibility, - color: this._color - }; - let headingView = this.props.parent.props.Document.miniHeaders ? -
- {} + return this.props.parent.props.Document.miniHeaders ? +
+
: - this.props.headingObject ? + !this.props.headingObject ? (null) :
- {} + style={{ background: evContents !== `NO ${key.toUpperCase()} VALUE` ? this._color : "lightgrey", }}> + {evContents === `NO ${key.toUpperCase()} VALUE` ? (null) :
@@ -321,47 +339,26 @@ export class CollectionMasonryViewFieldRow extends React.Component }
-
: (null); +
; + } + render() { const background = this._background; //to account for observables in Measure - const collapsed = this._collapsed; - let chromeStatus = this.props.parent.props.Document.chromeStatus; - return ( - - {({ measureRef }) => { - return
-
- {headingView} - {collapsed ? (null) : - < div style={{ position: "relative" }}> -
list + ` ${this.props.parent.columnWidth}px`, ""), - }}> - {this.props.parent.children(this.props.docList)} - {this.props.parent.columnDragger} -
- {(chromeStatus !== 'view-mode' && chromeStatus !== 'disabled') ? -
- -
: null - } -
- } -
-
; - }} - - ); + let contentlayout = this.contentLayout; + let headingview = this.headingView; + return + {({ measureRef }) => { + return
+
+ {headingview} + {contentlayout} +
+
; + }} +
; } } \ No newline at end of file diff --git a/src/client/views/collections/CollectionStackingView.scss b/src/client/views/collections/CollectionStackingView.scss index 29178b909..e1577cfee 100644 --- a/src/client/views/collections/CollectionStackingView.scss +++ b/src/client/views/collections/CollectionStackingView.scss @@ -97,6 +97,7 @@ .collectionStackingView-columnDoc { display: inline-block; + margin: auto; } .collectionStackingView-masonryDoc { @@ -177,7 +178,9 @@ .collectionStackingView-sectionHeader-subCont { outline: none; border: 0px; - color: $light-color; + color: $light-color; + width: 100%; + color: grey; letter-spacing: 2px; font-size: 75%; transition: transform 0.2s; diff --git a/src/client/views/collections/CollectionStackingView.tsx b/src/client/views/collections/CollectionStackingView.tsx index 04f57df67..e564f1193 100644 --- a/src/client/views/collections/CollectionStackingView.tsx +++ b/src/client/views/collections/CollectionStackingView.tsx @@ -41,7 +41,6 @@ export class CollectionStackingView extends CollectionSubView(doc => doc) { @computed get filteredChildren() { return this.childDocs.filter(d => !d.isMinimized); } @computed get xMargin() { return NumCast(this.props.Document.xMargin, 2 * this.gridGap); } @computed get yMargin() { return Math.max(this.props.Document.showTitle ? 30 : 0, NumCast(this.props.Document.yMargin, 2 * this.gridGap)); } - @computed get titleSpacing() { return this.props.Document.showTitle ? Math.max(0, NumCast(this.props.Document.yMargin, 2 * this.gridGap) - this.gridGap) : 0; } @computed get gridGap() { return NumCast(this.props.Document.gridGap, 10); } @computed get isStackingView() { return BoolCast(this.props.Document.singleColumn, true); } @computed get numGroupColumns() { return this.isStackingView ? Math.max(1, this.Sections.size + (this.showAddAGroup ? 1 : 0)) : 1; } @@ -65,7 +64,7 @@ export class CollectionStackingView extends CollectionSubView(doc => doc) { let dxf = () => this.getDocTransform(layoutDoc, dref.current!); this._docXfs.push({ dxf: dxf, width: width, height: height }); let rowSpan = Math.ceil((height() + this.gridGap) / this.gridGap); - let style = this.isStackingView ? { width: width(), margin: "auto", marginTop: i === 0 ? 0 : this.gridGap, height: height() } : { gridRowEnd: `span ${rowSpan}` }; + let style = this.isStackingView ? { width: width(), marginTop: i === 0 ? 0 : this.gridGap, height: height() } : { gridRowEnd: `span ${rowSpan}` }; return
{this.getDisplayDoc(pair.layout || d, pair.data, dxf, width)}
; @@ -231,7 +230,7 @@ export class CollectionStackingView extends CollectionSubView(doc => doc) { @computed get columnDragger() { return
+ style={{ cursor: this._cursor, left: `${this.columnWidth + this.xMargin}px`, top: `${Math.max(0, this.yMargin - 9)}px` }} >
; } -- cgit v1.2.3-70-g09d2 From 4cf3b0a4673a00f0e1de107b29a0c0b658266f46 Mon Sep 17 00:00:00 2001 From: bob Date: Tue, 26 Nov 2019 13:26:34 -0500 Subject: fixed tree view sorting (a little) and kept title box editable after tabbing. --- .../views/collections/CollectionTreeView.scss | 3 + .../views/collections/CollectionTreeView.tsx | 72 +++++++++++++++------- .../views/collections/CollectionViewChromes.tsx | 15 +++-- .../CollectionFreeFormLinkView.tsx | 15 +++-- .../views/nodes/CollectionFreeFormDocumentView.tsx | 2 +- src/client/views/nodes/DocumentView.tsx | 9 +-- 6 files changed, 74 insertions(+), 42 deletions(-) (limited to 'src') diff --git a/src/client/views/collections/CollectionTreeView.scss b/src/client/views/collections/CollectionTreeView.scss index 7d0c900a6..8b12395a7 100644 --- a/src/client/views/collections/CollectionTreeView.scss +++ b/src/client/views/collections/CollectionTreeView.scss @@ -114,6 +114,9 @@ .treeViewItem-header { border: transparent 1px solid; display: flex; + .editableView-container-editing-oneLine { + min-width: 15px; + } } .treeViewItem-header-above { diff --git a/src/client/views/collections/CollectionTreeView.tsx b/src/client/views/collections/CollectionTreeView.tsx index 261354ba8..a21bc6c14 100644 --- a/src/client/views/collections/CollectionTreeView.tsx +++ b/src/client/views/collections/CollectionTreeView.tsx @@ -1,7 +1,7 @@ import { library } from '@fortawesome/fontawesome-svg-core'; import { faAngleRight, faArrowsAltH, faBell, faCamera, faCaretDown, faCaretRight, faCaretSquareDown, faCaretSquareRight, faExpand, faMinus, faPlus, faTrash, faTrashAlt } from '@fortawesome/free-solid-svg-icons'; import { FontAwesomeIcon } from '@fortawesome/react-fontawesome'; -import { action, computed, observable } from "mobx"; +import { action, computed, observable, trace, runInAction } from "mobx"; import { observer } from "mobx-react"; import { Doc, DocListCast, Field, HeightSym, Opt, WidthSym } from '../../../new_fields/Doc'; import { Id } from '../../../new_fields/FieldSymbols'; @@ -82,10 +82,11 @@ class TreeView extends React.Component { private _header?: React.RefObject = React.createRef(); private _treedropDisposer?: DragManager.DragDropDisposer; private _dref = React.createRef(); + get displayName() { return "TreeView(" + this.props.document.title + ")"; } // this makes mobx trace() statements more descriptive get defaultExpandedView() { return this.childDocs ? this.fieldKey : StrCast(this.props.document.defaultExpandedView, "fields"); } @observable _overrideTreeViewOpen = false; // override of the treeViewOpen field allowing the display state to be independent of the document's state set treeViewOpen(c: boolean) { if (this.props.preventTreeViewOpen) this._overrideTreeViewOpen = c; else this.props.document.treeViewOpen = c; } - @computed get treeViewOpen() { return (BoolCast(this.props.document.treeViewOpen) && !this.props.preventTreeViewOpen) || this._overrideTreeViewOpen; } + @computed get treeViewOpen() { trace(); return (this.props.document.treeViewOpen && !this.props.preventTreeViewOpen) || this._overrideTreeViewOpen; } @computed get treeViewExpandedView() { return StrCast(this.props.document.treeViewExpandedView, this.defaultExpandedView); } @computed get MAX_EMBED_HEIGHT() { return NumCast(this.props.document.maxEmbedHeight, 300); } @computed get dataDoc() { return this.templateDataDoc ? 
this.templateDataDoc : this.props.document; } @@ -110,7 +111,7 @@ class TreeView extends React.Component { return this.props.dataDoc; } @computed get boundsOfCollectionDocument() { - return StrCast(this.props.document.type).indexOf(DocumentType.COL) === -1 ? undefined : + return StrCast(this.props.document.type).indexOf(DocumentType.COL) === -1 || !DocListCast(this.props.document[this.fieldKey]).length ? undefined : Doc.ComputeContentBounds(DocListCast(this.props.document[this.fieldKey])); } @@ -158,22 +159,30 @@ class TreeView extends React.Component { editableView = (key: string, style?: string) => ( StrCast(this.props.document[key])} SetValue={undoBatch((value: string) => Doc.SetInPlace(this.props.document, key, value, false) || true)} OnFillDown={undoBatch((value: string) => { Doc.SetInPlace(this.props.document, key, value, false); - let doc = this.props.document.layoutCustom instanceof Doc ? Doc.ApplyTemplate(Doc.GetProto(this.props.document.layoutCustom)) : undefined; - if (!doc) doc = Docs.Create.FreeformDocument([], { title: "", x: 0, y: 0, width: 100, height: 25, templates: new List([Templates.Title.Layout]) }); + let layoutDoc = this.props.document.layoutCustom instanceof Doc ? Doc.ApplyTemplate(Doc.GetProto(this.props.document.layoutCustom)) : undefined; + let doc = layoutDoc || Docs.Create.FreeformDocument([], { title: "", x: 0, y: 0, width: 100, height: 25, templates: new List([Templates.Title.Layout]) }); TreeView.loadId = doc[Id]; return this.props.addDocument(doc); })} - OnTab={() => { TreeView.loadId = ""; this.props.indentDocument && this.props.indentDocument(); }} + OnTab={undoBatch(() => { + TreeView.loadId = this.dataDoc[Id]; + this.props.indentDocument?.(); + setTimeout(() => { // unsetting/setting brushing for this doc will recreate & refocus this editableView after all other treeview changes have been made to the Dom (which may remove focus from this document). + Doc.UnBrushDoc(this.props.document); + Doc.BrushDoc(this.props.document); + TreeView.loadId = ""; + }, 0); + })} />) onWorkspaceContextMenu = (e: React.MouseEvent): void => { @@ -207,7 +216,7 @@ class TreeView extends React.Component { let rect = this._header!.current!.getBoundingClientRect(); let bounds = this.props.ScreenToLocalTransform().transformPoint(rect.left, rect.top + rect.height / 2); let before = x[1] < bounds[1]; - let inside = x[0] > bounds[0] + 75 || (!before && this.treeViewOpen); + let inside = x[0] > bounds[0] + 75 || (!before && this.treeViewOpen && DocListCast(this.dataDoc[this.fieldKey]).length); if (de.data instanceof DragManager.LinkDragData) { let sourceDoc = de.data.linkSourceDocument; let destDoc = this.props.document; @@ -234,8 +243,8 @@ class TreeView extends React.Component { docTransform = () => { let { scale, translateX, translateY } = Utils.GetScreenTransform(this._dref.current!); let outerXf = this.props.outerXf(); - let offset = this.props.ScreenToLocalTransform().transformDirection(outerXf.translateX - translateX, outerXf.translateY - translateY ); - let finalXf = this.props.ScreenToLocalTransform().translate(offset[0], offset[1]+ (this.props.ChromeHeight && this.props.ChromeHeight() < 0 ? this.props.ChromeHeight() : 0)); + let offset = this.props.ScreenToLocalTransform().transformDirection(outerXf.translateX - translateX, outerXf.translateY - translateY); + let finalXf = this.props.ScreenToLocalTransform().translate(offset[0], offset[1] + (this.props.ChromeHeight && this.props.ChromeHeight() < 0 ? 
this.props.ChromeHeight() : 0)); return finalXf; } docWidth = () => { @@ -258,8 +267,10 @@ class TreeView extends React.Component { })()); } - expandedField = (doc: Doc) => { + @computed get expandedField() { + trace(); let ids: { [key: string]: string } = {}; + let doc = this.props.document; doc && Object.keys(doc).forEach(key => !(key in ids) && doc[key] !== ComputedField.undefined && (ids[key] = key)); let rows: JSX.Element[] = []; @@ -296,6 +307,7 @@ class TreeView extends React.Component { noOverlays = (doc: Doc) => ({ title: "", caption: "" }); @computed get renderContent() { + trace(); const expandKey = this.treeViewExpandedView === this.fieldKey ? this.fieldKey : this.treeViewExpandedView === "links" ? "links" : undefined; if (expandKey !== undefined) { let remDoc = (doc: Doc) => this.remove(doc, expandKey); @@ -311,7 +323,7 @@ class TreeView extends React.Component { ; } else if (this.treeViewExpandedView === "fields") { return
    - {this.expandedField(this.props.document)} + {this.expandedField}
; } else { let layoutDoc = Doc.Layout(this.props.document); @@ -342,6 +354,7 @@ class TreeView extends React.Component { @computed get renderBullet() { + trace(); return
{ this.treeViewOpen = !this.treeViewOpen; e.stopPropagation(); })} style={{ color: StrCast(this.props.document.color, "black"), opacity: 0.4 }}> {}
; @@ -387,6 +400,7 @@ class TreeView extends React.Component { } render() { + trace(); return
  • @@ -400,7 +414,7 @@ class TreeView extends React.Component {
    ; } public static GetChildElements( - docs: Doc[], + childDocs: Doc[], treeViewId: string, containingCollection: Doc, dataDoc: Doc | undefined, @@ -415,7 +429,7 @@ class TreeView extends React.Component { outerXf: () => { translateX: number, translateY: number }, active: (outsideReaction?: boolean) => boolean, panelWidth: () => number, - ChromeHeight: undefined| (() => number), + ChromeHeight: undefined | (() => number), renderDepth: number, showHeaderFields: () => boolean, preventTreeViewOpen: boolean, @@ -423,12 +437,27 @@ class TreeView extends React.Component { ) { const viewSpecScript = Cast(containingCollection.viewSpecScript, ScriptField); if (viewSpecScript) { - docs = docs.filter(d => viewSpecScript.script.run({ doc: d }, console.log).result); + childDocs = childDocs.filter(d => viewSpecScript.script.run({ doc: d }, console.log).result); } - let ascending = Cast(containingCollection.sortAscending, "boolean", null); + let docs = childDocs.slice(); + let dataExtension = containingCollection[key + "_ext"] as Doc; + let ascending = dataExtension && BoolCast(dataExtension.sortAscending, null); if (ascending !== undefined) { - docs.sort(function (a, b): 1 | -1 { + + let sortAlphaNum = (a: string, b: string): 0 | 1 | -1 => { + var reN = /[0-9]*$/; + var aA = a.replace(reN, ""); // get rid of trailing numbers + var bA = b.replace(reN, ""); + if (aA === bA) { // if header string matches, then compare numbers numerically + var aN = parseInt(a.match(reN)![0], 10); + var bN = parseInt(b.match(reN)![0], 10); + return aN === bN ? 0 : aN > bN ? 1 : -1; + } else { + return aA > bA ? 1 : -1; + } + } + docs.sort(function (a, b): 0 | 1 | -1 { let descA = ascending ? b : a; let descB = ascending ? a : b; let first = descA.title; @@ -438,7 +467,7 @@ class TreeView extends React.Component { return (first - second) > 0 ? 1 : -1; } if (typeof first === 'string' && typeof second === 'string') { - return first > second ? 1 : -1; + return sortAlphaNum(first, second); } if (typeof first === 'boolean' && typeof second === 'boolean') { // if (first === second) { // bugfixing?: otherwise, the list "flickers" because the list is resorted during every load @@ -565,6 +594,7 @@ export class CollectionTreeView extends CollectionSubView(Document) { } render() { + trace(); let dropAction = StrCast(this.props.Document.dropAction) as dropActionType; let addDoc = (doc: Doc, relativeTo?: Doc, before?: boolean) => Doc.AddDocToList(this.props.Document, this.props.fieldKey, doc, relativeTo, before, false, false, false); let moveDoc = (d: Doc, target: Doc, addDoc: (doc: Doc) => boolean) => this.props.moveDocument(d, target, addDoc); @@ -584,8 +614,8 @@ export class CollectionTreeView extends CollectionSubView(Document) { SetValue={undoBatch((value: string) => Doc.SetInPlace(this.dataDoc, "title", value, false) || true)} OnFillDown={undoBatch((value: string) => { Doc.SetInPlace(this.dataDoc, "title", value, false); - let doc = this.props.Document.layoutCustom instanceof Doc ? Doc.ApplyTemplate(Doc.GetProto(this.props.Document.layoutCustom)) : undefined; - if (!doc) doc = Docs.Create.FreeformDocument([], { title: "", x: 0, y: 0, width: 100, height: 25, templates: new List([Templates.Title.Layout]) }); + let layoutDoc = this.props.Document.layoutCustom instanceof Doc ? 
Doc.ApplyTemplate(Doc.GetProto(this.props.Document.layoutCustom)) : undefined; + let doc = layoutDoc || Docs.Create.FreeformDocument([], { title: "", x: 0, y: 0, width: 100, height: 25, templates: new List([Templates.Title.Layout]) }); TreeView.loadId = doc[Id]; Doc.AddDocToList(this.props.Document, this.props.fieldKey, doc, this.childDocs.length ? this.childDocs[0] : undefined, true, false, false, false); })} /> diff --git a/src/client/views/collections/CollectionViewChromes.tsx b/src/client/views/collections/CollectionViewChromes.tsx index cfc6c2a3f..06fca7c38 100644 --- a/src/client/views/collections/CollectionViewChromes.tsx +++ b/src/client/views/collections/CollectionViewChromes.tsx @@ -624,12 +624,19 @@ export class CollectionSchemaViewChrome extends React.Component { - @computed private get descending() { return Cast(this.props.CollectionView.props.Document.sortAscending, "boolean", null); } + get dataExtension() { + return this.props.CollectionView.props.Document[this.props.CollectionView.props.fieldKey + "_ext"] as Doc; + } + @computed private get descending() { + return this.dataExtension && Cast(this.dataExtension.sortAscending, "boolean", null); + } @action toggleSort = () => { - if (this.props.CollectionView.props.Document.sortAscending) this.props.CollectionView.props.Document.sortAscending = undefined; - else if (this.props.CollectionView.props.Document.sortAscending === undefined) this.props.CollectionView.props.Document.sortAscending = false; - else this.props.CollectionView.props.Document.sortAscending = true; + if (this.dataExtension) { + if (this.dataExtension.sortAscending) this.dataExtension.sortAscending = undefined; + else if (this.dataExtension.sortAscending === undefined) this.dataExtension.sortAscending = false; + else this.dataExtension.sortAscending = true; + } } render() { diff --git a/src/client/views/collections/collectionFreeForm/CollectionFreeFormLinkView.tsx b/src/client/views/collections/collectionFreeForm/CollectionFreeFormLinkView.tsx index 73b45edc6..b00728079 100644 --- a/src/client/views/collections/collectionFreeForm/CollectionFreeFormLinkView.tsx +++ b/src/client/views/collections/collectionFreeForm/CollectionFreeFormLinkView.tsx @@ -6,8 +6,9 @@ import "./CollectionFreeFormLinkView.scss"; import React = require("react"); import v5 = require("uuid/v5"); import { DocumentType } from "../../../documents/DocumentTypes"; -import { observable, action, reaction, IReactionDisposer } from "mobx"; +import { observable, action, reaction, IReactionDisposer, trace } from "mobx"; import { StrCast, Cast } from "../../../../new_fields/Types"; +import { TraceMobx } from "../../../../new_fields/util"; export interface CollectionFreeFormLinkViewProps { A: DocumentView; @@ -17,14 +18,14 @@ export interface CollectionFreeFormLinkViewProps { @observer export class CollectionFreeFormLinkView extends React.Component { - @observable _opacity: number = 1; - @observable _update: number = 0; + @observable _opacity: number = 0; _anchorDisposer: IReactionDisposer | undefined; @action componentDidMount() { - setTimeout(action(() => this._opacity = 0.05), 750); this._anchorDisposer = reaction(() => [this.props.A.props.ScreenToLocalTransform(), this.props.B.props.ScreenToLocalTransform()], - () => { + action(() => { + setTimeout(action(() => this._opacity = 1), 0); // since the render code depends on querying the Dom through getBoudndingClientRect, we need to delay triggering render() + setTimeout(action(() => this._opacity = 0.05), 750); // this will unhighlight the link line. 
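// A small sketch, not part of the patch, of the two-stage timing the reaction above now uses,
// assuming a MobX observable drives the link line's opacity as in the component being changed.
// The zero-delay timeout defers the opacity change (and the re-render it triggers) by one tick so
// the render code's getBoundingClientRect queries see rendered anchors; the 750ms timeout then
// dims the line. The names below are illustrative only.
import { action, observable } from "mobx";

const line = observable({ opacity: 0 });
const showThenFade = () => {
    setTimeout(action(() => line.opacity = 1), 0);      // defer until after the pending render commits
    setTimeout(action(() => line.opacity = 0.05), 750); // then fade the highlight, as the reaction above does
};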
let acont = this.props.A.props.Document.type === DocumentType.LINK ? this.props.A.ContentDiv!.getElementsByClassName("docuLinkBox-cont") : []; let bcont = this.props.B.props.Document.type === DocumentType.LINK ? this.props.B.ContentDiv!.getElementsByClassName("docuLinkBox-cont") : []; let adiv = (acont.length ? acont[0] : this.props.A.ContentDiv!); @@ -45,8 +46,7 @@ export class CollectionFreeFormLinkView extends React.Component(Docu let highlighting = fullDegree && this.layoutDoc.type !== DocumentType.FONTICON && this.layoutDoc.viewType !== CollectionViewType.Linear; return
    { - console.log("Brush" + this.props.Document.title); - Doc.BrushDoc(this.props.Document); - }} onPointerLeave={e => { - console.log("UnBrush" + this.props.Document.title); - Doc.UnBrushDoc(this.props.Document); - - }} + onPointerEnter={e => Doc.BrushDoc(this.props.Document)} onPointerLeave={e => Doc.UnBrushDoc(this.props.Document)} style={{ transition: this.Document.isAnimating ? ".5s linear" : StrCast(this.Document.transition), pointerEvents: this.ignorePointerEvents ? "none" : "all", -- cgit v1.2.3-70-g09d2 From 6cd6e035fc67812afd7a40f8abd0f07f8874f04a Mon Sep 17 00:00:00 2001 From: bob Date: Tue, 26 Nov 2019 14:34:41 -0500 Subject: fixes for tree view drag drop with images. --- src/client/views/EditableView.tsx | 4 +- .../views/collections/CollectionTreeView.tsx | 62 +++++++++++++--------- src/client/views/nodes/ImageBox.scss | 25 ++++----- src/client/views/nodes/ImageBox.tsx | 59 ++++++++++---------- 4 files changed, 79 insertions(+), 71 deletions(-) (limited to 'src') diff --git a/src/client/views/EditableView.tsx b/src/client/views/EditableView.tsx index 8e86f58ee..f78b61892 100644 --- a/src/client/views/EditableView.tsx +++ b/src/client/views/EditableView.tsx @@ -21,7 +21,7 @@ export interface EditableProps { OnFillDown?(value: string): void; - OnTab?(): void; + OnTab?(shift?: boolean): void; /** * The contents to render when not editing @@ -79,7 +79,7 @@ export class EditableView extends React.Component { if (e.key === "Tab") { e.stopPropagation(); this.finalizeEdit(e.currentTarget.value, e.shiftKey); - this.props.OnTab && this.props.OnTab(); + this.props.OnTab && this.props.OnTab(e.shiftKey); } else if (e.key === "Enter") { e.stopPropagation(); if (!e.ctrlKey) { diff --git a/src/client/views/collections/CollectionTreeView.tsx b/src/client/views/collections/CollectionTreeView.tsx index a21bc6c14..42cdd1455 100644 --- a/src/client/views/collections/CollectionTreeView.tsx +++ b/src/client/views/collections/CollectionTreeView.tsx @@ -1,7 +1,7 @@ import { library } from '@fortawesome/fontawesome-svg-core'; import { faAngleRight, faArrowsAltH, faBell, faCamera, faCaretDown, faCaretRight, faCaretSquareDown, faCaretSquareRight, faExpand, faMinus, faPlus, faTrash, faTrashAlt } from '@fortawesome/free-solid-svg-icons'; import { FontAwesomeIcon } from '@fortawesome/react-fontawesome'; -import { action, computed, observable, trace, runInAction } from "mobx"; +import { action, computed, observable } from "mobx"; import { observer } from "mobx-react"; import { Doc, DocListCast, Field, HeightSym, Opt, WidthSym } from '../../../new_fields/Doc'; import { Id } from '../../../new_fields/FieldSymbols'; @@ -34,6 +34,7 @@ export interface TreeViewProps { document: Doc; dataDoc?: Doc; containingCollection: Doc; + prevSibling?: Doc; renderDepth: number; deleteDoc: (doc: Doc) => boolean; ruleProvider: Doc | undefined; @@ -46,12 +47,13 @@ export interface TreeViewProps { ChromeHeight: undefined | (() => number); addDocument: (doc: Doc, relativeTo?: Doc, before?: boolean) => boolean; indentDocument?: () => void; + outdentDocument?: () => void; ScreenToLocalTransform: () => Transform; outerXf: () => { translateX: number, translateY: number }; treeViewId: string; parentKey: string; active: (outsideReaction?: boolean) => boolean; - showHeaderFields: () => boolean; + hideHeaderFields: () => boolean; preventTreeViewOpen: boolean; renderedIds: string[]; } @@ -82,11 +84,13 @@ class TreeView extends React.Component { private _header?: React.RefObject = React.createRef(); private _treedropDisposer?: 
DragManager.DragDropDisposer; private _dref = React.createRef(); + get displayName() { return "TreeView(" + this.props.document.title + ")"; } // this makes mobx trace() statements more descriptive + get defaultExpandedView() { return this.childDocs ? this.fieldKey : StrCast(this.props.document.defaultExpandedView, "fields"); } @observable _overrideTreeViewOpen = false; // override of the treeViewOpen field allowing the display state to be independent of the document's state set treeViewOpen(c: boolean) { if (this.props.preventTreeViewOpen) this._overrideTreeViewOpen = c; else this.props.document.treeViewOpen = c; } - @computed get treeViewOpen() { trace(); return (this.props.document.treeViewOpen && !this.props.preventTreeViewOpen) || this._overrideTreeViewOpen; } + @computed get treeViewOpen() { return (this.props.document.treeViewOpen && !this.props.preventTreeViewOpen) || this._overrideTreeViewOpen; } @computed get treeViewExpandedView() { return StrCast(this.props.document.treeViewExpandedView, this.defaultExpandedView); } @computed get MAX_EMBED_HEIGHT() { return NumCast(this.props.document.maxEmbedHeight, 300); } @computed get dataDoc() { return this.templateDataDoc ? this.templateDataDoc : this.props.document; } @@ -145,11 +149,10 @@ class TreeView extends React.Component { } onDragMove = (e: PointerEvent): void => { Doc.UnBrushDoc(this.dataDoc); - let x = this.props.ScreenToLocalTransform().transformPoint(e.clientX, e.clientY); + let pt = [e.clientX, e.clientY] let rect = this._header!.current!.getBoundingClientRect(); - let bounds = this.props.ScreenToLocalTransform().transformPoint(rect.left, rect.top + rect.height / 2); - let before = x[1] < bounds[1]; - let inside = x[0] > bounds[0] + 75; + let before = pt[1] < rect.top + rect.height / 2; + let inside = pt[0] > Math.min(rect.left + 75, rect.left + rect.width * .75) || (!before && this.treeViewOpen && DocListCast(this.dataDoc[this.fieldKey]).length); this._header!.current!.className = "treeViewItem-header"; if (inside) this._header!.current!.className += " treeViewItem-header-inside"; else if (before) this._header!.current!.className += " treeViewItem-header-above"; @@ -174,9 +177,9 @@ class TreeView extends React.Component { TreeView.loadId = doc[Id]; return this.props.addDocument(doc); })} - OnTab={undoBatch(() => { + OnTab={undoBatch((shift?: boolean) => { TreeView.loadId = this.dataDoc[Id]; - this.props.indentDocument?.(); + shift ? this.props.outdentDocument?.() : this.props.indentDocument?.(); setTimeout(() => { // unsetting/setting brushing for this doc will recreate & refocus this editableView after all other treeview changes have been made to the Dom (which may remove focus from this document). 
Doc.UnBrushDoc(this.props.document); Doc.BrushDoc(this.props.document); @@ -212,11 +215,10 @@ class TreeView extends React.Component { @undoBatch treeDrop = (e: Event, de: DragManager.DropEvent) => { - let x = this.props.ScreenToLocalTransform().transformPoint(de.x, de.y); + let pt = [de.x, de.y]; let rect = this._header!.current!.getBoundingClientRect(); - let bounds = this.props.ScreenToLocalTransform().transformPoint(rect.left, rect.top + rect.height / 2); - let before = x[1] < bounds[1]; - let inside = x[0] > bounds[0] + 75 || (!before && this.treeViewOpen && DocListCast(this.dataDoc[this.fieldKey]).length); + let before = pt[1] < rect.top + rect.height / 2; + let inside = pt[0] > Math.min(rect.left + 75, rect.left + rect.width * .75) || (!before && this.treeViewOpen && DocListCast(this.dataDoc[this.fieldKey]).length); if (de.data instanceof DragManager.LinkDragData) { let sourceDoc = de.data.linkSourceDocument; let destDoc = this.props.document; @@ -268,7 +270,6 @@ class TreeView extends React.Component { } @computed get expandedField() { - trace(); let ids: { [key: string]: string } = {}; let doc = this.props.document; doc && Object.keys(doc).forEach(key => !(key in ids) && doc[key] !== ComputedField.undefined && (ids[key] = key)); @@ -282,9 +283,9 @@ class TreeView extends React.Component { let remDoc = (doc: Doc) => this.remove(doc, key); let addDoc = (doc: Doc, addBefore?: Doc, before?: boolean) => Doc.AddDocToList(this.dataDoc, key, doc, addBefore, before, false, true); contentElement = TreeView.GetChildElements(contents instanceof Doc ? [contents] : - DocListCast(contents), this.props.treeViewId, doc, undefined, key, addDoc, remDoc, this.move, + DocListCast(contents), this.props.treeViewId, doc, undefined, key, this.props.containingCollection, this.props.prevSibling, addDoc, remDoc, this.move, this.props.dropAction, this.props.addDocTab, this.props.pinToPres, this.props.ScreenToLocalTransform, this.props.outerXf, this.props.active, - this.props.panelWidth, this.props.ChromeHeight, this.props.renderDepth, this.props.showHeaderFields, this.props.preventTreeViewOpen, + this.props.panelWidth, this.props.ChromeHeight, this.props.renderDepth, this.props.hideHeaderFields, this.props.preventTreeViewOpen, [...this.props.renderedIds, doc[Id]]); } else { contentElement = { noOverlays = (doc: Doc) => ({ title: "", caption: "" }); @computed get renderContent() { - trace(); const expandKey = this.treeViewExpandedView === this.fieldKey ? this.fieldKey : this.treeViewExpandedView === "links" ? "links" : undefined; if (expandKey !== undefined) { let remDoc = (doc: Doc) => this.remove(doc, expandKey); @@ -316,9 +316,9 @@ class TreeView extends React.Component { return
      {!docs ? (null) : TreeView.GetChildElements(docs, this.props.treeViewId, Doc.Layout(this.props.document), - this.templateDataDoc, expandKey, addDoc, remDoc, this.move, + this.templateDataDoc, expandKey, this.props.containingCollection, this.props.prevSibling, addDoc, remDoc, this.move, this.props.dropAction, this.props.addDocTab, this.props.pinToPres, this.props.ScreenToLocalTransform, - this.props.outerXf, this.props.active, this.props.panelWidth, this.props.ChromeHeight, this.props.renderDepth, this.props.showHeaderFields, this.props.preventTreeViewOpen, + this.props.outerXf, this.props.active, this.props.panelWidth, this.props.ChromeHeight, this.props.renderDepth, this.props.hideHeaderFields, this.props.preventTreeViewOpen, [...this.props.renderedIds, this.props.document[Id]])}
    ; } else if (this.treeViewExpandedView === "fields") { @@ -354,7 +354,6 @@ class TreeView extends React.Component { @computed get renderBullet() { - trace(); return
    { this.treeViewOpen = !this.treeViewOpen; e.stopPropagation(); })} style={{ color: StrCast(this.props.document.color, "black"), opacity: 0.4 }}> {}
    ; @@ -394,13 +393,12 @@ class TreeView extends React.Component { }} > {this.editableView("title")}
    - {this.props.showHeaderFields() ? headerElements : (null)} + {this.props.hideHeaderFields() ? (null) : headerElements} {openRight} ; } render() { - trace(); return
  • @@ -419,6 +417,8 @@ class TreeView extends React.Component { containingCollection: Doc, dataDoc: Doc | undefined, key: string, + parentCollectionDoc: Doc | undefined, + parentPrevSibling: Doc | undefined, add: (doc: Doc, relativeTo?: Doc, before?: boolean) => boolean, remove: ((doc: Doc) => boolean), move: DragManager.MoveFunction, @@ -431,7 +431,7 @@ class TreeView extends React.Component { panelWidth: () => number, ChromeHeight: undefined | (() => number), renderDepth: number, - showHeaderFields: () => boolean, + hideHeaderFields: () => boolean, preventTreeViewOpen: boolean, renderedIds: string[] ) { @@ -497,6 +497,15 @@ class TreeView extends React.Component { } } }; + let outdent = !parentCollectionDoc ? undefined : () => { + if (StrCast(parentCollectionDoc.layout).indexOf("fieldKey") !== -1) { + let fieldKeysub = StrCast(parentCollectionDoc.layout).split("fieldKey")[1]; + let fieldKey = fieldKeysub.split("\"")[1]; + Doc.AddDocToList(parentCollectionDoc, fieldKey, child, parentPrevSibling, false); + parentCollectionDoc.treeViewOpen = true; + remove(child); + } + }; let addDocument = (doc: Doc, relativeTo?: Doc, before?: boolean) => { return add(doc, relativeTo ? relativeTo : docs[i], before !== undefined ? before : false); }; @@ -509,10 +518,12 @@ class TreeView extends React.Component { document={pair.layout} dataDoc={pair.data} containingCollection={containingCollection} + prevSibling={docs[i]} treeViewId={treeViewId} ruleProvider={containingCollection.isRuleProvider && pair.layout.type !== DocumentType.TEXT ? containingCollection : containingCollection.ruleProvider as Doc} key={child[Id]} indentDocument={indent} + outdentDocument={outdent} renderDepth={renderDepth} deleteDoc={remove} addDocument={addDocument} @@ -527,7 +538,7 @@ class TreeView extends React.Component { outerXf={outerXf} parentKey={key} active={active} - showHeaderFields={showHeaderFields} + hideHeaderFields={hideHeaderFields} preventTreeViewOpen={preventTreeViewOpen} renderedIds={renderedIds} />; }); @@ -594,7 +605,6 @@ export class CollectionTreeView extends CollectionSubView(Document) { } render() { - trace(); let dropAction = StrCast(this.props.Document.dropAction) as dropActionType; let addDoc = (doc: Doc, relativeTo?: Doc, before?: boolean) => Doc.AddDocToList(this.props.Document, this.props.fieldKey, doc, relativeTo, before, false, false, false); let moveDoc = (d: Doc, target: Doc, addDoc: (doc: Doc) => boolean) => this.props.moveDocument(d, target, addDoc); @@ -622,7 +632,7 @@ export class CollectionTreeView extends CollectionSubView(Document) { {this.props.Document.allowClear ? this.renderClearButton : (null)}
      { - TreeView.GetChildElements(this.childDocs, this.props.Document[Id], this.props.Document, this.props.DataDoc, this.props.fieldKey, addDoc, this.remove, + TreeView.GetChildElements(this.childDocs, this.props.Document[Id], this.props.Document, this.props.DataDoc, this.props.fieldKey, this.props.ContainingCollectionDoc, undefined, addDoc, this.remove, moveDoc, dropAction, this.props.addDocTab, this.props.pinToPres, this.props.ScreenToLocalTransform, this.outerXf, this.props.active, this.props.PanelWidth, this.props.ChromeHeight, this.props.renderDepth, () => !this.props.Document.hideHeaderFields, BoolCast(this.props.Document.preventTreeViewOpen), []) diff --git a/src/client/views/nodes/ImageBox.scss b/src/client/views/nodes/ImageBox.scss index f28ca98f7..96ea4d0d6 100644 --- a/src/client/views/nodes/ImageBox.scss +++ b/src/client/views/nodes/ImageBox.scss @@ -7,7 +7,7 @@ transform-origin: top left; } -.imageBox-cont, .imageBox-cont-interactive { +.imageBox-cont, .imageBox-cont-dragging { padding: 0vw; position: absolute; text-align: center; @@ -17,10 +17,20 @@ max-height: 100%; pointer-events: none; background:transparent; + img { + height: auto; + width: 100%; + pointer-events: all; + } + .imageBox-fader { + pointer-events: all; + } } -.imageBox-fader { - pointer-events: all; +.imageBox-cont-dragging { + .imageBox-fader { + pointer-events: none; + } } .imageBox-dot { @@ -33,15 +43,6 @@ background: gray; } -.imageBox-cont img { - height: auto; - width: 100%; -} - -.imageBox-cont-interactive img { - height: auto; - width: 100%; -} #google-photos { transition: all 0.5s ease 0s; diff --git a/src/client/views/nodes/ImageBox.tsx b/src/client/views/nodes/ImageBox.tsx index 323b05a5a..bf82da281 100644 --- a/src/client/views/nodes/ImageBox.tsx +++ b/src/client/views/nodes/ImageBox.tsx @@ -28,6 +28,7 @@ import { CollectionFreeFormView } from '../collections/collectionFreeForm/Collec import { documentSchema } from '../../../new_fields/documentSchemas'; import { Id } from '../../../new_fields/FieldSymbols'; import { TraceMobx } from '../../../new_fields/util'; +import { SelectionManager } from '../../util/SelectionManager'; var requestImageSize = require('../../util/request-image-size'); var path = require('path'); const { Howl } = require('howler'); @@ -292,7 +293,7 @@ export class ImageBox extends DocAnnotatableComponent -
      - +
      + + {fadepath === srcpath ? (null) :
      + - {fadepath === srcpath ? (null) :
      -
      } -
      -
      - -
      - {this.considerGooglePhotosLink()} - -
      ); + onError={this.onError} />
      } +
    +
    + +
    + {this.considerGooglePhotosLink()} + +
  • ; } contentFunc = () => [this.content]; -- cgit v1.2.3-70-g09d2 From 96519f4537895edcd1a23f353322459de328a330 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Tue, 26 Nov 2019 19:46:54 -0500 Subject: pivot label fixes --- .../CollectionFreeFormLayoutEngines.tsx | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) (limited to 'src') diff --git a/src/client/views/collections/collectionFreeForm/CollectionFreeFormLayoutEngines.tsx b/src/client/views/collections/collectionFreeForm/CollectionFreeFormLayoutEngines.tsx index 48d330674..42e987a9a 100644 --- a/src/client/views/collections/collectionFreeForm/CollectionFreeFormLayoutEngines.tsx +++ b/src/client/views/collections/collectionFreeForm/CollectionFreeFormLayoutEngines.tsx @@ -1,11 +1,12 @@ import { Doc, Field, FieldResult } from "../../../../new_fields/Doc"; -import { NumCast, StrCast, Cast } from "../../../../new_fields/Types"; +import { NumCast, StrCast, Cast, DateCast } from "../../../../new_fields/Types"; import { ScriptBox } from "../../ScriptBox"; import { CompileScript } from "../../../util/Scripting"; import { ScriptField } from "../../../../new_fields/ScriptField"; import { OverlayView, OverlayElementOptions } from "../../OverlayView"; import { emptyFunction } from "../../../../Utils"; import React = require("react"); +import { DateField } from "../../../../new_fields/DateField"; interface PivotData { type: string; @@ -31,6 +32,16 @@ export interface ViewDefResult { bounds?: ViewDefBounds; } +function toLabel(target: FieldResult) { + if (target instanceof DateField) { + const date = DateCast(target).date; + if (date) { + return `${date.toDateString()} ${date.toTimeString()}`; + } + } + return String(target); +} + export function computePivotLayout(pivotDoc: Doc, childDocs: Doc[], childPairs: { layout: Doc, data?: Doc }[], viewDefsToJSX: (views: any) => ViewDefResult[]) { let layoutPoolData: Map<{ layout: Doc, data?: Doc }, any> = new Map(); const pivotAxisWidth = NumCast(pivotDoc.pivotWidth, 200); @@ -55,7 +66,7 @@ export function computePivotLayout(pivotDoc: Doc, childDocs: Doc[], childPairs: let xCount = 0; groupNames.push({ type: "text", - text: String(key), + text: toLabel(key), x, y: pivotAxisWidth + 50, width: pivotAxisWidth * 1.25 * numCols, -- cgit v1.2.3-70-g09d2 From 0e5445c6eb3cb04b2657d5b5abeb89e0b1538220 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Tue, 26 Nov 2019 20:18:38 -0500 Subject: improved diagnostics routine --- src/client/util/ClientDiagnostics.ts | 6 ++++-- src/server/ApiManagers/DiagnosticManager.ts | 8 ++++++-- 2 files changed, 10 insertions(+), 4 deletions(-) (limited to 'src') diff --git a/src/client/util/ClientDiagnostics.ts b/src/client/util/ClientDiagnostics.ts index e454cdecb..6f82a47db 100644 --- a/src/client/util/ClientDiagnostics.ts +++ b/src/client/util/ClientDiagnostics.ts @@ -16,10 +16,12 @@ export namespace ClientDiagnostics { }, 100); + let executed = false; const solrHandle = setInterval(async () => { const response = await fetch("/solrHeartbeat"); - if (!response) { - alert("Looks like SOLR is not running on your machine."); + if (!(await response.json()).running) { + !executed && alert("Looks like SOLR is not running on your machine."); + executed = true; clearInterval(solrHandle); } }, 100); diff --git a/src/server/ApiManagers/DiagnosticManager.ts b/src/server/ApiManagers/DiagnosticManager.ts index b775167b6..104985481 100644 --- a/src/server/ApiManagers/DiagnosticManager.ts +++ b/src/server/ApiManagers/DiagnosticManager.ts @@ -16,8 +16,12 @@ export default class 
DiagnosticManager extends ApiManager { method: Method.GET, subscription: "/solrHeartbeat", onValidation: async ({ res }) => { - const response = await request("http://localhost:8983"); - res.send(response !== undefined); + try { + await request("http://localhost:8983"); + res.send({ running: true }); + } catch (e) { + res.send({ running: false }); + } } }); -- cgit v1.2.3-70-g09d2 From 780240515a06d9d71a4b58a2559d8661478a560f Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Tue, 26 Nov 2019 20:34:33 -0500 Subject: cleanup after looking at changed files --- src/client/DocServer.ts | 2 -- src/client/util/ClientDiagnostics.ts | 4 ++-- src/client/util/Import & Export/ImageUtils.ts | 2 +- src/client/views/MainView.tsx | 1 - src/client/views/search/SearchBox.tsx | 2 +- src/server/ApiManagers/UserManager.ts | 1 - src/server/Initialization.ts | 1 - src/server/RouteManager.ts | 2 ++ 8 files changed, 6 insertions(+), 9 deletions(-) (limited to 'src') diff --git a/src/client/DocServer.ts b/src/client/DocServer.ts index 14479694c..2cec1046b 100644 --- a/src/client/DocServer.ts +++ b/src/client/DocServer.ts @@ -64,8 +64,6 @@ export namespace DocServer { } } - let connection_error = false; - export function init(protocol: string, hostname: string, port: number, identifier: string) { _cache = {}; GUID = identifier; diff --git a/src/client/util/ClientDiagnostics.ts b/src/client/util/ClientDiagnostics.ts index 6f82a47db..24f196252 100644 --- a/src/client/util/ClientDiagnostics.ts +++ b/src/client/util/ClientDiagnostics.ts @@ -13,7 +13,7 @@ export namespace ClientDiagnostics { } await fetch("/serverHeartbeat"); serverPolls--; - }, 100); + }, 1000 * 15); let executed = false; @@ -24,7 +24,7 @@ export namespace ClientDiagnostics { executed = true; clearInterval(solrHandle); } - }, 100); + }, 1000 * 15); } diff --git a/src/client/util/Import & Export/ImageUtils.ts b/src/client/util/Import & Export/ImageUtils.ts index ca80f3bca..6a9486f83 100644 --- a/src/client/util/Import & Export/ImageUtils.ts +++ b/src/client/util/Import & Export/ImageUtils.ts @@ -22,7 +22,7 @@ export namespace ImageUtils { export const ExportHierarchyToFileSystem = async (collection: Doc): Promise => { const a = document.createElement("a"); - a.href = Utils.prepend(`imageHierarchyExport/${collection[Id]}`); + a.href = Utils.prepend(`/imageHierarchyExport/${collection[Id]}`); a.download = `Dash Export [${StrCast(collection.title)}].zip`; a.click(); }; diff --git a/src/client/views/MainView.tsx b/src/client/views/MainView.tsx index f59da3cde..5231075a1 100644 --- a/src/client/views/MainView.tsx +++ b/src/client/views/MainView.tsx @@ -423,7 +423,6 @@ export class MainView extends React.Component { return !this.userDoc || !(sidebar instanceof Doc) ? (null) : (
    -
    HEY!
    diff --git a/src/client/views/search/SearchBox.tsx b/src/client/views/search/SearchBox.tsx index 1337d3f95..ff35542ed 100644 --- a/src/client/views/search/SearchBox.tsx +++ b/src/client/views/search/SearchBox.tsx @@ -89,7 +89,7 @@ export class SearchBox extends React.Component { public static async convertDataUri(imageUri: string, returnedFilename: string) { try { - let posting = Utils.prepend("uploadURI"); + let posting = Utils.prepend("/uploadURI"); const returnedUri = await rp.post(posting, { body: { uri: imageUri, diff --git a/src/server/ApiManagers/UserManager.ts b/src/server/ApiManagers/UserManager.ts index 8edeab16d..0f7d14320 100644 --- a/src/server/ApiManagers/UserManager.ts +++ b/src/server/ApiManagers/UserManager.ts @@ -9,7 +9,6 @@ interface ActivityUnit { duration: number; } - export default class UserManager extends ApiManager { protected initialize(register: Registration): void { diff --git a/src/server/Initialization.ts b/src/server/Initialization.ts index 76acb4363..8b633a7cd 100644 --- a/src/server/Initialization.ts +++ b/src/server/Initialization.ts @@ -40,7 +40,6 @@ export default async function InitializeServer(options: InitializationOptions) { server.use("*", ({ user, originalUrl }, _res, next) => { if (!originalUrl.includes("Heartbeat")) { const userEmail = user?.email; - console.log(ConsoleColors.Cyan, originalUrl, userEmail ?? ""); if (userEmail) { timeMap[userEmail] = Date.now(); } diff --git a/src/server/RouteManager.ts b/src/server/RouteManager.ts index 3aae5734a..3a20d5af5 100644 --- a/src/server/RouteManager.ts +++ b/src/server/RouteManager.ts @@ -1,6 +1,7 @@ import RouteSubscriber from "./RouteSubscriber"; import { DashUserModel } from "./authentication/models/user_model"; import * as express from 'express'; +import { ConsoleColors } from "./ActionUtilities"; export enum Method { GET, @@ -52,6 +53,7 @@ export default class RouteManager { try { await toExecute(args); } catch (e) { + console.log(ConsoleColors.Red, target, user?.email ?? 
""); if (onError) { onError({ ...core, error: e }); } else { -- cgit v1.2.3-70-g09d2 From 2f4c58306af19954b0c849efb503b9620fab6efe Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Tue, 26 Nov 2019 21:45:09 -0500 Subject: intermediate, not working --- .gitignore | 1 + src/server/ApiManagers/UploadManager.ts | 44 ++-- src/server/DashUploadUtils.ts | 342 +++++++++++++++++--------------- src/server/public/files/.gitignore | 2 - 4 files changed, 195 insertions(+), 194 deletions(-) delete mode 100644 src/server/public/files/.gitignore (limited to 'src') diff --git a/.gitignore b/.gitignore index 5161268ac..e5048cfc4 100644 --- a/.gitignore +++ b/.gitignore @@ -5,3 +5,4 @@ dist/ .env ClientUtils.ts solr-8.1.1/server/ +src/server/public/files/ \ No newline at end of file diff --git a/src/server/ApiManagers/UploadManager.ts b/src/server/ApiManagers/UploadManager.ts index aca63a918..2a9faacd8 100644 --- a/src/server/ApiManagers/UploadManager.ts +++ b/src/server/ApiManagers/UploadManager.ts @@ -3,7 +3,7 @@ import { Method, _success } from "../RouteManager"; import * as formidable from 'formidable'; import v4 = require('uuid/v4'); var AdmZip = require('adm-zip'); -import * as path from 'path'; +import { extname, basename, dirname } from 'path'; import { createReadStream, createWriteStream, unlink, readFileSync } from "fs"; import { publicDirectory, filesDirectory } from ".."; import { Database } from "../database"; @@ -81,17 +81,17 @@ export default class UploadManager extends ApiManager { const zip = new AdmZip(path_2); zip.getEntries().forEach((entry: any) => { if (!entry.entryName.startsWith("files/")) return; - let dirname = path.dirname(entry.entryName) + "/"; - let extname = path.extname(entry.entryName); - let basename = path.basename(entry.entryName).split(".")[0]; + let directory = dirname(entry.entryName) + "/"; + let extension = extname(entry.entryName); + let base = basename(entry.entryName).split(".")[0]; try { zip.extractEntryTo(entry.entryName, publicDirectory, true, false); - dirname = "/" + dirname; + directory = "/" + directory; - createReadStream(publicDirectory + dirname + basename + extname).pipe(createWriteStream(publicDirectory + dirname + basename + "_o" + extname)); - createReadStream(publicDirectory + dirname + basename + extname).pipe(createWriteStream(publicDirectory + dirname + basename + "_s" + extname)); - createReadStream(publicDirectory + dirname + basename + extname).pipe(createWriteStream(publicDirectory + dirname + basename + "_m" + extname)); - createReadStream(publicDirectory + dirname + basename + extname).pipe(createWriteStream(publicDirectory + dirname + basename + "_l" + extname)); + createReadStream(publicDirectory + directory + base + extension).pipe(createWriteStream(publicDirectory + directory + base + "_o" + extension)); + createReadStream(publicDirectory + directory + base + extension).pipe(createWriteStream(publicDirectory + directory + base + "_s" + extension)); + createReadStream(publicDirectory + directory + base + extension).pipe(createWriteStream(publicDirectory + directory + base + "_m" + extension)); + createReadStream(publicDirectory + directory + base + extension).pipe(createWriteStream(publicDirectory + directory + base + "_l" + extension)); } catch (e) { console.log(e); } @@ -133,29 +133,9 @@ export default class UploadManager extends ApiManager { form.keepExtensions = true; return new Promise(resolve => { form.parse(req, async (_err, _fields, files) => { - let results: DashUploadUtils.ImageFileResponse[] = []; + let results: any[] = []; for 
(const key in files) { - const { type, path: location, name } = files[key]; - const filename = path.basename(location); - let uploadInformation: Opt; - if (filename.endsWith(".pdf")) { - let dataBuffer = readFileSync(filesDirectory + filename); - const result: ParsedPDF = await pdf(dataBuffer); - await new Promise((resolve, reject) => { - const path = filesDirectory + DashUploadUtils.Partitions.pdf_text + "/" + filename.substring(0, filename.length - ".pdf".length) + ".txt"; - createWriteStream(path).write(result.text, error => { - if (!error) { - resolve(); - } else { - reject(error); - } - }); - }); - } else { - uploadInformation = await DashUploadUtils.UploadImage(filesDirectory + filename, filename); - } - const exif = uploadInformation ? uploadInformation.exifData : undefined; - results.push({ name, type, path: `/files/${filename}`, exif }); + results.push(DashUploadUtils.upload(files[key])); } _success(res, results); resolve(); @@ -188,7 +168,7 @@ export default class UploadManager extends ApiManager { return; } return imageDataUri.outputFile(uri, filesDirectory + filename).then((savedName: string) => { - const ext = path.extname(savedName).toLowerCase(); + const ext = extname(savedName).toLowerCase(); const { pngs, jpgs } = SharedMediaTypes; let resizers = [ { resizer: sharp().resize(100, undefined, { withoutEnlargement: true }), suffix: "_s" }, diff --git a/src/server/DashUploadUtils.ts b/src/server/DashUploadUtils.ts index 8a429b81b..839aada4b 100644 --- a/src/server/DashUploadUtils.ts +++ b/src/server/DashUploadUtils.ts @@ -7,6 +7,8 @@ import { ExifData, ExifImage } from 'exif'; import { Opt } from '../new_fields/Doc'; import { SharedMediaTypes } from './SharedMediaTypes'; import { filesDirectory } from '.'; +import { File } from 'formidable'; +import { extname, basename } from "path"; const uploadDirectory = path.join(__dirname, './public/files/'); @@ -45,175 +47,195 @@ export namespace DashUploadUtils { contentType?: string; } - const generate = (prefix: string, url: string) => `${prefix}upload_${Utils.GenerateGuid()}${sanitizeExtension(url)}`; - const sanitize = (filename: string) => filename.replace(/\s+/g, "_"); - const sanitizeExtension = (source: string) => { - let extension = path.extname(source); - extension = extension.toLowerCase(); - extension = extension.split("?")[0]; - return extension; - }; - - /** - * Uploads an image specified by the @param source to Dash's /public/files/ - * directory, and returns information generated during that upload - * - * @param {string} source is either the absolute path of an already uploaded image or - * the url of a remote image - * @param {string} filename dictates what to call the image. If not specified, - * the name {@param prefix}_upload_{GUID} - * @param {string} prefix is a string prepended to the generated image name in the - * event that @param filename is not specified - * - * @returns {UploadInformation} This method returns - * 1) the paths to the uploaded images (plural due to resizing) - * 2) the file name of each of the resized images - * 3) the size of the image, in bytes (4432130) - * 4) the content type of the image, i.e. image/(jpeg | png | ...) 
- */ - export const UploadImage = async (source: string, filename?: string, prefix: string = ""): Promise => { - const metadata = await InspectImage(source); - return UploadInspectedImage(metadata, filename, prefix); - }; - - export interface InspectionResults { - isLocal: boolean; - stream: any; - normalizedUrl: string; - exifData: EnrichedExifData; - contentSize?: number; - contentType?: string; - } - - export interface EnrichedExifData { - data: ExifData; - error?: string; - } - - export enum Partitions { - pdf_text, - images, - videos - } - - export async function buildFilePartitions() { - const pending = Object.keys(Partitions).map(sub => createIfNotExists(filesDirectory + sub)); - return Promise.all(pending); - } - - /** - * Based on the url's classification as local or remote, gleans - * as much information as possible about the specified image - * - * @param source is the path or url to the image in question - */ - export const InspectImage = async (source: string): Promise => { - const { isLocal, stream, normalized: normalizedUrl } = classify(source); - const exifData = await parseExifData(source); - const results = { - exifData, - isLocal, - stream, - normalizedUrl - }; - // stop here if local, since request.head() can't handle local paths, only urls on the web - if (isLocal) { - return results; + export function upload(file: File): any { + const { type, path, name } = file; + const filename = basename(path); + const extension = extname(path).toLowerCase(); + if (extension === ".pdf") { + + } else if { + let partition: Opt; + if(imageFormats.includes(extension)) { + partition = DashUploadUtils.Partitions.images; + } else if (videoFormats.includes(extension)) { + partition = DashUploadUtils.Partitions.videos; } - const metadata = (await new Promise((resolve, reject) => { - request.head(source, async (error, res) => { - if (error) { - return reject(error); - } - resolve(res); - }); - })).headers; - return { - contentSize: parseInt(metadata[size]), - contentType: metadata[type], - ...results - }; + let uploadInformation: Opt; + if (partition) { + uploadInformation = await DashUploadUtils.UploadImage(`${filesDirectory}/${partition}/${filename}`, filename); + } else { + console.log(`Unable to accommodate, and ignored, the following file upload: ${filename}`); + } + } + const exif = uploadInformation ? uploadInformation.exifData : undefined; + results.push({ name, type, path: `/files/${filename}`, exif }); + +} + +const generate = (prefix: string, url: string) => `${prefix}upload_${Utils.GenerateGuid()}${sanitizeExtension(url)}`; +const sanitize = (filename: string) => filename.replace(/\s+/g, "_"); +const sanitizeExtension = (source: string) => { + let extension = path.extname(source); + extension = extension.toLowerCase(); + extension = extension.split("?")[0]; + return extension; +}; + +/** + * Uploads an image specified by the @param source to Dash's /public/files/ + * directory, and returns information generated during that upload + * + * @param {string} source is either the absolute path of an already uploaded image or + * the url of a remote image + * @param {string} filename dictates what to call the image. 
If not specified, + * the name {@param prefix}_upload_{GUID} + * @param {string} prefix is a string prepended to the generated image name in the + * event that @param filename is not specified + * + * @returns {UploadInformation} This method returns + * 1) the paths to the uploaded images (plural due to resizing) + * 2) the file name of each of the resized images + * 3) the size of the image, in bytes (4432130) + * 4) the content type of the image, i.e. image/(jpeg | png | ...) + */ +export const UploadImage = async (source: string, filename?: string, prefix: string = ""): Promise => { + const metadata = await InspectImage(source); + return UploadInspectedImage(metadata, filename, prefix); +}; + +export interface InspectionResults { + isLocal: boolean; + stream: any; + normalizedUrl: string; + exifData: EnrichedExifData; + contentSize?: number; + contentType?: string; +} + +export interface EnrichedExifData { + data: ExifData; + error?: string; +} + +export enum Partitions { + pdf_text = "pdf_text", + images = "images", + videos = "videos" +} + +export async function buildFilePartitions() { + const pending = Object.keys(Partitions).map(sub => createIfNotExists(filesDirectory + sub)); + return Promise.all(pending); +} + +/** + * Based on the url's classification as local or remote, gleans + * as much information as possible about the specified image + * + * @param source is the path or url to the image in question + */ +export const InspectImage = async (source: string): Promise => { + const { isLocal, stream, normalized: normalizedUrl } = classify(source); + const exifData = await parseExifData(source); + const results = { + exifData, + isLocal, + stream, + normalizedUrl }; - - export const UploadInspectedImage = async (metadata: InspectionResults, filename?: string, prefix = ""): Promise => { - const { isLocal, stream, normalizedUrl, contentSize, contentType, exifData } = metadata; - const resolved = filename ? 
sanitize(filename) : generate(prefix, normalizedUrl); - const extension = sanitizeExtension(normalizedUrl || resolved); - let information: UploadInformation = { - mediaPaths: [], - fileNames: { clean: resolved }, - exifData, - contentSize, - contentType, - }; - const { pngs, imageFormats, jpgs, videoFormats } = SharedMediaTypes; - return new Promise(async (resolve, reject) => { - const resizers = [ - { resizer: sharp().rotate(), suffix: "_o" }, - ...Object.values(Sizes).map(size => ({ - resizer: sharp().resize(size.width, undefined, { withoutEnlargement: true }).rotate(), - suffix: size.suffix - })) - ]; - let nonVisual = false; - if (pngs.includes(extension)) { - resizers.forEach(element => element.resizer = element.resizer.png()); - } else if (jpgs.includes(extension)) { - resizers.forEach(element => element.resizer = element.resizer.jpeg()); - } else if (![...imageFormats, ...videoFormats].includes(extension.toLowerCase())) { - nonVisual = true; - } - if (imageFormats.includes(extension)) { - for (let resizer of resizers) { - const suffix = resizer.suffix; - let mediaPath: string; - await new Promise(resolve => { - const filename = resolved.substring(0, resolved.length - extension.length) + suffix + extension; - information.mediaPaths.push(mediaPath = uploadDirectory + filename); - information.fileNames[suffix] = filename; - stream(normalizedUrl).pipe(resizer.resizer).pipe(fs.createWriteStream(mediaPath)) - .on('close', resolve) - .on('error', reject); - }); - } - } - if (!isLocal || nonVisual) { - await new Promise(resolve => { - stream(normalizedUrl).pipe(fs.createWriteStream(uploadDirectory + resolved)).on('close', resolve); - }); + // stop here if local, since request.head() can't handle local paths, only urls on the web + if (isLocal) { + return results; + } + const metadata = (await new Promise((resolve, reject) => { + request.head(source, async (error, res) => { + if (error) { + return reject(error); } - resolve(information); + resolve(res); }); + })).headers; + return { + contentSize: parseInt(metadata[size]), + contentType: metadata[type], + ...results }; - - const classify = (url: string) => { - const isLocal = /Dash-Web(\\|\/)src(\\|\/)server(\\|\/)public(\\|\/)files/g.test(url); - return { - isLocal, - stream: isLocal ? fs.createReadStream : request, - normalized: isLocal ? path.normalize(url) : url - }; +}; + +export const UploadInspectedImage = async (metadata: InspectionResults, filename?: string, prefix = ""): Promise => { + const { isLocal, stream, normalizedUrl, contentSize, contentType, exifData } = metadata; + const resolved = filename ? 
sanitize(filename) : generate(prefix, normalizedUrl); + const extension = sanitizeExtension(normalizedUrl || resolved); + let information: UploadInformation = { + mediaPaths: [], + fileNames: { clean: resolved }, + exifData, + contentSize, + contentType, }; - - const parseExifData = async (source: string): Promise => { - return new Promise(resolve => { - new ExifImage(source, (error, data) => { - let reason: Opt = undefined; - if (error) { - reason = (error as any).code; - } - resolve({ data, error: reason }); + const { pngs, jpgs } = SharedMediaTypes; + return new Promise(async (resolve, reject) => { + const resizers = [ + { resizer: sharp().rotate(), suffix: "_o" }, + ...Object.values(Sizes).map(size => ({ + resizer: sharp().resize(size.width, undefined, { withoutEnlargement: true }).rotate(), + suffix: size.suffix + })) + ]; + if (pngs.includes(extension)) { + resizers.forEach(element => element.resizer = element.resizer.png()); + } else if (jpgs.includes(extension)) { + resizers.forEach(element => element.resizer = element.resizer.jpeg()); + } + for (let resizer of resizers) { + const suffix = resizer.suffix; + let mediaPath: string; + await new Promise(resolve => { + const filename = resolved.substring(0, resolved.length - extension.length) + suffix + extension; + information.mediaPaths.push(mediaPath = uploadDirectory + filename); + information.fileNames[suffix] = filename; + stream(normalizedUrl).pipe(resizer.resizer).pipe(fs.createWriteStream(mediaPath)) + .on('close', resolve) + .on('error', reject); + }); + } + if (!isLocal) { + await new Promise(resolve => { + stream(normalizedUrl).pipe(fs.createWriteStream(uploadDirectory + resolved)).on('close', resolve); }); - }); - }; - - export const createIfNotExists = async (path: string) => { - if (await new Promise(resolve => fs.exists(path, resolve))) { - return true; } - return new Promise(resolve => fs.mkdir(path, error => resolve(error === null))); + resolve(information); + }); +}; + +const classify = (url: string) => { + const isLocal = /Dash-Web(\\|\/)src(\\|\/)server(\\|\/)public(\\|\/)files/g.test(url); + return { + isLocal, + stream: isLocal ? fs.createReadStream : request, + normalized: isLocal ? 
path.normalize(url) : url }; +}; + +const parseExifData = async (source: string): Promise => { + return new Promise(resolve => { + new ExifImage(source, (error, data) => { + let reason: Opt = undefined; + if (error) { + reason = (error as any).code; + } + resolve({ data, error: reason }); + }); + }); +}; + +export const createIfNotExists = async (path: string) => { + if (await new Promise(resolve => fs.exists(path, resolve))) { + return true; + } + return new Promise(resolve => fs.mkdir(path, error => resolve(error === null))); +}; - export const Destroy = (mediaPath: string) => new Promise(resolve => fs.unlink(mediaPath, error => resolve(error === null))); +export const Destroy = (mediaPath: string) => new Promise(resolve => fs.unlink(mediaPath, error => resolve(error === null))); } \ No newline at end of file diff --git a/src/server/public/files/.gitignore b/src/server/public/files/.gitignore deleted file mode 100644 index c96a04f00..000000000 --- a/src/server/public/files/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -* -!.gitignore \ No newline at end of file -- cgit v1.2.3-70-g09d2 From df5584ccd40bd83f1362b32db67969e7ffbf2e3f Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Wed, 27 Nov 2019 04:03:30 -0500 Subject: improved file partitioning in server and generified upload method --- package.json | 2 + src/client/documents/Documents.ts | 1 - src/client/util/ClientDiagnostics.ts | 11 +- .../util/Import & Export/DirectoryImportBox.tsx | 19 +- src/client/views/Main.tsx | 2 +- src/client/views/collections/CollectionSubView.tsx | 22 +- src/client/views/pdf/PDFViewer.tsx | 3 +- src/server/ActionUtilities.ts | 17 +- src/server/ApiManagers/DownloadManager.ts | 4 +- src/server/ApiManagers/GooglePhotosManager.ts | 4 +- src/server/ApiManagers/PDFManager.ts | 34 +- src/server/ApiManagers/SearchManager.ts | 4 +- src/server/ApiManagers/UploadManager.ts | 47 ++- src/server/DashUploadUtils.ts | 388 +++++++++++---------- src/server/SharedMediaTypes.ts | 5 +- src/server/database.ts | 4 +- src/server/index.ts | 4 +- 17 files changed, 314 insertions(+), 257 deletions(-) (limited to 'src') diff --git a/package.json b/package.json index 393df8574..3725d76eb 100644 --- a/package.json +++ b/package.json @@ -105,6 +105,7 @@ "@types/react-table": "^6.7.22", "@types/request": "^2.48.1", "@types/request-promise": "^4.1.42", + "@types/rimraf": "^2.0.3", "@types/sharp": "^0.22.2", "@types/shelljs": "^0.8.5", "@types/socket.io": "^2.1.2", @@ -211,6 +212,7 @@ "readline": "^1.3.0", "request": "^2.88.0", "request-promise": "^4.2.4", + "rimraf": "^3.0.0", "serializr": "^1.5.1", "sharp": "^0.22.1", "shelljs": "^0.8.3", diff --git a/src/client/documents/Documents.ts b/src/client/documents/Documents.ts index dea057b93..a2f4d23c7 100644 --- a/src/client/documents/Documents.ts +++ b/src/client/documents/Documents.ts @@ -377,7 +377,6 @@ export namespace Docs { let extension = path.extname(target); target = `${target.substring(0, target.length - extension.length)}_o${extension}`; } - // if (target !== "http://www.cs.brown.edu/") { requestImageSize(target) .then((size: any) => { let aspect = size.height / size.width; diff --git a/src/client/util/ClientDiagnostics.ts b/src/client/util/ClientDiagnostics.ts index 24f196252..7eef935fd 100644 --- a/src/client/util/ClientDiagnostics.ts +++ b/src/client/util/ClientDiagnostics.ts @@ -1,9 +1,6 @@ -import { observable, runInAction } from "mobx"; -import { MainView } from "../views/MainView"; - export namespace ClientDiagnostics { - export function start() { + export async function start() { 
let serverPolls = 0; const serverHandle = setInterval(async () => { @@ -17,14 +14,16 @@ export namespace ClientDiagnostics { let executed = false; - const solrHandle = setInterval(async () => { + const handle = async () => { const response = await fetch("/solrHeartbeat"); if (!(await response.json()).running) { !executed && alert("Looks like SOLR is not running on your machine."); executed = true; clearInterval(solrHandle); } - }, 1000 * 15); + }; + await handle(); + const solrHandle = setInterval(handle, 1000 * 15); } diff --git a/src/client/util/Import & Export/DirectoryImportBox.tsx b/src/client/util/Import & Export/DirectoryImportBox.tsx index f0880f193..16ae50685 100644 --- a/src/client/util/Import & Export/DirectoryImportBox.tsx +++ b/src/client/util/Import & Export/DirectoryImportBox.tsx @@ -22,18 +22,10 @@ import "./DirectoryImportBox.scss"; import { Networking } from "../../Network"; import { BatchedArray } from "array-batcher"; import * as path from 'path'; -import { DashUploadUtils } from "../../../server/DashUploadUtils"; -import { SharedMediaTypes } from "../../../server/SharedMediaTypes"; +import { AcceptibleMedia } from "../../../server/SharedMediaTypes"; const unsupported = ["text/html", "text/plain"]; -interface ImageUploadResponse { - name: string; - path: string; - type: string; - exif: any; -} - @observer export default class DirectoryImportBox extends React.Component { private selector = React.createRef(); @@ -98,7 +90,7 @@ export default class DirectoryImportBox extends React.Component let file = files.item(i); if (file && !unsupported.includes(file.type)) { const ext = path.extname(file.name).toLowerCase(); - if (SharedMediaTypes.imageFormats.includes(ext)) { + if (AcceptibleMedia.imageFormats.includes(ext)) { validated.push(file); } } @@ -114,7 +106,7 @@ export default class DirectoryImportBox extends React.Component runInAction(() => this.phase = `Internal: uploading ${this.quota - this.completed} files to Dash...`); - const uploads = await BatchedArray.from(validated, { batchSize: 15 }).batchedMapAsync(async (batch, collector) => { + const uploads = await BatchedArray.from(validated, { batchSize: 15 }).batchedMapAsync(async (batch, collector) => { const formData = new FormData(); batch.forEach(file => { @@ -127,16 +119,17 @@ export default class DirectoryImportBox extends React.Component runInAction(() => this.completed += batch.length); }); + const size = "_o"; await Promise.all(uploads.map(async upload => { const type = upload.type; - const path = Utils.prepend(upload.path); + const path = Utils.prepend(upload.clientAccessPath); const options = { nativeWidth: 300, width: 300, title: upload.name }; const document = await Docs.Get.DocumentFromType(type, path, options); - const { data, error } = upload.exif; + const { data, error } = upload.exifData; if (document) { Doc.GetProto(document).exif = error || Docs.Get.DocumentHierarchyFromJson(data); docs.push(document); diff --git a/src/client/views/Main.tsx b/src/client/views/Main.tsx index dec4a24e4..9e699978f 100644 --- a/src/client/views/Main.tsx +++ b/src/client/views/Main.tsx @@ -10,7 +10,7 @@ import { ClientDiagnostics } from "../util/ClientDiagnostics"; AssignAllExtensions(); (async () => { - ClientDiagnostics.start(); + await ClientDiagnostics.start(); const info = await CurrentUserUtils.loadCurrentUser(); DocServer.init(window.location.protocol, window.location.hostname, 4321, info.email); await Docs.Prototypes.initialize(); diff --git a/src/client/views/collections/CollectionSubView.tsx 
b/src/client/views/collections/CollectionSubView.tsx index 1c3ff37ee..a1bd1527e 100644 --- a/src/client/views/collections/CollectionSubView.tsx +++ b/src/client/views/collections/CollectionSubView.tsx @@ -22,6 +22,7 @@ import React = require("react"); var path = require('path'); import { GooglePhotos } from "../../apis/google_docs/GooglePhotosClientUtils"; import { ImageUtils } from "../../util/Import & Export/ImageUtils"; +import { Networking } from "../../Network"; export interface CollectionViewProps extends FieldViewProps { addDocument: (document: Doc) => boolean; @@ -271,28 +272,25 @@ export function CollectionSubView(schemaCtor: (doc: Doc) => T) { let file = item.getAsFile(); let formData = new FormData(); - if (file) { - formData.append('file', file); + if (!file || !file.type) { + continue; } - let dropFileName = file ? file.name : "-empty-"; - let prom = fetch(Utils.prepend("/upload"), { - method: 'POST', - body: formData - }).then(async (res: Response) => { - (await res.json()).map(action((file: any) => { + formData.append('file', file); + let dropFileName = file ? file.name : "-empty-"; + promises.push(Networking.PostFormDataToServer("/upload", formData).then(results => { + results.map(action((file: any) => { let full = { ...options, nativeWidth: type.indexOf("video") !== -1 ? 600 : 300, width: 300, title: dropFileName }; - let pathname = Utils.prepend(file.path); + let pathname = Utils.prepend(file.clientAccessPath); Docs.Get.DocumentFromType(type, pathname, full).then(doc => { doc && (Doc.GetProto(doc).fileUpload = path.basename(pathname).replace("upload_", "").replace(/\.[a-z0-9]*$/, "")); doc && this.props.addDocument(doc); }); })); - }); - promises.push(prom); + })); } } - if (text) { + if (text && !text.includes("https://")) { this.props.addDocument(Docs.Create.TextDocument({ ...options, documentText: "@@@" + text, width: 400, height: 315 })); return; } diff --git a/src/client/views/pdf/PDFViewer.tsx b/src/client/views/pdf/PDFViewer.tsx index b737ce221..c075a4f99 100644 --- a/src/client/views/pdf/PDFViewer.tsx +++ b/src/client/views/pdf/PDFViewer.tsx @@ -125,7 +125,8 @@ export class PDFViewer extends DocAnnotatableComponent this._showWaiting = this._showCover = true); this.props.startupLive && this.setupPdfJsViewer(); this._searchReactionDisposer = reaction(() => this.Document.search_string, searchString => { diff --git a/src/server/ActionUtilities.ts b/src/server/ActionUtilities.ts index 7f493dd70..c9fc86fea 100644 --- a/src/server/ActionUtilities.ts +++ b/src/server/ActionUtilities.ts @@ -2,6 +2,7 @@ import * as fs from 'fs'; import { ExecOptions } from 'shelljs'; import { exec } from 'child_process'; import * as path from 'path'; +import * as rimraf from "rimraf"; export const command_line = (command: string, fromDirectory?: string) => { return new Promise((resolve, reject) => { @@ -68,4 +69,18 @@ export function msToTime(duration: number) { let secondsS = (seconds < 10) ? "0" + seconds : seconds; return hoursS + ":" + minutesS + ":" + secondsS + "." 
+ milliseconds; -} \ No newline at end of file +} + +export const createIfNotExists = async (path: string) => { + if (await new Promise(resolve => fs.exists(path, resolve))) { + return true; + } + return new Promise(resolve => fs.mkdir(path, error => resolve(error === null))); +}; + +export async function Prune(rootDirectory: string): Promise { + const error = await new Promise(resolve => rimraf(rootDirectory, resolve)); + return error === null; +} + +export const Destroy = (mediaPath: string) => new Promise(resolve => fs.unlink(mediaPath, error => resolve(error === null))); \ No newline at end of file diff --git a/src/server/ApiManagers/DownloadManager.ts b/src/server/ApiManagers/DownloadManager.ts index fc6ba0d22..5bad46eda 100644 --- a/src/server/ApiManagers/DownloadManager.ts +++ b/src/server/ApiManagers/DownloadManager.ts @@ -5,7 +5,7 @@ import * as Archiver from 'archiver'; import * as express from 'express'; import { Database } from "../database"; import * as path from "path"; -import { DashUploadUtils } from "../DashUploadUtils"; +import { DashUploadUtils, SizeSuffix } from "../DashUploadUtils"; import { publicDirectory } from ".."; export type Hierarchy = { [id: string]: string | Hierarchy }; @@ -254,7 +254,7 @@ async function writeHierarchyRecursive(file: Archiver.Archiver, hierarchy: Hiera // and dropped in the browser and thus hosted remotely) so we upload it // to our server and point the zip file to it, so it can bundle up the bytes const information = await DashUploadUtils.UploadImage(result); - path = information.mediaPaths[0]; + path = information.serverAccessPaths[SizeSuffix.Original]; } // write the file specified by the path to the directory in the // zip file given by the prefix. diff --git a/src/server/ApiManagers/GooglePhotosManager.ts b/src/server/ApiManagers/GooglePhotosManager.ts index c7af69375..5a709688b 100644 --- a/src/server/ApiManagers/GooglePhotosManager.ts +++ b/src/server/ApiManagers/GooglePhotosManager.ts @@ -86,10 +86,10 @@ export default class GooglePhotosManager extends ApiManager { const contents: { mediaItems: MediaItem[] } = req.body; let failed = 0; if (contents) { - const completed: Opt[] = []; + const completed: Opt[] = []; for (let item of contents.mediaItems) { const { contentSize, ...attributes } = await DashUploadUtils.InspectImage(item.baseUrl); - const found: Opt = await Database.Auxiliary.QueryUploadHistory(contentSize!); + const found: Opt = await Database.Auxiliary.QueryUploadHistory(contentSize!); if (!found) { const upload = await DashUploadUtils.UploadInspectedImage({ contentSize, ...attributes }, item.filename, prefix).catch(error => _error(res, downloadError, error)); if (upload) { diff --git a/src/server/ApiManagers/PDFManager.ts b/src/server/ApiManagers/PDFManager.ts index 632b4965a..4bd750aaf 100644 --- a/src/server/ApiManagers/PDFManager.ts +++ b/src/server/ApiManagers/PDFManager.ts @@ -2,12 +2,12 @@ import ApiManager, { Registration } from "./ApiManager"; import { Method } from "../RouteManager"; import RouteSubscriber from "../RouteSubscriber"; import { exists, createReadStream, createWriteStream } from "fs"; -import { filesDirectory } from ".."; import * as Pdfjs from 'pdfjs-dist'; import { createCanvas } from "canvas"; const probe = require("probe-image-size"); import * as express from "express"; import * as path from "path"; +import { Directory, serverPathToFile, clientPathToFile } from "./UploadManager"; export default class PDFManager extends ApiManager { @@ -21,21 +21,27 @@ export default class PDFManager extends 
ApiManager { let noExt = filename.substring(0, filename.length - ".png".length); let pagenumber = parseInt(noExt.split('-')[1]); return new Promise(resolve => { - exists(filesDirectory + filename, (exists: boolean) => { - console.log(`${filesDirectory + filename} ${exists ? "exists" : "does not exist"}`); + const path = serverPathToFile(Directory.pdf_thumbnails, filename); + exists(path, (exists: boolean) => { + console.log(`${path} ${exists ? "exists" : "does not exist"}`); if (exists) { - let input = createReadStream(filesDirectory + filename); - probe(input, (err: any, result: any) => { + let input = createReadStream(path); + probe(input, (err: any, { width, height }: any) => { if (err) { console.log(err); console.log(`error on ${filename}`); return; } - res.send({ path: "/files/" + filename, width: result.width, height: result.height }); + res.send({ + path: clientPathToFile(Directory.pdf_thumbnails, filename), + width, + height + }); }); } else { - LoadPage(filesDirectory + filename.substring(0, filename.length - noExt.split('-')[1].length - ".PNG".length - 1) + ".pdf", pagenumber, res); + const name = filename.substring(0, filename.length - noExt.split('-')[1].length - ".PNG".length - 1) + ".pdf"; + LoadPage(serverPathToFile(Directory.pdfs, name), pagenumber, res); } resolve(); }); @@ -55,8 +61,8 @@ export default class PDFManager extends ApiManager { let canvasAndContext = factory.create(viewport.width, viewport.height); let renderContext = { canvasContext: canvasAndContext.context, - viewport: viewport, - canvasFactory: factory + canvasFactory: factory, + viewport }; console.log("read " + pageNumber); @@ -64,13 +70,17 @@ export default class PDFManager extends ApiManager { .then(() => { console.log("saving " + pageNumber); let stream = canvasAndContext.canvas.createPNGStream(); - let pngFile = `${file.substring(0, file.length - ".pdf".length)}-${pageNumber}.PNG`; + let filenames = path.basename(file).split("."); + const pngFile = serverPathToFile(Directory.pdf_thumbnails, `${filenames[0]}-${pageNumber}.png`); let out = createWriteStream(pngFile); stream.pipe(out); out.on("finish", () => { console.log(`Success! 
Saved to ${pngFile}`); - let name = path.basename(pngFile); - res.send({ path: "/files/" + name, width: viewport.width, height: viewport.height }); + res.send({ + path: pngFile, + width: viewport.width, + height: viewport.height + }); }); }, (reason: string) => { console.error(reason + ` ${pageNumber}`); diff --git a/src/server/ApiManagers/SearchManager.ts b/src/server/ApiManagers/SearchManager.ts index 1c801715a..d3f8995b0 100644 --- a/src/server/ApiManagers/SearchManager.ts +++ b/src/server/ApiManagers/SearchManager.ts @@ -3,7 +3,7 @@ import { Method } from "../RouteManager"; import { Search } from "../Search"; var findInFiles = require('find-in-files'); import * as path from 'path'; -import { filesDirectory } from ".."; +import { pathToDirectory, Directory } from "./UploadManager"; export default class SearchManager extends ApiManager { @@ -18,7 +18,7 @@ export default class SearchManager extends ApiManager { res.send([]); return; } - let results = await findInFiles.find({ 'term': q, 'flags': 'ig' }, filesDirectory + "text", ".txt$"); + let results = await findInFiles.find({ 'term': q, 'flags': 'ig' }, pathToDirectory(Directory.text), ".txt$"); let resObj: { ids: string[], numFound: number, lines: string[] } = { ids: [], numFound: 0, lines: [] }; for (var result in results) { resObj.ids.push(path.basename(result, ".txt").replace(/upload_/, "")); diff --git a/src/server/ApiManagers/UploadManager.ts b/src/server/ApiManagers/UploadManager.ts index 2a9faacd8..2f76871a6 100644 --- a/src/server/ApiManagers/UploadManager.ts +++ b/src/server/ApiManagers/UploadManager.ts @@ -7,14 +7,33 @@ import { extname, basename, dirname } from 'path'; import { createReadStream, createWriteStream, unlink, readFileSync } from "fs"; import { publicDirectory, filesDirectory } from ".."; import { Database } from "../database"; -import { DashUploadUtils } from "../DashUploadUtils"; -import { Opt } from "../../new_fields/Doc"; -import { ParsedPDF } from "../PdfTypes"; -const pdf = require('pdf-parse'); +import { DashUploadUtils, SizeSuffix } from "../DashUploadUtils"; import * as sharp from 'sharp'; -import { SharedMediaTypes } from "../SharedMediaTypes"; +import { AcceptibleMedia } from "../SharedMediaTypes"; +import { normalize } from "path"; const imageDataUri = require('image-data-uri'); +export enum Directory { + parsed_files = "parsed_files", + images = "images", + videos = "videos", + pdfs = "pdfs", + text = "text", + pdf_thumbnails = "pdf_thumbnails" +} + +export function serverPathToFile(directory: Directory, filename: string) { + return normalize(`${filesDirectory}/${directory}/${filename}`); +} + +export function pathToDirectory(directory: Directory) { + return normalize(`${filesDirectory}/${directory}`); +} + +export function clientPathToFile(directory: Directory, filename: string) { + return `/files/${directory}/${filename}`; +} + export default class UploadManager extends ApiManager { protected initialize(register: Registration): void { @@ -129,13 +148,14 @@ export default class UploadManager extends ApiManager { subscription: "/upload", onValidation: async ({ req, res }) => { let form = new formidable.IncomingForm(); - form.uploadDir = filesDirectory; + form.uploadDir = pathToDirectory(Directory.parsed_files); form.keepExtensions = true; return new Promise(resolve => { form.parse(req, async (_err, _fields, files) => { let results: any[] = []; for (const key in files) { - results.push(DashUploadUtils.upload(files[key])); + const result = await DashUploadUtils.upload(files[key]); + result && 
results.push(result); } _success(res, results); resolve(); @@ -150,8 +170,8 @@ export default class UploadManager extends ApiManager { onValidation: async ({ req, res }) => { const { source } = req.body; if (typeof source === "string") { - const uploadInformation = await DashUploadUtils.UploadImage(source); - return res.send(await DashUploadUtils.InspectImage(uploadInformation.mediaPaths[0])); + const { serverAccessPaths } = await DashUploadUtils.UploadImage(source); + return res.send(await DashUploadUtils.InspectImage(serverAccessPaths[SizeSuffix.Original])); } res.send({}); } @@ -167,9 +187,9 @@ export default class UploadManager extends ApiManager { res.status(401).send("incorrect parameters specified"); return; } - return imageDataUri.outputFile(uri, filesDirectory + filename).then((savedName: string) => { + return imageDataUri.outputFile(uri, serverPathToFile(Directory.images, filename)).then((savedName: string) => { const ext = extname(savedName).toLowerCase(); - const { pngs, jpgs } = SharedMediaTypes; + const { pngs, jpgs } = AcceptibleMedia; let resizers = [ { resizer: sharp().resize(100, undefined, { withoutEnlargement: true }), suffix: "_s" }, { resizer: sharp().resize(400, undefined, { withoutEnlargement: true }), suffix: "_m" }, @@ -189,10 +209,11 @@ export default class UploadManager extends ApiManager { } if (isImage) { resizers.forEach(resizer => { - createReadStream(savedName).pipe(resizer.resizer).pipe(createWriteStream(filesDirectory + filename + resizer.suffix + ext)); + const path = serverPathToFile(Directory.images, filename + resizer.suffix + ext); + createReadStream(savedName).pipe(resizer.resizer).pipe(createWriteStream(path)); }); } - res.send("/files/" + filename + ext); + res.send(clientPathToFile(Directory.images, filename + ext)); }); } }); diff --git a/src/server/DashUploadUtils.ts b/src/server/DashUploadUtils.ts index 839aada4b..0a670ec01 100644 --- a/src/server/DashUploadUtils.ts +++ b/src/server/DashUploadUtils.ts @@ -5,18 +5,32 @@ import * as sharp from 'sharp'; import request = require('request-promise'); import { ExifData, ExifImage } from 'exif'; import { Opt } from '../new_fields/Doc'; -import { SharedMediaTypes } from './SharedMediaTypes'; +import { AcceptibleMedia } from './SharedMediaTypes'; import { filesDirectory } from '.'; import { File } from 'formidable'; -import { extname, basename } from "path"; +import { basename } from "path"; +import { ConsoleColors, createIfNotExists } from './ActionUtilities'; +import { ParsedPDF } from "../server/PdfTypes"; +const parse = require('pdf-parse'); +import { Directory, serverPathToFile, clientPathToFile } from './ApiManagers/UploadManager'; -const uploadDirectory = path.join(__dirname, './public/files/'); +export enum SizeSuffix { + Small = "_s", + Medium = "_m", + Large = "_l", + Original = "_o" +} export namespace DashUploadUtils { + function InjectSize(filename: string, size: SizeSuffix) { + const extension = path.extname(filename).toLowerCase(); + return filename.substring(0, filename.length - extension.length) + size + extension; + } + export interface Size { width: number; - suffix: string; + suffix: SizeSuffix; } export interface ImageFileResponse { @@ -27,215 +41,221 @@ export namespace DashUploadUtils { } export const Sizes: { [size: string]: Size } = { - SMALL: { width: 100, suffix: "_s" }, - MEDIUM: { width: 400, suffix: "_m" }, - LARGE: { width: 900, suffix: "_l" }, + SMALL: { width: 100, suffix: SizeSuffix.Small }, + MEDIUM: { width: 400, suffix: SizeSuffix.Medium }, + LARGE: { width: 900, 
suffix: SizeSuffix.Large }, }; export function validateExtension(url: string) { - return SharedMediaTypes.imageFormats.includes(path.extname(url).toLowerCase()); + return AcceptibleMedia.imageFormats.includes(path.extname(url).toLowerCase()); } const size = "content-length"; const type = "content-type"; - export interface UploadInformation { - mediaPaths: string[]; - fileNames: { [key: string]: string }; + export interface ImageUploadInformation { + clientAccessPath: string; + serverAccessPaths: { [key: string]: string }; exifData: EnrichedExifData; contentSize?: number; contentType?: string; } - export function upload(file: File): any { + export async function upload(file: File): Promise { const { type, path, name } = file; - const filename = basename(path); - const extension = extname(path).toLowerCase(); - if (extension === ".pdf") { - - } else if { - let partition: Opt; - if(imageFormats.includes(extension)) { - partition = DashUploadUtils.Partitions.images; - } else if (videoFormats.includes(extension)) { - partition = DashUploadUtils.Partitions.videos; - } - let uploadInformation: Opt; - if (partition) { - uploadInformation = await DashUploadUtils.UploadImage(`${filesDirectory}/${partition}/${filename}`, filename); - } else { - console.log(`Unable to accommodate, and ignored, the following file upload: ${filename}`); + const { imageFormats, videoFormats, applicationFormats } = AcceptibleMedia; + const types = type.split("/"); + + const category = types[0]; + const format = `.${types[1]}`; + + switch (category) { + case "image": + if (imageFormats.includes(format)) { + const { clientAccessPath } = await UploadImage(path, basename(path), format); + return { clientAccessPath, name, type }; + } + case "video": + if (videoFormats.includes(format)) { + return MoveParsedFile(path, Directory.videos); + } + case "application": + if (applicationFormats.includes(format)) { + return UploadPdf(path); + } } + console.log(ConsoleColors.Red, `Ignoring unsupported file ${name} with upload type (${type}).`); + return { clientAccessPath: undefined }; } - const exif = uploadInformation ? uploadInformation.exifData : undefined; - results.push({ name, type, path: `/files/${filename}`, exif }); -} + async function UploadPdf(absolutePath: string) { + let dataBuffer = fs.readFileSync(absolutePath); + const result: ParsedPDF = await parse(dataBuffer); + const parsedName = basename(absolutePath); + await new Promise((resolve, reject) => { + const textFilename = `${parsedName.substring(0, parsedName.length - 4)}.txt`; + const writeStream = fs.createWriteStream(serverPathToFile(Directory.text, textFilename)); + writeStream.write(result.text, error => error ? reject(error) : resolve()); + }); + return MoveParsedFile(absolutePath, Directory.pdfs); + } -const generate = (prefix: string, url: string) => `${prefix}upload_${Utils.GenerateGuid()}${sanitizeExtension(url)}`; -const sanitize = (filename: string) => filename.replace(/\s+/g, "_"); -const sanitizeExtension = (source: string) => { - let extension = path.extname(source); - extension = extension.toLowerCase(); - extension = extension.split("?")[0]; - return extension; -}; - -/** - * Uploads an image specified by the @param source to Dash's /public/files/ - * directory, and returns information generated during that upload - * - * @param {string} source is either the absolute path of an already uploaded image or - * the url of a remote image - * @param {string} filename dictates what to call the image. 
If not specified, - * the name {@param prefix}_upload_{GUID} - * @param {string} prefix is a string prepended to the generated image name in the - * event that @param filename is not specified - * - * @returns {UploadInformation} This method returns - * 1) the paths to the uploaded images (plural due to resizing) - * 2) the file name of each of the resized images - * 3) the size of the image, in bytes (4432130) - * 4) the content type of the image, i.e. image/(jpeg | png | ...) - */ -export const UploadImage = async (source: string, filename?: string, prefix: string = ""): Promise => { - const metadata = await InspectImage(source); - return UploadInspectedImage(metadata, filename, prefix); -}; - -export interface InspectionResults { - isLocal: boolean; - stream: any; - normalizedUrl: string; - exifData: EnrichedExifData; - contentSize?: number; - contentType?: string; -} + const generate = (prefix: string, url: string) => `${prefix}upload_${Utils.GenerateGuid()}${sanitizeExtension(url)}`; + const sanitizeExtension = (source: string) => { + let extension = path.extname(source); + extension = extension.toLowerCase(); + extension = extension.split("?")[0]; + return extension; + }; -export interface EnrichedExifData { - data: ExifData; - error?: string; -} + /** + * Uploads an image specified by the @param source to Dash's /public/files/ + * directory, and returns information generated during that upload + * + * @param {string} source is either the absolute path of an already uploaded image or + * the url of a remote image + * @param {string} filename dictates what to call the image. If not specified, + * the name {@param prefix}_upload_{GUID} + * @param {string} prefix is a string prepended to the generated image name in the + * event that @param filename is not specified + * + * @returns {ImageUploadInformation} This method returns + * 1) the paths to the uploaded images (plural due to resizing) + * 2) the file name of each of the resized images + * 3) the size of the image, in bytes (4432130) + * 4) the content type of the image, i.e. image/(jpeg | png | ...) 
+ */ + export const UploadImage = async (source: string, filename?: string, format?: string, prefix: string = ""): Promise => { + const metadata = await InspectImage(source); + return UploadInspectedImage(metadata, filename, format, prefix); + }; -export enum Partitions { - pdf_text = "pdf_text", - images = "images", - videos = "videos" -} + export interface InspectionResults { + isLocal: boolean; + stream: any; + normalizedUrl: string; + exifData: EnrichedExifData; + contentSize?: number; + contentType?: string; + } -export async function buildFilePartitions() { - const pending = Object.keys(Partitions).map(sub => createIfNotExists(filesDirectory + sub)); - return Promise.all(pending); -} + export interface EnrichedExifData { + data: ExifData; + error?: string; + } -/** - * Based on the url's classification as local or remote, gleans - * as much information as possible about the specified image - * - * @param source is the path or url to the image in question - */ -export const InspectImage = async (source: string): Promise => { - const { isLocal, stream, normalized: normalizedUrl } = classify(source); - const exifData = await parseExifData(source); - const results = { - exifData, - isLocal, - stream, - normalizedUrl - }; - // stop here if local, since request.head() can't handle local paths, only urls on the web - if (isLocal) { - return results; + export async function buildFileDirectories() { + const pending = Object.keys(Directory).map(sub => createIfNotExists(`${filesDirectory}/${sub}`)); + return Promise.all(pending); } - const metadata = (await new Promise((resolve, reject) => { - request.head(source, async (error, res) => { - if (error) { - return reject(error); - } - resolve(res); - }); - })).headers; - return { - contentSize: parseInt(metadata[size]), - contentType: metadata[type], - ...results - }; -}; - -export const UploadInspectedImage = async (metadata: InspectionResults, filename?: string, prefix = ""): Promise => { - const { isLocal, stream, normalizedUrl, contentSize, contentType, exifData } = metadata; - const resolved = filename ? 
sanitize(filename) : generate(prefix, normalizedUrl); - const extension = sanitizeExtension(normalizedUrl || resolved); - let information: UploadInformation = { - mediaPaths: [], - fileNames: { clean: resolved }, - exifData, - contentSize, - contentType, - }; - const { pngs, jpgs } = SharedMediaTypes; - return new Promise(async (resolve, reject) => { - const resizers = [ - { resizer: sharp().rotate(), suffix: "_o" }, - ...Object.values(Sizes).map(size => ({ - resizer: sharp().resize(size.width, undefined, { withoutEnlargement: true }).rotate(), - suffix: size.suffix - })) - ]; - if (pngs.includes(extension)) { - resizers.forEach(element => element.resizer = element.resizer.png()); - } else if (jpgs.includes(extension)) { - resizers.forEach(element => element.resizer = element.resizer.jpeg()); - } - for (let resizer of resizers) { - const suffix = resizer.suffix; - let mediaPath: string; - await new Promise(resolve => { - const filename = resolved.substring(0, resolved.length - extension.length) + suffix + extension; - information.mediaPaths.push(mediaPath = uploadDirectory + filename); - information.fileNames[suffix] = filename; - stream(normalizedUrl).pipe(resizer.resizer).pipe(fs.createWriteStream(mediaPath)) - .on('close', resolve) - .on('error', reject); - }); + + /** + * Based on the url's classification as local or remote, gleans + * as much information as possible about the specified image + * + * @param source is the path or url to the image in question + */ + export const InspectImage = async (source: string): Promise => { + const { isLocal, stream, normalized: normalizedUrl } = classify(source); + const exifData = await parseExifData(source); + const results = { + exifData, + isLocal, + stream, + normalizedUrl + }; + // stop here if local, since request.head() can't handle local paths, only urls on the web + if (isLocal) { + return results; } - if (!isLocal) { - await new Promise(resolve => { - stream(normalizedUrl).pipe(fs.createWriteStream(uploadDirectory + resolved)).on('close', resolve); + const metadata = (await new Promise((resolve, reject) => { + request.head(source, async (error, res) => { + if (error) { + return reject(error); + } + resolve(res); }); - } - resolve(information); - }); -}; - -const classify = (url: string) => { - const isLocal = /Dash-Web(\\|\/)src(\\|\/)server(\\|\/)public(\\|\/)files/g.test(url); - return { - isLocal, - stream: isLocal ? fs.createReadStream : request, - normalized: isLocal ? path.normalize(url) : url + })).headers; + return { + contentSize: parseInt(metadata[size]), + contentType: metadata[type], + ...results + }; }; -}; - -const parseExifData = async (source: string): Promise => { - return new Promise(resolve => { - new ExifImage(source, (error, data) => { - let reason: Opt = undefined; - if (error) { - reason = (error as any).code; + + export async function MoveParsedFile(absolutePath: string, destination: Directory): Promise<{ clientAccessPath: Opt }> { + return new Promise<{ clientAccessPath: Opt }>(resolve => { + const filename = basename(absolutePath); + const destinationPath = serverPathToFile(destination, filename); + fs.rename(absolutePath, destinationPath, error => { + resolve({ clientAccessPath: error ? 
undefined : clientPathToFile(destination, filename) }); + }); + }); + } + + export const UploadInspectedImage = async (metadata: InspectionResults, filename?: string, format?: string, prefix = ""): Promise => { + const { isLocal, stream, normalizedUrl, contentSize, contentType, exifData } = metadata; + const resolved = filename || generate(prefix, normalizedUrl); + const extension = format || sanitizeExtension(normalizedUrl || resolved); + let information: ImageUploadInformation = { + clientAccessPath: clientPathToFile(Directory.images, resolved), + serverAccessPaths: {}, + exifData, + contentSize, + contentType, + }; + const { pngs, jpgs } = AcceptibleMedia; + return new Promise(async (resolve, reject) => { + const resizers = [ + { resizer: sharp().rotate(), suffix: SizeSuffix.Original }, + ...Object.values(Sizes).map(size => ({ + resizer: sharp().resize(size.width, undefined, { withoutEnlargement: true }).rotate(), + suffix: size.suffix + })) + ]; + if (pngs.includes(extension)) { + resizers.forEach(element => element.resizer = element.resizer.png()); + } else if (jpgs.includes(extension)) { + resizers.forEach(element => element.resizer = element.resizer.jpeg()); + } + for (let { resizer, suffix } of resizers) { + let mediaPath: string; + await new Promise(resolve => { + const filename = InjectSize(resolved, suffix); + information.serverAccessPaths[suffix] = serverPathToFile(Directory.images, filename); + stream(normalizedUrl).pipe(resizer).pipe(fs.createWriteStream(serverPathToFile(Directory.images, filename))) + .on('close', resolve) + .on('error', reject); + }); } - resolve({ data, error: reason }); + if (isLocal) { + await new Promise(resolve => { + fs.unlink(normalizedUrl, error => resolve(error === null)); + }); + } + resolve(information); }); - }); -}; + }; -export const createIfNotExists = async (path: string) => { - if (await new Promise(resolve => fs.exists(path, resolve))) { - return true; - } - return new Promise(resolve => fs.mkdir(path, error => resolve(error === null))); -}; + const classify = (url: string) => { + const isLocal = /Dash-Web(\\|\/)src(\\|\/)server(\\|\/)public(\\|\/)files/g.test(url); + return { + isLocal, + stream: isLocal ? fs.createReadStream : request, + normalized: isLocal ? 
path.normalize(url) : url + }; + }; -export const Destroy = (mediaPath: string) => new Promise(resolve => fs.unlink(mediaPath, error => resolve(error === null))); + const parseExifData = async (source: string): Promise => { + return new Promise(resolve => { + new ExifImage(source, (error, data) => { + let reason: Opt = undefined; + if (error) { + reason = (error as any).code; + } + resolve({ data, error: reason }); + }); + }); + }; } \ No newline at end of file diff --git a/src/server/SharedMediaTypes.ts b/src/server/SharedMediaTypes.ts index 3d3234125..8d0f441f0 100644 --- a/src/server/SharedMediaTypes.ts +++ b/src/server/SharedMediaTypes.ts @@ -1,9 +1,8 @@ -export namespace SharedMediaTypes { - +export namespace AcceptibleMedia { export const gifs = [".gif"]; export const pngs = [".png"]; export const jpgs = [".jpg", ".jpeg"]; export const imageFormats = [...pngs, ...jpgs, ...gifs]; export const videoFormats = [".mov", ".mp4"]; - + export const applicationFormats = [".pdf"]; } \ No newline at end of file diff --git a/src/server/database.ts b/src/server/database.ts index b81fc03a4..db81245c1 100644 --- a/src/server/database.ts +++ b/src/server/database.ts @@ -293,7 +293,7 @@ export namespace Database { }; export const QueryUploadHistory = async (contentSize: number) => { - return SanitizedSingletonQuery({ contentSize }, AuxiliaryCollections.GooglePhotosUploadHistory); + return SanitizedSingletonQuery({ contentSize }, AuxiliaryCollections.GooglePhotosUploadHistory); }; export namespace GoogleAuthenticationToken { @@ -322,7 +322,7 @@ export namespace Database { } - export const LogUpload = async (information: DashUploadUtils.UploadInformation) => { + export const LogUpload = async (information: DashUploadUtils.ImageUploadInformation) => { const bundle = { _id: Utils.GenerateDeterministicGuid(String(information.contentSize!)), ...information diff --git a/src/server/index.ts b/src/server/index.ts index d02a6005e..d77923710 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -24,7 +24,7 @@ import GooglePhotosManager from "./ApiManagers/GooglePhotosManager"; import DiagnosticManager from "./ApiManagers/DiagnosticManager"; export const publicDirectory = path.resolve(__dirname, "public"); -export const filesDirectory = path.resolve(publicDirectory, "files") + "/"; +export const filesDirectory = path.resolve(publicDirectory, "files"); /** * These are the functions run before the server starts @@ -34,7 +34,7 @@ export const filesDirectory = path.resolve(publicDirectory, "files") + "/"; async function preliminaryFunctions() { await GoogleCredentialsLoader.loadCredentials(); GoogleApiServerUtils.processProjectCredentials(); - await DashUploadUtils.buildFilePartitions(); + await DashUploadUtils.buildFileDirectories(); await log_execution({ startMessage: "attempting to initialize mongodb connection", endMessage: "connection outcome determined", -- cgit v1.2.3-70-g09d2 From db6d3f77b1b429c1942019b79c44e378eb8b1ee4 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Wed, 27 Nov 2019 04:25:55 -0500 Subject: fixed google photos upload by appending size suffix --- src/client/util/Import & Export/DirectoryImportBox.tsx | 10 ++++------ src/server/ApiManagers/GooglePhotosManager.ts | 4 ++-- src/server/DashUploadUtils.ts | 14 +++++++------- 3 files changed, 13 insertions(+), 15 deletions(-) (limited to 'src') diff --git a/src/client/util/Import & Export/DirectoryImportBox.tsx b/src/client/util/Import & Export/DirectoryImportBox.tsx index 16ae50685..b5e806a97 100644 --- a/src/client/util/Import & 
Export/DirectoryImportBox.tsx +++ b/src/client/util/Import & Export/DirectoryImportBox.tsx @@ -119,17 +119,15 @@ export default class DirectoryImportBox extends React.Component runInAction(() => this.completed += batch.length); }); - const size = "_o"; - await Promise.all(uploads.map(async upload => { - const type = upload.type; - const path = Utils.prepend(upload.clientAccessPath); + await Promise.all(uploads.map(async ({ name, type, clientAccessPath, exifData }) => { + const path = Utils.prepend(clientAccessPath); const options = { nativeWidth: 300, width: 300, - title: upload.name + title: name }; const document = await Docs.Get.DocumentFromType(type, path, options); - const { data, error } = upload.exifData; + const { data, error } = exifData; if (document) { Doc.GetProto(document).exif = error || Docs.Get.DocumentHierarchyFromJson(data); docs.push(document); diff --git a/src/server/ApiManagers/GooglePhotosManager.ts b/src/server/ApiManagers/GooglePhotosManager.ts index 5a709688b..4a0c0b936 100644 --- a/src/server/ApiManagers/GooglePhotosManager.ts +++ b/src/server/ApiManagers/GooglePhotosManager.ts @@ -5,7 +5,7 @@ import { GoogleApiServerUtils } from "../apis/google/GoogleApiServerUtils"; import { BatchedArray, TimeUnit } from "array-batcher"; import { GooglePhotosUploadUtils } from "../apis/google/GooglePhotosUploadUtils"; import { Opt } from "../../new_fields/Doc"; -import { DashUploadUtils } from "../DashUploadUtils"; +import { DashUploadUtils, InjectSize, SizeSuffix } from "../DashUploadUtils"; import { Database } from "../database"; const authenticationError = "Unable to authenticate Google credentials before uploading to Google Photos!"; @@ -55,7 +55,7 @@ export default class GooglePhotosManager extends ApiManager { for (let index = 0; index < batch.length; index++) { const { url, description } = batch[index]; const fail = (reason: string) => failed.push({ reason, batch: completedBatches + 1, index, url }); - const uploadToken = await GooglePhotosUploadUtils.DispatchGooglePhotosUpload(token, url).catch(fail); + const uploadToken = await GooglePhotosUploadUtils.DispatchGooglePhotosUpload(token, InjectSize(url, SizeSuffix.Original)).catch(fail); if (!uploadToken) { fail(`${path.extname(url)} is not an accepted extension`); } else { diff --git a/src/server/DashUploadUtils.ts b/src/server/DashUploadUtils.ts index 0a670ec01..81cd2d602 100644 --- a/src/server/DashUploadUtils.ts +++ b/src/server/DashUploadUtils.ts @@ -21,12 +21,12 @@ export enum SizeSuffix { Original = "_o" } -export namespace DashUploadUtils { +export function InjectSize(filename: string, size: SizeSuffix) { + const extension = path.extname(filename).toLowerCase(); + return filename.substring(0, filename.length - extension.length) + size + extension; +} - function InjectSize(filename: string, size: SizeSuffix) { - const extension = path.extname(filename).toLowerCase(); - return filename.substring(0, filename.length - extension.length) + size + extension; - } +export namespace DashUploadUtils { export interface Size { width: number; @@ -72,8 +72,8 @@ export namespace DashUploadUtils { switch (category) { case "image": if (imageFormats.includes(format)) { - const { clientAccessPath } = await UploadImage(path, basename(path), format); - return { clientAccessPath, name, type }; + const results = await UploadImage(path, basename(path), format); + return { ...results, name, type }; } case "video": if (videoFormats.includes(format)) { -- cgit v1.2.3-70-g09d2 From ee1ac7b8c9550cc842e91985c1a92d79ce0e5235 Mon Sep 17 00:00:00 
2001 From: Sam Wilkins Date: Wed, 27 Nov 2019 05:34:27 -0500 Subject: constant --- src/server/DashUploadUtils.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) (limited to 'src') diff --git a/src/server/DashUploadUtils.ts b/src/server/DashUploadUtils.ts index 81cd2d602..c831eb072 100644 --- a/src/server/DashUploadUtils.ts +++ b/src/server/DashUploadUtils.ts @@ -61,9 +61,10 @@ export namespace DashUploadUtils { contentType?: string; } + const { imageFormats, videoFormats, applicationFormats } = AcceptibleMedia; + export async function upload(file: File): Promise { const { type, path, name } = file; - const { imageFormats, videoFormats, applicationFormats } = AcceptibleMedia; const types = type.split("/"); const category = types[0]; -- cgit v1.2.3-70-g09d2 From e5ba8e86b4da2af65bde85589baad5a6fae9f627 Mon Sep 17 00:00:00 2001 From: Bob Zeleznik Date: Wed, 27 Nov 2019 09:39:19 -0500 Subject: fixes for fade image overlays --- src/client/views/nodes/ImageBox.scss | 1 + src/client/views/nodes/ImageBox.tsx | 31 ++++++++++++++++++++++++------- 2 files changed, 25 insertions(+), 7 deletions(-) (limited to 'src') diff --git a/src/client/views/nodes/ImageBox.scss b/src/client/views/nodes/ImageBox.scss index 96ea4d0d6..3b42c2352 100644 --- a/src/client/views/nodes/ImageBox.scss +++ b/src/client/views/nodes/ImageBox.scss @@ -112,6 +112,7 @@ .imageBox-fadeBlocker { width: 100%; height: 100%; + position: absolute; background: black; display: flex; flex-direction: row; diff --git a/src/client/views/nodes/ImageBox.tsx b/src/client/views/nodes/ImageBox.tsx index bf82da281..14523b2b4 100644 --- a/src/client/views/nodes/ImageBox.tsx +++ b/src/client/views/nodes/ImageBox.tsx @@ -232,6 +232,22 @@ export class ImageBox extends DocAnnotatableComponent console.log(err)); } + fadesize = (srcpath: string) => { + requestImageSize(srcpath) + .then((size: any) => { + let rotation = NumCast(this.dataDoc.rotation) % 180; + let realsize = rotation === 90 || rotation === 270 ? { height: size.width, width: size.height } : size; + let aspect = realsize.height / realsize.width; + if (this.Document.width && (Math.abs(1 - NumCast(this.Document.height) / NumCast(this.Document.width) / (realsize.height / realsize.width)) > 0.1)) { + setTimeout(action(() => { + this.Document.height = this.Document[WidthSym]() * aspect; + this.Document.nativeHeight = realsize.height; + this.Document.nativeWidth = realsize.width; + }), 0); + } + }) + .catch((err: any) => console.log(err)); + } @action onPointerEnter = () => { @@ -280,25 +296,26 @@ export class ImageBox extends DocAnnotatableComponent 20) { let alts = DocListCast(extensionDoc.Alternates); - let altpaths = alts.filter(doc => doc.data instanceof ImageField).map(doc => this.choosePath((doc.data as ImageField).url)); + let altpaths = alts.filter(doc => doc.data instanceof ImageField).map(doc => [this.choosePath((doc.data as ImageField).url), doc[WidthSym]() / doc[HeightSym]()]); let field = this.dataDoc[this.props.fieldKey]; // if (w < 100 && this._smallRetryCount < 10) this._curSuffix = "_s"; // else if (w < 600 && this._mediumRetryCount < 10) this._curSuffix = "_m"; // else if (this._largeRetryCount < 10) this._curSuffix = "_l"; - if (field instanceof ImageField) paths = [this.choosePath(field.url)]; + if (field instanceof ImageField) paths = [[this.choosePath(field.url), nativeWidth / nativeHeight]]; paths.push(...altpaths); // } let dragging = !SelectionManager.GetIsDragging() ? 
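// Illustrative sketch (not part of the patch above): fadesize/resize correct a document's stored
// height against the image's true aspect ratio, swapping the axes when the image is rotated by
// 90 or 270 degrees. The arithmetic in isolation (names here are placeholders, not Dash APIs):
interface NativeSize { width: number; height: number; }

function orientedSize(native: NativeSize, rotationDegrees: number): NativeSize {
    // a quarter-turn swaps which native axis maps to the on-screen width
    return rotationDegrees % 180 !== 0 ? { width: native.height, height: native.width } : native;
}

function correctedHeight(documentWidth: number, native: NativeSize, rotationDegrees: number): number {
    const real = orientedSize(native, rotationDegrees);
    return documentWidth * (real.height / real.width);   // height that preserves the real aspect
}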
"" : "-dragging"; let rotation = NumCast(this.Document.rotation, 0); let aspect = (rotation % 180) ? this.Document[HeightSym]() / this.Document[WidthSym]() : 1; let shift = (rotation % 180) ? (nativeHeight - nativeWidth / aspect) / 2 : 0; - let srcpath = paths[Math.min(paths.length - 1, (this.Document.curPage || 0))]; - let fadepath = paths[Math.min(paths.length - 1, 1)]; + let srcpath = paths[Math.min(paths.length - 1, (this.Document.curPage || 0))][0] as string; + let srcaspect = paths[Math.min(paths.length - 1, (this.Document.curPage || 0))][1] as number; + let fadepath = paths[Math.min(paths.length - 1, 1)][0] as string; !this.Document.ignoreAspect && this.resize(srcpath); @@ -310,7 +327,7 @@ export class ImageBox extends DocAnnotatableComponent - {fadepath === srcpath ? (null) :
    + {fadepath === srcpath ? (null) :
    Date: Wed, 27 Nov 2019 12:55:49 -0500 Subject: fixing templates & workspace/document sidebar treeviews --- src/client/documents/Documents.ts | 2 +- src/client/views/MainView.tsx | 13 +++++------ src/client/views/collections/CollectionSubView.tsx | 2 +- .../views/collections/CollectionTreeView.tsx | 5 +++-- src/client/views/nodes/FormattedTextBox.tsx | 26 +++++++++++----------- src/client/views/search/FilterBox.tsx | 2 +- src/new_fields/Doc.ts | 18 +++++++-------- 7 files changed, 34 insertions(+), 34 deletions(-) (limited to 'src') diff --git a/src/client/documents/Documents.ts b/src/client/documents/Documents.ts index dea057b93..0befec1da 100644 --- a/src/client/documents/Documents.ts +++ b/src/client/documents/Documents.ts @@ -299,7 +299,7 @@ export namespace Docs { // whatever options pertain to this specific prototype let options = { title, type, baseProto: true, ...defaultOptions, ...(template.options || {}) }; options.layout = layout.view.LayoutString(layout.dataField); - return Doc.assign(new Doc(prototypeId, true), { ...options, baseLayout: options.layout }); + return Doc.assign(new Doc(prototypeId, true), { ...options }); } } diff --git a/src/client/views/MainView.tsx b/src/client/views/MainView.tsx index e6dd2fcad..f3a1e799c 100644 --- a/src/client/views/MainView.tsx +++ b/src/client/views/MainView.tsx @@ -187,22 +187,21 @@ export class MainView extends React.Component { @action createNewWorkspace = async (id?: string) => { + let workspaces = Cast(this.userDoc.workspaces, Doc) as Doc; + let workspaceCount = DocListCast(workspaces.data).length + 1; let freeformOptions: DocumentOptions = { x: 0, y: 400, width: this._panelWidth * .7, height: this._panelHeight, - title: "My Blank Collection", + title: "Collection " + workspaceCount, backgroundColor: "white" }; - let workspaces: FieldResult; let freeformDoc = CurrentUserUtils.GuestTarget || Docs.Create.FreeformDocument([], freeformOptions); + Doc.AddDocToList(Doc.GetProto(CurrentUserUtils.UserDocument.documents as Doc), "data", freeformDoc); var dockingLayout = { content: [{ type: 'row', content: [CollectionDockingView.makeDocumentConfig(freeformDoc, freeformDoc, 600)] }] }; - let mainDoc = Docs.Create.DockDocument([freeformDoc], JSON.stringify(dockingLayout), {}, id); - if (this.userDoc && ((workspaces = Cast(this.userDoc.workspaces, Doc)) instanceof Doc)) { - Doc.AddDocToList(workspaces, "data", mainDoc); - mainDoc.title = `Workspace ${DocListCast(workspaces.data).length}`; - } + let mainDoc = Docs.Create.DockDocument([freeformDoc], JSON.stringify(dockingLayout), { title: `Workspace ${workspaceCount}` }, id); + Doc.AddDocToList(workspaces, "data", mainDoc); // bcz: strangely, we need a timeout to prevent exceptions/issues initializing GoldenLayout (the rendering engine for Main Container) setTimeout(() => this.openWorkspace(mainDoc), 0); } diff --git a/src/client/views/collections/CollectionSubView.tsx b/src/client/views/collections/CollectionSubView.tsx index d7e9494a3..368e988d4 100644 --- a/src/client/views/collections/CollectionSubView.tsx +++ b/src/client/views/collections/CollectionSubView.tsx @@ -218,7 +218,7 @@ export function CollectionSubView(schemaCtor: (doc: Doc) => T) { } }); } else { - let htmlDoc = Docs.Create.HtmlDocument(html, { ...options, width: 300, height: 300, documentText: text }); + let htmlDoc = Docs.Create.HtmlDocument(html, { ...options, title: "-web page-", width: 300, height: 300, documentText: text }); this.props.addDocument(htmlDoc); } return; diff --git 
a/src/client/views/collections/CollectionTreeView.tsx b/src/client/views/collections/CollectionTreeView.tsx index 42cdd1455..83f09b8e9 100644 --- a/src/client/views/collections/CollectionTreeView.tsx +++ b/src/client/views/collections/CollectionTreeView.tsx @@ -588,7 +588,8 @@ export class CollectionTreeView extends CollectionSubView(Document) { ContextMenu.Instance.displayMenu(e.pageX - 15, e.pageY - 15); } else { let layoutItems: ContextMenuProps[] = []; - layoutItems.push({ description: this.props.Document.preventTreeViewOpen ? "Persist Treeview State" : "Abandon Treeview State", event: () => this.props.Document.preventTreeViewOpen = !this.props.Document.preventTreeViewOpen, icon: "paint-brush" }); + layoutItems.push({ description: (this.props.Document.preventTreeViewOpen ? "Persist" : "Abandon") + "Treeview State", event: () => this.props.Document.preventTreeViewOpen = !this.props.Document.preventTreeViewOpen, icon: "paint-brush" }); + layoutItems.push({ description: (this.props.Document.hideHeaderFields ? "Show" : "Hide") + " Header Fields", event: () => this.props.Document.hideHeaderFields = !this.props.Document.hideHeaderFields, icon: "paint-brush" }); ContextMenu.Instance.addItem({ description: "Treeview Options ...", subitems: layoutItems, icon: "eye" }); } } @@ -634,7 +635,7 @@ export class CollectionTreeView extends CollectionSubView(Document) { { TreeView.GetChildElements(this.childDocs, this.props.Document[Id], this.props.Document, this.props.DataDoc, this.props.fieldKey, this.props.ContainingCollectionDoc, undefined, addDoc, this.remove, moveDoc, dropAction, this.props.addDocTab, this.props.pinToPres, this.props.ScreenToLocalTransform, - this.outerXf, this.props.active, this.props.PanelWidth, this.props.ChromeHeight, this.props.renderDepth, () => !this.props.Document.hideHeaderFields, + this.outerXf, this.props.active, this.props.PanelWidth, this.props.ChromeHeight, this.props.renderDepth, () => BoolCast(this.props.Document.hideHeaderFields), BoolCast(this.props.Document.preventTreeViewOpen), []) } diff --git a/src/client/views/nodes/FormattedTextBox.tsx b/src/client/views/nodes/FormattedTextBox.tsx index d601e188d..657b4b406 100644 --- a/src/client/views/nodes/FormattedTextBox.tsx +++ b/src/client/views/nodes/FormattedTextBox.tsx @@ -267,19 +267,19 @@ export class FormattedTextBox extends DocAnnotatableComponent<(FieldViewProps & // embed document when dragging with a userDropAction or an embedDoc flag set } else if (de.data.userDropAction || de.data.embedDoc) { let target = de.data.droppedDocuments[0]; - const link = DocUtils.MakeLink({ doc: this.dataDoc, ctx: this.props.ContainingCollectionDoc }, { doc: target }, "Embedded Doc:" + target.title); - if (link) { - target.fitToBox = true; - let node = schema.nodes.dashDoc.create({ - width: target[WidthSym](), height: target[HeightSym](), - title: "dashDoc", docid: target[Id], - float: "right" - }); - let view = this._editorView!; - view.dispatch(view.state.tr.insert(view.posAtCoords({ left: de.x, top: de.y })!.pos, node)); - this.tryUpdateHeight(); - e.stopPropagation(); - } + // const link = DocUtils.MakeLink({ doc: this.dataDoc, ctx: this.props.ContainingCollectionDoc }, { doc: target }, "Embedded Doc:" + target.title); + // if (link) { + target.fitToBox = true; + let node = schema.nodes.dashDoc.create({ + width: target[WidthSym](), height: target[HeightSym](), + title: "dashDoc", docid: target[Id], + float: "right" + }); + let view = this._editorView!; + view.dispatch(view.state.tr.insert(view.posAtCoords({ left: de.x, top: 
de.y })!.pos, node)); + this.tryUpdateHeight(); + e.stopPropagation(); + // } } // otherwise, fall through to outer collection to handle drop } } diff --git a/src/client/views/search/FilterBox.tsx b/src/client/views/search/FilterBox.tsx index 62f3aba4c..7812c9d97 100644 --- a/src/client/views/search/FilterBox.tsx +++ b/src/client/views/search/FilterBox.tsx @@ -187,7 +187,7 @@ export class FilterBox extends React.Component { let collections: Doc[] = []; selectedDocs.forEach(async element => { - let layout: string = StrCast(element.props.Document.baseLayout); + let layout: string = StrCast(element.props.Document.layout); //checks if selected view (element) is a collection. if it is, adds to list to search through if (layout.indexOf("Collection") > -1) { //makes sure collections aren't added more than once diff --git a/src/new_fields/Doc.ts b/src/new_fields/Doc.ts index 271b7cfd3..a811322d7 100644 --- a/src/new_fields/Doc.ts +++ b/src/new_fields/Doc.ts @@ -602,15 +602,15 @@ export namespace Doc { fieldTemplate.scale = 1; fieldTemplate.showTitle = suppressTitle ? undefined : "title"; let data = fieldTemplate.data; - setTimeout(action(() => { - !templateDataDoc[metadataFieldName] && data instanceof ObjectField && (Doc.GetProto(templateDataDoc)[metadataFieldName] = ObjectField.MakeCopy(data)); - let layout = StrCast(fieldLayoutDoc.layout).replace(/fieldKey={"[^"]*"}/, `fieldKey={"${metadataFieldName}"}`); - let layoutDelegate = Doc.Layout(fieldTemplate); - layoutDelegate.layout = layout; - fieldTemplate.layout = layoutDelegate !== fieldTemplate ? layoutDelegate : layout; - if (fieldTemplate.backgroundColor !== templateDataDoc.defaultBackgroundColor) fieldTemplate.defaultBackgroundColor = fieldTemplate.backgroundColor; - fieldTemplate.proto = templateDataDoc; - }), 0); + // setTimeout(action(() => { + !templateDataDoc[metadataFieldName] && data instanceof ObjectField && (Doc.GetProto(templateDataDoc)[metadataFieldName] = ObjectField.MakeCopy(data)); + let layout = StrCast(fieldLayoutDoc.layout).replace(/fieldKey={"[^"]*"}/, `fieldKey={"${metadataFieldName}"}`); + let layoutDelegate = Doc.Layout(fieldTemplate); + layoutDelegate.layout = layout; + fieldTemplate.layout = layoutDelegate !== fieldTemplate ? 
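// Illustrative sketch (not part of the patch above): the drop handler in FormattedTextBox maps
// the pointer position to a ProseMirror document position and splices a dashDoc node in there.
// The generic pattern, with placeholder names; only posAtCoords, create and tr.insert come from
// the ProseMirror API itself:
import { EditorView } from "prosemirror-view";
import { NodeType } from "prosemirror-model";

function insertNodeAtScreenPoint(view: EditorView, nodeType: NodeType, x: number, y: number,
    attrs?: { [key: string]: any }): boolean {
    const coords = view.posAtCoords({ left: x, top: y });
    if (!coords) return false;                              // the point missed the editor
    view.dispatch(view.state.tr.insert(coords.pos, nodeType.create(attrs)));
    return true;
}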
layoutDelegate : layout; + if (fieldTemplate.backgroundColor !== templateDataDoc.defaultBackgroundColor) fieldTemplate.defaultBackgroundColor = fieldTemplate.backgroundColor; + fieldTemplate.proto = templateDataDoc; + // }), 0); return true; } -- cgit v1.2.3-70-g09d2 From b8fd7bee09ad06ca6ae8fb9e607bf54ca42d0fad Mon Sep 17 00:00:00 2001 From: kimdahey Date: Wed, 27 Nov 2019 16:45:57 -0500 Subject: committing:w --- src/client/views/nodes/FormattedTextBox.scss | 45 +++++++++++++++------------- src/client/views/nodes/FormattedTextBox.tsx | 2 +- 2 files changed, 26 insertions(+), 21 deletions(-) (limited to 'src') diff --git a/src/client/views/nodes/FormattedTextBox.scss b/src/client/views/nodes/FormattedTextBox.scss index 77cdd3d42..ab4a08d62 100644 --- a/src/client/views/nodes/FormattedTextBox.scss +++ b/src/client/views/nodes/FormattedTextBox.scss @@ -50,6 +50,7 @@ width: 100%; height: 100%; } + .formattedTextBox-sidebar-handle { position: absolute; top: calc(50% - 17.5px); @@ -58,19 +59,23 @@ background: lightgray; border-radius: 20px; } -.formattedTextBox-cont > .formattedTextBox-sidebar-handle { + +.formattedTextBox-cont>.formattedTextBox-sidebar-handle { right: 0; left: unset; } -.formattedTextBox-sidebar, .formattedTextBox-sidebar-inking { - border-left: dashed 1px black; - height: 100%; + +.formattedTextBox-sidebar, +.formattedTextBox-sidebar-inking { + border-left: dashed 1px black; + height: 100%; display: inline-block; position: absolute; right: 0; - > .formattedTextBox-sidebar-handle { - right:unset; - left:-5; + + >.formattedTextBox-sidebar-handle { + right: unset; + left: -5; } } @@ -222,7 +227,7 @@ footnote::after { } ; - font-size: 24; + font-size: 18; ul, ol { @@ -238,7 +243,7 @@ footnote::after { } ; - font-size: 18; + font-size: 14; ul, ol { @@ -254,7 +259,7 @@ footnote::after { } ; - font-size: 14; + font-size: 12; ul, ol { @@ -360,70 +365,70 @@ footnote::after { } .decimal1:before { - content: counter(deci1) ") "; + content: counter(deci1) ". "; counter-increment: deci1; display: inline-block; min-width: 30; } .decimal2:before { - content: counter(deci1) "."counter(deci2) ") "; + content: counter(deci1) "."counter(deci2) ". "; counter-increment: deci2; display: inline-block; min-width: 35 } .decimal3:before { - content: counter(deci1) "."counter(deci2) "."counter(deci3) ") "; + content: counter(deci1) "."counter(deci2) "."counter(deci3) ". "; counter-increment: deci3; display: inline-block; min-width: 35 } .decimal4:before { - content: counter(deci1) "."counter(deci2) "."counter(deci3) "."counter(deci4) ") "; + content: counter(deci1) "."counter(deci2) "."counter(deci3) "."counter(deci4) ". "; counter-increment: deci4; display: inline-block; min-width: 40 } .decimal5:before { - content: counter(deci1) "."counter(deci2) "."counter(deci3) "."counter(deci4) "."counter(deci5) ") "; + content: counter(deci1) "."counter(deci2) "."counter(deci3) "."counter(deci4) "."counter(deci5) ". "; counter-increment: deci5; display: inline-block; min-width: 40 } .decimal6:before { - content: counter(deci1) "."counter(deci2) "."counter(deci3) "."counter(deci4) "."counter(deci5) "."counter(deci6) ") "; + content: counter(deci1) "."counter(deci2) "."counter(deci3) "."counter(deci4) "."counter(deci5) "."counter(deci6) ". 
"; counter-increment: deci6; display: inline-block; min-width: 45 } .decimal7:before { - content: counter(deci1) "."counter(deci2) "."counter(deci3) "."counter(deci4) "."counter(deci5) "."counter(deci6) "."counter(deci7) ") "; + content: counter(deci1) "."counter(deci2) "."counter(deci3) "."counter(deci4) "."counter(deci5) "."counter(deci6) "."counter(deci7) ". "; counter-increment: deci7; display: inline-block; min-width: 50 } .upper-alpha:before { - content: counter(deci1) "."counter(ualph, upper-alpha) ") "; + content: counter(deci1) "."counter(ualph, upper-alpha) ". "; counter-increment: ualph; display: inline-block; min-width: 35 } .lower-roman:before { - content: counter(deci1) "."counter(ualph, upper-alpha) "."counter(lroman, lower-roman) ") "; + content: counter(deci1) "."counter(ualph, upper-alpha) "."counter(lroman, lower-roman) ". "; counter-increment: lroman; display: inline-block; min-width: 50 } .lower-alpha:before { - content: counter(deci1) "."counter(ualph, upper-alpha) "."counter(lroman, lower-roman) "."counter(lalpha, lower-alpha) ") "; + content: counter(deci1) "."counter(ualph, upper-alpha) "."counter(lroman, lower-roman) "."counter(lalpha, lower-alpha) ". "; counter-increment: lalpha; display: inline-block; min-width: 35 diff --git a/src/client/views/nodes/FormattedTextBox.tsx b/src/client/views/nodes/FormattedTextBox.tsx index 657b4b406..bad912944 100644 --- a/src/client/views/nodes/FormattedTextBox.tsx +++ b/src/client/views/nodes/FormattedTextBox.tsx @@ -1091,7 +1091,7 @@ export class FormattedTextBox extends DocAnnotatableComponent<(FieldViewProps & e.stopPropagation(); }} > + style={{ color: this._recording ? "red" : "blue", opacity: this._recording ? 1 : 0.5 }} icon={"microphone"} size="sm" />
    ); -- cgit v1.2.3-70-g09d2 From cb8d81b4a0963004ecc60122783716ce8a587d0b Mon Sep 17 00:00:00 2001 From: Bob Zeleznik Date: Wed, 27 Nov 2019 18:39:20 -0500 Subject: a couple of fixes for bulleted text list interactions. --- src/client/views/nodes/FormattedTextBox.scss | 241 +++------------------------ src/client/views/nodes/FormattedTextBox.tsx | 27 +-- 2 files changed, 42 insertions(+), 226 deletions(-) (limited to 'src') diff --git a/src/client/views/nodes/FormattedTextBox.scss b/src/client/views/nodes/FormattedTextBox.scss index ab4a08d62..984d51791 100644 --- a/src/client/views/nodes/FormattedTextBox.scss +++ b/src/client/views/nodes/FormattedTextBox.scss @@ -31,8 +31,8 @@ flex-direction: row; .formattedTextBox-dictation { - height: 20px; - width: 20px; + height: 12px; + width: 10px; top: 0px; left: 0px; position: absolute; @@ -219,218 +219,27 @@ footnote::after { padding-left: 0px; } - .decimal1-ol { - counter-reset: deci1; - - p { - display: inline - } - - ; - font-size: 18; - - ul, - ol { - padding-left: 30px; - } - } - - .decimal2-ol { - counter-reset: deci2; - - p { - display: inline - } - - ; - font-size: 14; - - ul, - ol { - padding-left: 30px; - } - } - - .decimal3-ol { - counter-reset: deci3; - - p { - display: inline - } - - ; - font-size: 12; - - ul, - ol { - padding-left: 30px; - } - } - - .decimal4-ol { - counter-reset: deci4; - - p { - display: inline - } - - ; - font-size: 10; - - ul, - ol { - padding-left: 30px; - } - } - - .decimal5-ol { - counter-reset: deci5; - - p { - display: inline - } - - ; - font-size: 10; - - ul, - ol { - padding-left: 30px; - } - } - - .decimal6-ol { - counter-reset: deci6; - - p { - display: inline - } - - ; - font-size: 10; - - ul, - ol { - padding-left: 30px; - } - } - - .decimal7-ol { - counter-reset: deci7; - - p { - display: inline - } - - ; - font-size: 10; - - ul, - ol { - padding-left: 30px; - } - } - - .upper-alpha-ol { - counter-reset: ualph; - - p { - display: inline - } - - ; - font-size: 18; - } - - .lower-roman-ol { - counter-reset: lroman; - - p { - display: inline - } - - ; - font-size: 14; - } - - .lower-alpha-ol { - counter-reset: lalpha; - - p { - display: inline - } - - ; - font-size: 10; - } - - .decimal1:before { - content: counter(deci1) ". "; - counter-increment: deci1; - display: inline-block; - min-width: 30; - } - - .decimal2:before { - content: counter(deci1) "."counter(deci2) ". "; - counter-increment: deci2; - display: inline-block; - min-width: 35 - } - - .decimal3:before { - content: counter(deci1) "."counter(deci2) "."counter(deci3) ". "; - counter-increment: deci3; - display: inline-block; - min-width: 35 - } - - .decimal4:before { - content: counter(deci1) "."counter(deci2) "."counter(deci3) "."counter(deci4) ". "; - counter-increment: deci4; - display: inline-block; - min-width: 40 - } - - .decimal5:before { - content: counter(deci1) "."counter(deci2) "."counter(deci3) "."counter(deci4) "."counter(deci5) ". "; - counter-increment: deci5; - display: inline-block; - min-width: 40 - } - - .decimal6:before { - content: counter(deci1) "."counter(deci2) "."counter(deci3) "."counter(deci4) "."counter(deci5) "."counter(deci6) ". "; - counter-increment: deci6; - display: inline-block; - min-width: 45 - } - - .decimal7:before { - content: counter(deci1) "."counter(deci2) "."counter(deci3) "."counter(deci4) "."counter(deci5) "."counter(deci6) "."counter(deci7) ". 
"; - counter-increment: deci7; - display: inline-block; - min-width: 50 - } - - .upper-alpha:before { - content: counter(deci1) "."counter(ualph, upper-alpha) ". "; - counter-increment: ualph; - display: inline-block; - min-width: 35 - } - - .lower-roman:before { - content: counter(deci1) "."counter(ualph, upper-alpha) "."counter(lroman, lower-roman) ". "; - counter-increment: lroman; - display: inline-block; - min-width: 50 - } - - .lower-alpha:before { - content: counter(deci1) "."counter(ualph, upper-alpha) "."counter(lroman, lower-roman) "."counter(lalpha, lower-alpha) ". "; - counter-increment: lalpha; - display: inline-block; - min-width: 35 - } + .decimal1-ol { counter-reset: deci1; p { display: inline }; ul, ol { padding-left: 30px; } } + .decimal2-ol { counter-reset: deci2; p { display: inline }; font-size: smaller; ul, ol { padding-left: 30px; } } + .decimal3-ol { counter-reset: deci3; p { display: inline }; font-size: smaller; ul, ol { padding-left: 30px; } } + .decimal4-ol { counter-reset: deci4; p { display: inline }; font-size: smaller; ul, ol { padding-left: 30px; } } + .decimal5-ol { counter-reset: deci5; p { display: inline }; font-size: smaller; ul, ol { padding-left: 30px; } } + .decimal6-ol { counter-reset: deci6; p { display: inline }; font-size: smaller; ul, ol { padding-left: 30px; } } + .decimal7-ol { counter-reset: deci7; p { display: inline }; font-size: smaller; ul, ol { padding-left: 30px; } } + + .upper-alpha-ol { counter-reset: ualph; p { display: inline}; font-size: smaller; } + .lower-roman-ol { counter-reset: lroman; p { display: inline}; font-size: smaller; } + .lower-alpha-ol { counter-reset: lalpha; p { display: inline}; font-size: smaller; } + + .decimal1:before { counter-increment: deci1; display: inline-block; content: counter(deci1) ". "; } + .decimal2:before { counter-increment: deci2; display: inline-block; content: counter(deci1) "."counter(deci2) ". "; } + .decimal3:before { counter-increment: deci3; display: inline-block; content: counter(deci1) "."counter(deci2) "."counter(deci3) ". "; } + .decimal4:before { counter-increment: deci4; display: inline-block; content: counter(deci1) "."counter(deci2) "."counter(deci3) "."counter(deci4) ". "; } + .decimal5:before { counter-increment: deci5; display: inline-block; content: counter(deci1) "."counter(deci2) "."counter(deci3) "."counter(deci4) "."counter(deci5) ". "; } + .decimal6:before { counter-increment: deci5; display: inline-block; content: counter(deci1) "."counter(deci2) "."counter(deci3) "."counter(deci4) "."counter(deci5) "."counter(deci6) ". "; } + .decimal7:before { counter-increment: deci5; display: inline-block; content: counter(deci1) "."counter(deci2) "."counter(deci3) "."counter(deci4) "."counter(deci5) "."counter(deci6) "."counter(deci7) ". "; } + + .upper-alpha:before { counter-increment: ualph; display: inline-block; content: counter(deci1) "."counter(ualph, upper-alpha) ". "; } + .lower-roman:before { counter-increment: lroman; display: inline-block; content: counter(deci1) "."counter(ualph, upper-alpha) "."counter(lroman, lower-roman) ". "; } + .lower-alpha:before { counter-increment: lalpha; display: inline-block; content: counter(deci1) "."counter(ualph, upper-alpha) "."counter(lroman, lower-roman) "."counter(lalpha, lower-alpha) ". 
"; } } \ No newline at end of file diff --git a/src/client/views/nodes/FormattedTextBox.tsx b/src/client/views/nodes/FormattedTextBox.tsx index bad912944..876f390d9 100644 --- a/src/client/views/nodes/FormattedTextBox.tsx +++ b/src/client/views/nodes/FormattedTextBox.tsx @@ -919,24 +919,30 @@ export class FormattedTextBox extends DocAnnotatableComponent<(FieldViewProps & } // this hackiness handles clicking on the list item bullets to do expand/collapse. the bullets are ::before pseudo elements so there's no real way to hit test against them. - hitBulletTargets(x: number, y: number, offsetX: number, select: boolean = false) { + hitBulletTargets(x: number, y: number, offsetX: number, select: boolean, highlightOnly = false) { clearStyleSheetRules(FormattedTextBox._bulletStyleSheet); if (this.props.isSelected(true) && offsetX < 40) { let pos = this._editorView!.posAtCoords({ left: x, top: y }); if (pos && pos.pos > 0) { let node = this._editorView!.state.doc.nodeAt(pos.pos); - let node2 = node && node.type === schema.nodes.paragraph ? this._editorView!.state.doc.nodeAt(pos.pos - 1) : undefined; - if (node === this._nodeClicked && node2 && (node2.type === schema.nodes.ordered_list || node2.type === schema.nodes.list_item)) { + let node2 = node?.type === schema.nodes.paragraph ? this._editorView!.state.doc.nodeAt(pos.pos - 1) : undefined; + if ((node === this._nodeClicked || highlightOnly) && (node2?.type === schema.nodes.ordered_list || node2?.type === schema.nodes.list_item)) { let hit = this._editorView!.domAtPos(pos.pos).node as any; // let beforeEle = document.querySelector("." + hit.className) as Element; let before = hit ? window.getComputedStyle(hit, ':before') : undefined; let beforeWidth = before ? Number(before.getPropertyValue('width').replace("px", "")) : undefined; - if (beforeWidth && offsetX < beforeWidth) { + if (beforeWidth && offsetX < beforeWidth * .9) { let ol = this._editorView!.state.doc.nodeAt(pos.pos - 2) ? 
this._editorView!.state.doc.nodeAt(pos.pos - 2) : undefined; - if (ol && ol.type === schema.nodes.ordered_list && select) { - this._editorView!.dispatch(this._editorView!.state.tr.setSelection(new NodeSelection(this._editorView!.state.doc.resolve(pos.pos - 2)))); - addStyleSheetRule(FormattedTextBox._bulletStyleSheet, hit.className + ":before", { background: "gray" }); + if (ol?.type === schema.nodes.ordered_list && select) { + if (!highlightOnly) { + this._editorView!.dispatch(this._editorView!.state.tr.setSelection(new NodeSelection(this._editorView!.state.doc.resolve(pos.pos - 2)))); + } + addStyleSheetRule(FormattedTextBox._bulletStyleSheet, hit.className + ":before", { background: "lightgray" }); } else { - this._editorView!.dispatch(this._editorView!.state.tr.setNodeMarkup(pos.pos - 1, node2.type, { ...node2.attrs, visibility: !node2.attrs.visibility })); + if (highlightOnly) { + addStyleSheetRule(FormattedTextBox._bulletStyleSheet, hit.className + ":before", { background: "lightgray" }); + } else { + this._editorView!.dispatch(this._editorView!.state.tr.setNodeMarkup(pos.pos - 1, node2.type, { ...node2.attrs, visibility: !node2.attrs.visibility })); + } } } } @@ -1045,6 +1051,7 @@ export class FormattedTextBox extends DocAnnotatableComponent<(FieldViewProps & onKeyDown={this.onKeyPress} onFocus={this.onFocused} onClick={this.onClick} + onPointerMove={e => this.hitBulletTargets(e.clientX, e.clientY, e.nativeEvent.offsetX, e.shiftKey, true)} onBlur={this.onBlur} onPointerUp={this.onPointerUp} onPointerDown={this.onPointerDown} @@ -1062,10 +1069,10 @@ export class FormattedTextBox extends DocAnnotatableComponent<(FieldViewProps &
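// Illustrative sketch (not part of the patch above): hitBulletTargets works around the fact that
// ::before pseudo-elements cannot be hit-tested directly. It reads the pseudo-element's computed
// width and compares it with the pointer's x offset inside the list item. In isolation:
function clickLandedOnBullet(listItem: HTMLElement, offsetX: number): boolean {
    const before = window.getComputedStyle(listItem, ":before");
    const bulletWidth = parseFloat(before.getPropertyValue("width"));
    // getPropertyValue returns "auto" (NaN after parseFloat) when there is no ::before content
    return !isNaN(bulletWidth) && offsetX < bulletWidth * 0.9;
}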
    this.props.PanelHeight()} + PanelHeight={this.props.PanelHeight} PanelWidth={() => this.sidebarWidth} annotationsKey={this.annotationsKey} - isAnnotationOverlay={true} + isAnnotationOverlay={false} focus={this.props.focus} isSelected={this.props.isSelected} select={emptyFunction} -- cgit v1.2.3-70-g09d2 From 68f49ef5daf3bf5c47d1d21c8f1cd2097947d071 Mon Sep 17 00:00:00 2001 From: Bob Zeleznik Date: Thu, 28 Nov 2019 01:04:59 -0500 Subject: small fixes to text for nested prosemirrors and firefox. --- src/client/util/RichTextSchema.tsx | 1 + src/client/views/Main.scss | 12 ++++++------ src/client/views/nodes/FormattedTextBox.tsx | 6 +++--- 3 files changed, 10 insertions(+), 9 deletions(-) (limited to 'src') diff --git a/src/client/util/RichTextSchema.tsx b/src/client/util/RichTextSchema.tsx index 0a717dff1..506c9767f 100644 --- a/src/client/util/RichTextSchema.tsx +++ b/src/client/util/RichTextSchema.tsx @@ -684,6 +684,7 @@ export class DashDocView { DocServer.GetRefField(node.attrs.docid).then(async dashDoc => { if (dashDoc instanceof Doc) { self._dashDoc = dashDoc; + dashDoc.hideSidebar = true; if (node.attrs.width !== dashDoc.width + "px" || node.attrs.height !== dashDoc.height + "px") { view.dispatch(view.state.tr.setNodeMarkup(getPos(), null, { ...node.attrs, width: dashDoc.width + "px", height: dashDoc.height + "px" })); } diff --git a/src/client/views/Main.scss b/src/client/views/Main.scss index 3b66160fb..f435821df 100644 --- a/src/client/views/Main.scss +++ b/src/client/views/Main.scss @@ -13,12 +13,12 @@ body { left: 0; } -div { - user-select: none; - -moz-user-select: none; - -webkit-user-select: none; - -ms-user-select: none; -} +// div { +// user-select: none; +// -moz-user-select: none; +// -webkit-user-select: none; +// -ms-user-select: none; +// } .jsx-parser { diff --git a/src/client/views/nodes/FormattedTextBox.tsx b/src/client/views/nodes/FormattedTextBox.tsx index 876f390d9..c8a629984 100644 --- a/src/client/views/nodes/FormattedTextBox.tsx +++ b/src/client/views/nodes/FormattedTextBox.tsx @@ -806,8 +806,8 @@ export class FormattedTextBox extends DocAnnotatableComponent<(FieldViewProps & if (selectOnLoad) { FormattedTextBox.SelectOnLoad = ""; this.props.select(false); + this._editorView!.focus(); } - this._editorView!.focus(); // add user mark for any first character that was typed since the user mark that gets set in KeyPress won't have been called yet. this._editorView!.state.storedMarks = [...(this._editorView!.state.storedMarks ? this._editorView!.state.storedMarks : []), schema.marks.user_mark.create({ userid: Doc.CurrentUserEmail, modified: Math.round(Date.now() / 1000 / 5) })]; } @@ -1064,7 +1064,7 @@ export class FormattedTextBox extends DocAnnotatableComponent<(FieldViewProps &
    - {this.sidebarWidthPercent === "0%" ? + {this.props.Document.hideSidebar ? (null) : this.sidebarWidthPercent === "0%" ?
    e.stopPropagation()} onClick={e => this.toggleSidebar()} /> :
    @@ -1081,7 +1081,7 @@ export class FormattedTextBox extends DocAnnotatableComponent<(FieldViewProps & whenActiveChanged={this.whenActiveChanged} removeDocument={this.removeDocument} moveDocument={this.moveDocument} - addDocument={this.addDocument} + addDocument={(doc:Doc) => { doc.hideSidebar = true; return this.addDocument(doc); }} CollectionView={undefined} ScreenToLocalTransform={() => this.props.ScreenToLocalTransform().translate(-(this.props.PanelWidth() - this.sidebarWidth), 0)} ruleProvider={undefined} -- cgit v1.2.3-70-g09d2 From 77ee66de66a411f79bbbc036d379d09be38d172f Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Mon, 2 Dec 2019 12:12:58 -0500 Subject: further cleanup --- src/Utils.ts | 2 - src/client/util/ClientDiagnostics.ts | 18 +-- .../util/Import & Export/DirectoryImportBox.tsx | 3 +- src/client/views/MainView.tsx | 2 +- src/client/views/collections/CollectionSubView.tsx | 6 +- src/server/ApiManagers/DeleteManager.ts | 1 - src/server/ApiManagers/GeneralGoogleManager.ts | 15 +-- src/server/ApiManagers/PDFManager.ts | 134 ++++++++++----------- src/server/ApiManagers/UploadManager.ts | 43 ++++--- src/server/DashUploadUtils.ts | 18 ++- src/server/RouteManager.ts | 14 +++ .../authentication/models/current_user_utils.ts | 3 +- src/server/credentials/test.json | 14 --- 13 files changed, 125 insertions(+), 148 deletions(-) delete mode 100644 src/server/credentials/test.json (limited to 'src') diff --git a/src/Utils.ts b/src/Utils.ts index b60e9e023..2543743a4 100644 --- a/src/Utils.ts +++ b/src/Utils.ts @@ -2,8 +2,6 @@ import v4 = require('uuid/v4'); import v5 = require("uuid/v5"); import { Socket } from 'socket.io'; import { Message } from './server/Message'; -import { EventEmitter } from 'events'; -import { ConsoleColors } from './server/ActionUtilities'; export namespace Utils { diff --git a/src/client/util/ClientDiagnostics.ts b/src/client/util/ClientDiagnostics.ts index 7eef935fd..0a213aa1c 100644 --- a/src/client/util/ClientDiagnostics.ts +++ b/src/client/util/ClientDiagnostics.ts @@ -12,18 +12,22 @@ export namespace ClientDiagnostics { serverPolls--; }, 1000 * 15); - let executed = false; - const handle = async () => { + let solrHandle: NodeJS.Timeout | undefined; + const handler = async () => { const response = await fetch("/solrHeartbeat"); if (!(await response.json()).running) { - !executed && alert("Looks like SOLR is not running on your machine."); - executed = true; - clearInterval(solrHandle); + if (!executed) { + alert("Looks like SOLR is not running on your machine."); + executed = true; + solrHandle && clearInterval(solrHandle); + } } }; - await handle(); - const solrHandle = setInterval(handle, 1000 * 15); + await handler(); + if (!executed) { + solrHandle = setInterval(handler, 1000 * 15); + } } diff --git a/src/client/util/Import & Export/DirectoryImportBox.tsx b/src/client/util/Import & Export/DirectoryImportBox.tsx index b5e806a97..104d9e099 100644 --- a/src/client/util/Import & Export/DirectoryImportBox.tsx +++ b/src/client/util/Import & Export/DirectoryImportBox.tsx @@ -106,7 +106,8 @@ export default class DirectoryImportBox extends React.Component runInAction(() => this.phase = `Internal: uploading ${this.quota - this.completed} files to Dash...`); - const uploads = await BatchedArray.from(validated, { batchSize: 15 }).batchedMapAsync(async (batch, collector) => { + const batched = BatchedArray.from(validated, { batchSize: 15 }); + const uploads = await batched.batchedMapAsync(async (batch, collector) => { const formData = new FormData(); 
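// Illustrative sketch (not part of the patch above): BatchedArray.batchedMapAsync chunks the
// validated files so at most batchSize of them travel in one multipart request. Without the
// array-batcher dependency the same idea is plain slicing; "/upload" matches the route that
// UploadManager registers below, and the response shape (one entry per file) is assumed from
// how the results are consumed here:
async function uploadInBatches(files: File[], batchSize = 15): Promise<any[]> {
    const results: any[] = [];
    for (let i = 0; i < files.length; i += batchSize) {
        const formData = new FormData();
        files.slice(i, i + batchSize).forEach(file => formData.append("file", file));
        const response = await fetch("/upload", { method: "POST", body: formData });
        results.push(...(await response.json()));
    }
    return results;
}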
batch.forEach(file => { diff --git a/src/client/views/MainView.tsx b/src/client/views/MainView.tsx index 5231075a1..85dfd8be2 100644 --- a/src/client/views/MainView.tsx +++ b/src/client/views/MainView.tsx @@ -4,7 +4,7 @@ import { faMusic, faObjectGroup, faPause, faMousePointer, faPenNib, faFileAudio, faPen, faEraser, faPlay, faPortrait, faRedoAlt, faThumbtack, faTree, faTv, faUndoAlt, faHighlighter, faMicrophone, faCompressArrowsAlt } from '@fortawesome/free-solid-svg-icons'; import { FontAwesomeIcon } from '@fortawesome/react-fontawesome'; -import { action, computed, configure, observable, reaction, runInAction, autorun } from 'mobx'; +import { action, computed, configure, observable, reaction, runInAction } from 'mobx'; import { observer } from 'mobx-react'; import "normalize.css"; import * as React from 'react'; diff --git a/src/client/views/collections/CollectionSubView.tsx b/src/client/views/collections/CollectionSubView.tsx index a1bd1527e..a1ae77fef 100644 --- a/src/client/views/collections/CollectionSubView.tsx +++ b/src/client/views/collections/CollectionSubView.tsx @@ -6,7 +6,7 @@ import { Id } from "../../../new_fields/FieldSymbols"; import { List } from "../../../new_fields/List"; import { listSpec } from "../../../new_fields/Schema"; import { ScriptField } from "../../../new_fields/ScriptField"; -import { Cast, StrCast } from "../../../new_fields/Types"; +import { Cast } from "../../../new_fields/Types"; import { CurrentUserUtils } from "../../../server/authentication/models/current_user_utils"; import { Utils } from "../../../Utils"; import { DocServer } from "../../DocServer"; @@ -279,9 +279,9 @@ export function CollectionSubView(schemaCtor: (doc: Doc) => T) { formData.append('file', file); let dropFileName = file ? file.name : "-empty-"; promises.push(Networking.PostFormDataToServer("/upload", formData).then(results => { - results.map(action((file: any) => { + results.map(action(({ clientAccessPath }: any) => { let full = { ...options, nativeWidth: type.indexOf("video") !== -1 ? 
600 : 300, width: 300, title: dropFileName }; - let pathname = Utils.prepend(file.clientAccessPath); + let pathname = Utils.prepend(clientAccessPath); Docs.Get.DocumentFromType(type, pathname, full).then(doc => { doc && (Doc.GetProto(doc).fileUpload = path.basename(pathname).replace("upload_", "").replace(/\.[a-z0-9]*$/, "")); doc && this.props.addDocument(doc); diff --git a/src/server/ApiManagers/DeleteManager.ts b/src/server/ApiManagers/DeleteManager.ts index f58a28ce5..71818c673 100644 --- a/src/server/ApiManagers/DeleteManager.ts +++ b/src/server/ApiManagers/DeleteManager.ts @@ -56,7 +56,6 @@ export default class DeleteManager extends ApiManager { } }); - } } diff --git a/src/server/ApiManagers/GeneralGoogleManager.ts b/src/server/ApiManagers/GeneralGoogleManager.ts index 171912185..629684e0c 100644 --- a/src/server/ApiManagers/GeneralGoogleManager.ts +++ b/src/server/ApiManagers/GeneralGoogleManager.ts @@ -20,8 +20,7 @@ export default class GeneralGoogleManager extends ApiManager { method: Method.GET, subscription: "/readGoogleAccessToken", onValidation: async ({ user, res }) => { - const userId = user.id; - const token = await GoogleApiServerUtils.retrieveAccessToken(userId); + const token = await GoogleApiServerUtils.retrieveAccessToken(user.id); if (!token) { return res.send(GoogleApiServerUtils.generateAuthenticationUrl()); } @@ -37,18 +36,6 @@ export default class GeneralGoogleManager extends ApiManager { } }); - register({ - method: Method.GET, - subscription: "/deleteWithGoogleCredentials", - onValidation: async ({ res, isRelease }) => { - if (isRelease) { - return _permission_denied(res, deletionPermissionError); - } - await Database.Auxiliary.GoogleAuthenticationToken.DeleteAll(); - res.redirect("/delete"); - } - }); - register({ method: Method.POST, subscription: new RouteSubscriber("/googleDocs").add("sector", "action"), diff --git a/src/server/ApiManagers/PDFManager.ts b/src/server/ApiManagers/PDFManager.ts index 4bd750aaf..151b48dd9 100644 --- a/src/server/ApiManagers/PDFManager.ts +++ b/src/server/ApiManagers/PDFManager.ts @@ -4,10 +4,11 @@ import RouteSubscriber from "../RouteSubscriber"; import { exists, createReadStream, createWriteStream } from "fs"; import * as Pdfjs from 'pdfjs-dist'; import { createCanvas } from "canvas"; -const probe = require("probe-image-size"); +const imageSize = require("probe-image-size"); import * as express from "express"; import * as path from "path"; import { Directory, serverPathToFile, clientPathToFile } from "./UploadManager"; +import { ConsoleColors } from "../ActionUtilities"; export default class PDFManager extends ApiManager { @@ -16,84 +17,77 @@ export default class PDFManager extends ApiManager { register({ method: Method.GET, subscription: new RouteSubscriber("thumbnail").add("filename"), - onValidation: ({ req, res }) => { - let filename = req.params.filename; - let noExt = filename.substring(0, filename.length - ".png".length); - let pagenumber = parseInt(noExt.split('-')[1]); - return new Promise(resolve => { - const path = serverPathToFile(Directory.pdf_thumbnails, filename); - exists(path, (exists: boolean) => { - console.log(`${path} ${exists ? 
"exists" : "does not exist"}`); - if (exists) { - let input = createReadStream(path); - probe(input, (err: any, { width, height }: any) => { - if (err) { - console.log(err); - console.log(`error on ${filename}`); - return; - } - res.send({ - path: clientPathToFile(Directory.pdf_thumbnails, filename), - width, - height - }); - }); - } - else { - const name = filename.substring(0, filename.length - noExt.split('-')[1].length - ".PNG".length - 1) + ".pdf"; - LoadPage(serverPathToFile(Directory.pdfs, name), pagenumber, res); - } - resolve(); - }); - }); - } + onValidation: ({ req, res }) => getOrCreateThumbnail(req.params.filename, res) }); - function LoadPage(file: string, pageNumber: number, res: express.Response) { - console.log(file); - Pdfjs.getDocument(file).promise - .then((pdf: Pdfjs.PDFDocumentProxy) => { - let factory = new NodeCanvasFactory(); - console.log(pageNumber); - pdf.getPage(pageNumber).then((page: Pdfjs.PDFPageProxy) => { - console.log("reading " + page); - let viewport = page.getViewport(1 as any); - let canvasAndContext = factory.create(viewport.width, viewport.height); - let renderContext = { - canvasContext: canvasAndContext.context, - canvasFactory: factory, - viewport - }; - console.log("read " + pageNumber); + } - page.render(renderContext).promise - .then(() => { - console.log("saving " + pageNumber); - let stream = canvasAndContext.canvas.createPNGStream(); - let filenames = path.basename(file).split("."); - const pngFile = serverPathToFile(Directory.pdf_thumbnails, `${filenames[0]}-${pageNumber}.png`); - let out = createWriteStream(pngFile); - stream.pipe(out); - out.on("finish", () => { - console.log(`Success! Saved to ${pngFile}`); - res.send({ - path: pngFile, - width: viewport.width, - height: viewport.height - }); - }); - }, (reason: string) => { - console.error(reason + ` ${pageNumber}`); - }); +} + +function getOrCreateThumbnail(thumbnailName: string, res: express.Response) { + const noExtension = thumbnailName.substring(0, thumbnailName.length - ".png".length); + const pageString = noExtension.split('-')[1]; + const pageNumber = parseInt(pageString); + return new Promise(resolve => { + const path = serverPathToFile(Directory.pdf_thumbnails, thumbnailName); + exists(path, (exists: boolean) => { + if (exists) { + let existingThumbnail = createReadStream(path); + imageSize(existingThumbnail, (err: any, { width, height }: any) => { + if (err) { + console.log(ConsoleColors.Red, `In PDF thumbnail response, unable to determine dimensions of ${thumbnailName}:`); + console.log(err); + return; + } + res.send({ + path: clientPathToFile(Directory.pdf_thumbnails, thumbnailName), + width, + height }); }); - } - - } + } else { + const offset = thumbnailName.length - pageString.length - 5; + const name = thumbnailName.substring(0, offset) + ".pdf"; + const path = serverPathToFile(Directory.pdfs, name); + CreateThumbnail(path, pageNumber, res); + } + resolve(); + }); + }); +} +async function CreateThumbnail(file: string, pageNumber: number, res: express.Response) { + const documentProxy = await Pdfjs.getDocument(file).promise; + const factory = new NodeCanvasFactory(); + const page = await documentProxy.getPage(pageNumber); + const viewport = page.getViewport(1 as any); + const { canvas, context } = factory.create(viewport.width, viewport.height); + const renderContext = { + canvasContext: context, + canvasFactory: factory, + viewport + }; + await page.render(renderContext).promise; + const pngStream = canvas.createPNGStream(); + const filenames = 
path.basename(file).split("."); + const pngFile = serverPathToFile(Directory.pdf_thumbnails, `${filenames[0]}-${pageNumber}.png`); + const out = createWriteStream(pngFile); + pngStream.pipe(out); + out.on("finish", () => { + res.send({ + path: pngFile, + width: viewport.width, + height: viewport.height + }); + }); + out.on("error", error => { + console.log(ConsoleColors.Red, `In PDF thumbnail creation, encountered the following error when piping ${pngFile}:`); + console.log(error); + }); } class NodeCanvasFactory { + create = (width: number, height: number) => { var canvas = createCanvas(width, height); var context = canvas.getContext('2d'); diff --git a/src/server/ApiManagers/UploadManager.ts b/src/server/ApiManagers/UploadManager.ts index 2f76871a6..80ae0ad61 100644 --- a/src/server/ApiManagers/UploadManager.ts +++ b/src/server/ApiManagers/UploadManager.ts @@ -38,6 +38,27 @@ export default class UploadManager extends ApiManager { protected initialize(register: Registration): void { + register({ + method: Method.POST, + subscription: "/upload", + onValidation: async ({ req, res }) => { + let form = new formidable.IncomingForm(); + form.uploadDir = pathToDirectory(Directory.parsed_files); + form.keepExtensions = true; + return new Promise(resolve => { + form.parse(req, async (_err, _fields, files) => { + let results: any[] = []; + for (const key in files) { + const result = await DashUploadUtils.upload(files[key]); + result && results.push(result); + } + _success(res, results); + resolve(); + }); + }); + } + }); + register({ method: Method.POST, subscription: "/uploadDoc", @@ -142,28 +163,6 @@ export default class UploadManager extends ApiManager { } }); - - register({ - method: Method.POST, - subscription: "/upload", - onValidation: async ({ req, res }) => { - let form = new formidable.IncomingForm(); - form.uploadDir = pathToDirectory(Directory.parsed_files); - form.keepExtensions = true; - return new Promise(resolve => { - form.parse(req, async (_err, _fields, files) => { - let results: any[] = []; - for (const key in files) { - const result = await DashUploadUtils.upload(files[key]); - result && results.push(result); - } - _success(res, results); - resolve(); - }); - }); - } - }); - register({ method: Method.POST, subscription: "/inspectImage", diff --git a/src/server/DashUploadUtils.ts b/src/server/DashUploadUtils.ts index c831eb072..9ccc72e35 100644 --- a/src/server/DashUploadUtils.ts +++ b/src/server/DashUploadUtils.ts @@ -85,7 +85,8 @@ export namespace DashUploadUtils { return UploadPdf(path); } } - console.log(ConsoleColors.Red, `Ignoring unsupported file ${name} with upload type (${type}).`); + + console.log(ConsoleColors.Red, `Ignoring unsupported file (${name}) with upload type (${type}).`); return { clientAccessPath: undefined }; } @@ -169,17 +170,12 @@ export namespace DashUploadUtils { if (isLocal) { return results; } - const metadata = (await new Promise((resolve, reject) => { - request.head(source, async (error, res) => { - if (error) { - return reject(error); - } - resolve(res); - }); - })).headers; + const { headers } = (await new Promise((resolve, reject) => { + request.head(source, (error, res) => error ? 
reject(error) : resolve(res)); + })); return { - contentSize: parseInt(metadata[size]), - contentType: metadata[type], + contentSize: parseInt(headers[size]), + contentType: headers[type], ...results }; }; diff --git a/src/server/RouteManager.ts b/src/server/RouteManager.ts index 3a20d5af5..7c49485f1 100644 --- a/src/server/RouteManager.ts +++ b/src/server/RouteManager.ts @@ -26,6 +26,8 @@ export interface RouteInitializer { onError?: OnError; } +const registered = new Map>(); + export default class RouteManager { private server: express.Express; private _isRelease: boolean; @@ -89,6 +91,18 @@ export default class RouteManager { } else { route = subscriber.build; } + const existing = registered.get(route); + if (existing) { + if (existing.has(method)) { + console.log(ConsoleColors.Red, `\nDuplicate registration error: already registered ${route} with Method[${method}]`); + console.log('Please remove duplicate registrations before continuing...\n'); + process.exit(0); + } + } else { + const specific = new Set(); + specific.add(method); + registered.set(route, specific); + } switch (method) { case Method.GET: this.server.get(route, supervised); diff --git a/src/server/authentication/models/current_user_utils.ts b/src/server/authentication/models/current_user_utils.ts index 052aa54a6..ac4462f78 100644 --- a/src/server/authentication/models/current_user_utils.ts +++ b/src/server/authentication/models/current_user_utils.ts @@ -1,4 +1,4 @@ -import { action, computed, observable, reaction, runInAction } from "mobx"; +import { action, computed, observable, reaction } from "mobx"; import * as rp from 'request-promise'; import { DocServer } from "../../../client/DocServer"; import { Docs } from "../../../client/documents/Documents"; @@ -11,7 +11,6 @@ import { listSpec } from "../../../new_fields/Schema"; import { ScriptField, ComputedField } from "../../../new_fields/ScriptField"; import { Cast, PromiseValue } from "../../../new_fields/Types"; import { Utils } from "../../../Utils"; -import { ButtonBox } from "../../../client/views/nodes/ButtonBox"; import { nullAudio } from "../../../new_fields/URLField"; import { DragManager } from "../../../client/util/DragManager"; import { InkingControl } from "../../../client/views/InkingControl"; diff --git a/src/server/credentials/test.json b/src/server/credentials/test.json deleted file mode 100644 index 0a032cc2d..000000000 --- a/src/server/credentials/test.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "installed": { - "client_id": "343179513178-ud6tvmh275r2fq93u9eesrnc66t6akh9.apps.googleusercontent.com", - "project_id": "quickstart-1565056383187", - "auth_uri": "https://accounts.google.com/o/oauth2/auth", - "token_uri": "https://oauth2.googleapis.com/token", - "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", - "client_secret": "w8KIFSc0MQpmUYHed4qEzn8b", - "redirect_uris": [ - "urn:ietf:wg:oauth:2.0:oob", - "http://localhost" - ] - } -} \ No newline at end of file -- cgit v1.2.3-70-g09d2 From 1280c005829cf49fd106fd872afcf4ed6593a2f6 Mon Sep 17 00:00:00 2001 From: bob Date: Mon, 2 Dec 2019 13:21:59 -0500 Subject: fixed inline text comments to highlight and hide/show. 
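// Illustrative sketch (not part of the patch above): the RouteManager change keeps a registry of
// (route, method) pairs so that registering the same pair twice aborts startup instead of letting
// one handler silently shadow the other. The generic type parameters did not survive this
// rendering of the diff; reconstructed, the bookkeeping looks roughly like this (the sketch also
// records the method when the route already exists under other methods):
enum HttpMethod { GET, POST }                       // stand-in for the project's Method enum

const registeredRoutes = new Map<string, Set<HttpMethod>>();

function assertNotDuplicate(route: string, method: HttpMethod): void {
    const existing = registeredRoutes.get(route);
    if (existing) {
        if (existing.has(method)) {
            throw new Error(`Duplicate registration: ${route} already handles Method[${method}]`);
        }
        existing.add(method);
    } else {
        registeredRoutes.set(route, new Set<HttpMethod>([method]));
    }
}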
--- src/client/util/RichTextRules.ts | 12 +++--- src/client/util/RichTextSchema.tsx | 64 ++++++++++++++++++++++++++-- src/client/views/nodes/FormattedTextBox.scss | 12 ++++++ src/client/views/nodes/FormattedTextBox.tsx | 5 ++- 4 files changed, 81 insertions(+), 12 deletions(-) (limited to 'src') diff --git a/src/client/util/RichTextRules.ts b/src/client/util/RichTextRules.ts index ebb9bda8a..f4c44e5ce 100644 --- a/src/client/util/RichTextRules.ts +++ b/src/client/util/RichTextRules.ts @@ -147,16 +147,14 @@ export const inpRules = { new InputRule( new RegExp(/##\s$/), (state, match, start, end) => { + let target = Docs.Create.TextDocument({ width: 75, height: 35, autoHeight: true, fontSize: 9, title: "inline comment" }); let node = (state.doc.resolve(start) as any).nodeAfter; + let newNode = schema.nodes.dashComment.create({ docid: target[Id] }); + let dashDoc = schema.nodes.dashDoc.create({ width: 75, height: 35, title: "dashDoc", docid: target[Id], float: "right" }); let sm = state.storedMarks || undefined; - let target = Docs.Create.TextDocument({ width: 75, height: 35, autoHeight: true, fontSize: 9, title: "inline comment" }); - let replaced = node ? state.tr.insertText("â†", start).replaceRangeWith(start + 1, end + 1, schema.nodes.dashDoc.create({ - width: 75, height: 35, - title: "dashDoc", docid: target[Id], - float: "right" - })).setStoredMarks([...node.marks, ...(sm ? sm : [])]) : + let replaced = node ? state.tr.insert(start, newNode).replaceRangeWith(start + 1, end + 1, dashDoc).setStoredMarks([...node.marks, ...(sm ? sm : [])]) : state.tr; - return replaced.setSelection(new TextSelection(replaced.doc.resolve(end - 1))); + return replaced;//.setSelection(new NodeSelection(replaced.doc.resolve(end))); }), new InputRule( new RegExp(/\(\(/), diff --git a/src/client/util/RichTextSchema.tsx b/src/client/util/RichTextSchema.tsx index 506c9767f..522232e9f 100644 --- a/src/client/util/RichTextSchema.tsx +++ b/src/client/util/RichTextSchema.tsx @@ -107,6 +107,18 @@ export const nodes: { [index: string]: NodeSpec } = { group: "inline" }, + dashComment: { + attrs: { + docid: { default: "" }, + }, + inline: true, + group: "inline", + toDOM(node) { + const attrs = { style: `width: 40px` }; + return ["span", { ...node.attrs, ...attrs }, "â†"]; + }, + }, + star: { inline: true, attrs: { @@ -171,7 +183,8 @@ export const nodes: { [index: string]: NodeSpec } = { title: { default: null }, float: { default: "right" }, location: { default: "onRight" }, - docid: { default: "" } + hidden: { default: false }, + docid: { default: "" }, }, group: "inline", draggable: true, @@ -648,6 +661,38 @@ export class ImageResizeView { } } + +export class DashDocCommentView { + _collapsed: HTMLElement; + _view: any; + constructor(node: any, view: any, getPos: any) { + this._collapsed = document.createElement("span"); + this._collapsed.className = "formattedTextBox-inlineComment"; + this._collapsed.id = "DashDocCommentView-" + node.attrs.docid; + this._view = view; + this._collapsed.onpointerdown = (e: any) => { + let node = view.state.doc.nodeAt(getPos() + 1); + view.dispatch(view.state.tr. + setNodeMarkup(getPos() + 1, undefined, { ...node.attrs, hidden: node.attrs.hidden ? 
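// Illustrative sketch (not part of the patch above): DashDocCommentView hides and reveals its
// companion dashDoc node by rewriting that node's attrs with setNodeMarkup, and ProseMirror
// re-renders the node with the new "hidden" value (getPos() + 1 above is the position of the
// node right after the comment marker). The attribute-toggle pattern in isolation:
import { EditorView } from "prosemirror-view";

function toggleNodeAttr(view: EditorView, pos: number, attr: string): void {
    const node = view.state.doc.nodeAt(pos);
    if (!node) return;
    // passing undefined for the type keeps the node's type; only the one flag flips
    view.dispatch(view.state.tr.setNodeMarkup(pos, undefined, { ...node.attrs, [attr]: !node.attrs[attr] }));
}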
false : true })); // update the attrs + setTimeout(() => node.attrs.hidden && DocServer.GetRefField(node.attrs.docid).then(async dashDoc => dashDoc instanceof Doc && Doc.linkFollowHighlight(dashDoc)), 100); + } + this._collapsed.onpointerenter = (e: any) => { + let node = view.state.doc.nodeAt(getPos() + 1); + DocServer.GetRefField(node.attrs.docid).then(async dashDoc => dashDoc instanceof Doc && Doc.linkFollowHighlight(dashDoc)); + e.preventDefault(); + e.stopPropagation(); + }; + this._collapsed.onpointerleave = (e: any) => { + let node = view.state.doc.nodeAt(getPos() + 1); + DocServer.GetRefField(node.attrs.docid).then(async dashDoc => dashDoc instanceof Doc && Doc.linkFollowUnhighlight()); + e.preventDefault(); + e.stopPropagation(); + }; + (this as any).dom = this._collapsed; + } + selectNode() { } +} + export class DashDocView { _dashSpan: HTMLDivElement; _outer: HTMLElement; @@ -667,20 +712,33 @@ export class DashDocView { this._outer.style.position = "relative"; this._outer.style.width = node.attrs.width; this._outer.style.height = node.attrs.height; - this._outer.style.display = "inline-block"; - this._outer.style.overflow = "hidden"; + this._outer.style.display = node.attrs.hidden ? "none" : "inline-block"; + // this._outer.style.overflow = "hidden"; // bcz: not sure if this is needed. if it's used, then the doc doesn't highlight when you hover over a docComment (this._outer.style as any).float = node.attrs.float; this._dashSpan.style.width = node.attrs.width; this._dashSpan.style.height = node.attrs.height; this._dashSpan.style.position = "absolute"; this._dashSpan.style.display = "inline-block"; + this._dashSpan.style.borderWidth = "4"; let removeDoc = () => { let pos = getPos(); let ns = new NodeSelection(view.state.doc.resolve(pos)); view.dispatch(view.state.tr.setSelection(ns).deleteSelection()); return true; }; + this._dashSpan.onpointerleave = () => { + let ele = document.getElementById("DashDocCommentView-" + node.attrs.docid); + if (ele) { + (ele as HTMLDivElement).style.backgroundColor = ""; + } + } + this._dashSpan.onpointerenter = () => { + let ele = document.getElementById("DashDocCommentView-" + node.attrs.docid); + if (ele) { + (ele as HTMLDivElement).style.backgroundColor = "orange"; + } + } DocServer.GetRefField(node.attrs.docid).then(async dashDoc => { if (dashDoc instanceof Doc) { self._dashDoc = dashDoc; diff --git a/src/client/views/nodes/FormattedTextBox.scss b/src/client/views/nodes/FormattedTextBox.scss index 984d51791..4f72bb679 100644 --- a/src/client/views/nodes/FormattedTextBox.scss +++ b/src/client/views/nodes/FormattedTextBox.scss @@ -189,6 +189,17 @@ footnote::after { width: 0; } + +.formattedTextBox-inlineComment { + position: relative; + width: 40px; + height: 20px; +} + +.formattedTextBox-inlineComment::after { + content: "â†"; +} + .formattedTextBox-summarizer { opacity: 0.5; position: relative; @@ -198,6 +209,7 @@ footnote::after { .formattedTextBox-summarizer::after { content: "â†"; + font-weight: bold; } .formattedTextBox-summarizer-collapsed { diff --git a/src/client/views/nodes/FormattedTextBox.tsx b/src/client/views/nodes/FormattedTextBox.tsx index c8a629984..a5530f32d 100644 --- a/src/client/views/nodes/FormattedTextBox.tsx +++ b/src/client/views/nodes/FormattedTextBox.tsx @@ -27,7 +27,7 @@ import { DictationManager } from '../../util/DictationManager'; import { DragManager } from "../../util/DragManager"; import buildKeymap from "../../util/ProsemirrorExampleTransfer"; import { inpRules } from "../../util/RichTextRules"; -import { 
FootnoteView, ImageResizeView, DashDocView, OrderedListView, schema, SummarizedView } from "../../util/RichTextSchema"; +import { DashDocCommentView, FootnoteView, ImageResizeView, DashDocView, OrderedListView, schema, SummarizedView } from "../../util/RichTextSchema"; import { SelectionManager } from "../../util/SelectionManager"; import { TooltipLinkingMenu } from "../../util/TooltipLinkingMenu"; import { TooltipTextMenu } from "../../util/TooltipTextMenu"; @@ -787,6 +787,7 @@ export class FormattedTextBox extends DocAnnotatableComponent<(FieldViewProps & }, dispatchTransaction: this.dispatchTransaction, nodeViews: { + dashComment(node, view, getPos) { return new DashDocCommentView(node, view, getPos); }, dashDoc(node, view, getPos) { return new DashDocView(node, view, getPos, self); }, image(node, view, getPos) { return new ImageResizeView(node, view, getPos, self.props.addDocTab); }, star(node, view, getPos) { return new SummarizedView(node, view, getPos); }, @@ -1081,7 +1082,7 @@ export class FormattedTextBox extends DocAnnotatableComponent<(FieldViewProps & whenActiveChanged={this.whenActiveChanged} removeDocument={this.removeDocument} moveDocument={this.moveDocument} - addDocument={(doc:Doc) => { doc.hideSidebar = true; return this.addDocument(doc); }} + addDocument={(doc: Doc) => { doc.hideSidebar = true; return this.addDocument(doc); }} CollectionView={undefined} ScreenToLocalTransform={() => this.props.ScreenToLocalTransform().translate(-(this.props.PanelWidth() - this.sidebarWidth), 0)} ruleProvider={undefined} -- cgit v1.2.3-70-g09d2 From 59ab8a05866bbf065aa5078029a7bef1ebb708df Mon Sep 17 00:00:00 2001 From: bob Date: Mon, 2 Dec 2019 13:43:08 -0500 Subject: small highlighting fixes to inline text comments. --- src/client/views/nodes/FormattedTextBox.scss | 25 ++++++++++++------------- 1 file changed, 12 insertions(+), 13 deletions(-) (limited to 'src') diff --git a/src/client/views/nodes/FormattedTextBox.scss b/src/client/views/nodes/FormattedTextBox.scss index 4f72bb679..c06f38a6c 100644 --- a/src/client/views/nodes/FormattedTextBox.scss +++ b/src/client/views/nodes/FormattedTextBox.scss @@ -194,10 +194,12 @@ footnote::after { position: relative; width: 40px; height: 20px; -} - -.formattedTextBox-inlineComment::after { - content: "â†"; + &::after { + content: "→"; + } + &:hover { + background: orange; + } } .formattedTextBox-summarizer { @@ -205,11 +207,9 @@ footnote::after { position: relative; width: 40px; height: 20px; -} - -.formattedTextBox-summarizer::after { - content: "â†"; - font-weight: bold; + &::after { + content: "â†"; + } } .formattedTextBox-summarizer-collapsed { @@ -217,10 +217,9 @@ footnote::after { position: relative; width: 40px; height: 20px; -} - -.formattedTextBox-summarizer-collapsed::after { - content: "..."; + &::after { + content: "..."; + } } .ProseMirror { -- cgit v1.2.3-70-g09d2 From 62c888ad25c5580441eaf947a1a0f1638939b868 Mon Sep 17 00:00:00 2001 From: bob Date: Mon, 2 Dec 2019 15:23:28 -0500 Subject: fixed aliasing dragged documents that stay within the same tree view. (eg, the library of documents view). 
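The patch below only honors a drag's dropAction (which would add an alias/copy) when the drag originated in a different tree view; a drag that starts and ends in the same tree now falls through to the move path instead of being aliased. A minimal TypeScript sketch of that guard, assuming a simplified drop-event shape (SketchDropData, options, dropAction, userDropAction, and shouldAddAsCopy are illustrative names, not the actual Dash drag-and-drop API):

// Sketch only: models the decision made in TreeView's drop handler in the patch below.
interface SketchDropData {
    options?: string;        // assumed: id of the tree view the drag originated from
    dropAction?: string;     // assumed: action requested by the drag source, e.g. "alias"
    userDropAction?: string; // assumed: action explicitly chosen by the user
}

function shouldAddAsCopy(data: SketchDropData, treeViewId: string): boolean {
    // Honor the source's dropAction only when the drag came from another tree view;
    // an explicit userDropAction always wins. Anything else is treated as a move,
    // so documents dragged within the same tree are no longer aliased.
    const cameFromAnotherTree = data.options !== treeViewId;
    return (!!data.dropAction && cameFromAnotherTree) || !!data.userDropAction;
}

This mirrors the guard ((de.data.dropAction && (de.data.options !== this.props.treeViewId)) || de.data.userDropAction) in the CollectionTreeView change that follows.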
--- src/client/views/collections/CollectionTreeView.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'src') diff --git a/src/client/views/collections/CollectionTreeView.tsx b/src/client/views/collections/CollectionTreeView.tsx index 83f09b8e9..c4b7e2d31 100644 --- a/src/client/views/collections/CollectionTreeView.tsx +++ b/src/client/views/collections/CollectionTreeView.tsx @@ -233,7 +233,7 @@ class TreeView extends React.Component { addDoc = (doc: Doc) => Doc.AddDocToList(this.dataDoc, this.fieldKey, doc) || addDoc(doc); } let movedDocs = (de.data.options === this.props.treeViewId ? de.data.draggedDocuments : de.data.droppedDocuments); - return (de.data.dropAction || de.data.userDropAction) ? + return ((de.data.dropAction && (de.data.options !== this.props.treeViewId)) || de.data.userDropAction) ? de.data.droppedDocuments.reduce((added, d) => addDoc(d) || added, false) : de.data.moveDocument ? movedDocs.reduce((added, d) => de.data.moveDocument(d, undefined, addDoc) || added, false) -- cgit v1.2.3-70-g09d2 From ef94ad7df2a087141ddb8d347d3e3c484ff7609b Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Tue, 3 Dec 2019 01:46:36 -0500 Subject: const linter rule and restored google docs push, fixed routing --- package.json | 3 +- src/Utils.ts | 91 +++--- src/client/DocServer.ts | 10 +- src/client/Network.ts | 2 +- src/client/apis/GoogleAuthenticationManager.tsx | 2 +- .../apis/google_docs/GoogleApiClientUtils.ts | 20 +- .../apis/google_docs/GooglePhotosClientUtils.ts | 16 +- src/client/apis/youtube/YoutubeBox.tsx | 102 +++---- src/client/cognitive_services/CognitiveServices.ts | 50 ++-- src/client/documents/Documents.ts | 80 ++--- src/client/northstar/dash-fields/HistogramField.ts | 8 +- .../model/binRanges/QuantitativeVisualBinRange.ts | 16 +- src/client/northstar/operations/BaseOperation.ts | 14 +- src/client/northstar/utils/MathUtil.ts | 44 +-- src/client/util/DictationManager.ts | 75 +++-- src/client/util/DocumentManager.ts | 28 +- src/client/util/DragManager.ts | 91 +++--- src/client/util/DropConverter.ts | 10 +- src/client/util/History.ts | 6 +- .../util/Import & Export/DirectoryImportBox.tsx | 58 ++-- .../util/Import & Export/ImportMetadataEntry.tsx | 6 +- src/client/util/InteractionUtils.ts | 18 +- src/client/util/LinkManager.ts | 62 ++-- src/client/util/ProsemirrorExampleTransfer.ts | 50 ++-- src/client/util/RichTextRules.ts | 78 ++--- src/client/util/RichTextSchema.tsx | 86 +++--- src/client/util/Scripting.ts | 38 +-- src/client/util/SearchUtil.ts | 24 +- src/client/util/SerializationHelper.ts | 9 +- src/client/util/SharingManager.tsx | 13 +- src/client/util/TooltipLinkingMenu.tsx | 22 +- src/client/util/TooltipTextMenu.tsx | 332 ++++++++++----------- src/client/util/TypedEvent.ts | 62 ++-- src/client/util/UndoManager.ts | 16 +- src/client/views/CollectionLinearView.tsx | 12 +- src/client/views/ContextMenu.tsx | 6 +- src/client/views/ContextMenuItem.tsx | 2 +- src/client/views/DictationOverlay.tsx | 10 +- src/client/views/DocComponent.tsx | 5 +- src/client/views/DocumentButtonBar.scss | 32 +- src/client/views/DocumentButtonBar.tsx | 57 ++-- src/client/views/DocumentDecorations.tsx | 97 +++--- src/client/views/GlobalKeyHandler.ts | 30 +- src/client/views/InkSelectDecorations.tsx | 10 +- src/client/views/InkingControl.tsx | 26 +- src/client/views/InkingStroke.tsx | 26 +- src/client/views/MainView.tsx | 36 +-- src/client/views/MainViewModal.tsx | 6 +- src/client/views/MetadataEntryMenu.tsx | 4 +- src/client/views/OverlayView.tsx | 6 +- 
src/client/views/PreviewCursor.tsx | 12 +- src/client/views/ScriptBox.tsx | 8 +- src/client/views/TemplateMenu.tsx | 32 +- src/client/views/Touchable.tsx | 6 +- .../views/collections/CollectionDockingView.tsx | 92 +++--- .../collections/CollectionMasonryViewFieldRow.tsx | 82 ++--- .../views/collections/CollectionSchemaCells.tsx | 42 +-- .../views/collections/CollectionSchemaHeaders.tsx | 36 ++- .../CollectionSchemaMovableTableHOC.tsx | 62 ++-- .../views/collections/CollectionSchemaView.tsx | 118 ++++---- .../views/collections/CollectionStackingView.tsx | 120 ++++---- .../CollectionStackingViewFieldColumn.tsx | 83 +++--- .../views/collections/CollectionStaffView.tsx | 6 +- src/client/views/collections/CollectionSubView.tsx | 66 ++-- .../views/collections/CollectionTreeView.tsx | 144 ++++----- src/client/views/collections/CollectionView.tsx | 28 +- .../views/collections/CollectionViewChromes.tsx | 67 ++--- src/client/views/collections/KeyRestrictionRow.tsx | 6 +- .../views/collections/ParentDocumentSelector.tsx | 4 +- .../CollectionFreeFormLayoutEngines.tsx | 10 +- .../CollectionFreeFormLinkView.tsx | 45 ++- .../CollectionFreeFormLinksView.tsx | 8 +- .../CollectionFreeFormRemoteCursors.tsx | 12 +- .../collectionFreeForm/CollectionFreeFormView.tsx | 231 +++++++------- .../collectionFreeForm/MarqueeOptionsMenu.tsx | 2 +- .../collections/collectionFreeForm/MarqueeView.tsx | 137 +++++---- .../caption_toggle/DetailedCaptionToggle.tsx | 6 +- src/client/views/linking/LinkEditor.tsx | 82 ++--- src/client/views/linking/LinkFollowBox.tsx | 48 +-- src/client/views/linking/LinkMenu.tsx | 6 +- src/client/views/linking/LinkMenuGroup.tsx | 22 +- src/client/views/linking/LinkMenuItem.tsx | 14 +- src/client/views/nodes/AudioBox.tsx | 24 +- src/client/views/nodes/ButtonBox.tsx | 8 +- .../views/nodes/CollectionFreeFormDocumentView.tsx | 20 +- .../views/nodes/ContentFittingDocumentView.tsx | 4 +- src/client/views/nodes/DocuLinkBox.tsx | 26 +- src/client/views/nodes/DocumentContentsView.tsx | 2 +- src/client/views/nodes/DocumentView.tsx | 72 ++--- src/client/views/nodes/FaceRectangle.tsx | 2 +- src/client/views/nodes/FaceRectangles.tsx | 8 +- src/client/views/nodes/FontIconBox.tsx | 8 +- src/client/views/nodes/FormattedTextBox.tsx | 197 ++++++------ src/client/views/nodes/FormattedTextBoxComment.tsx | 22 +- src/client/views/nodes/IconBox.tsx | 6 +- src/client/views/nodes/ImageBox.tsx | 86 +++--- src/client/views/nodes/KeyValueBox.tsx | 40 +-- src/client/views/nodes/KeyValuePair.tsx | 8 +- src/client/views/nodes/PDFBox.tsx | 8 +- src/client/views/nodes/PresBox.tsx | 38 +-- src/client/views/nodes/VideoBox.tsx | 66 ++-- src/client/views/nodes/WebBox.tsx | 22 +- src/client/views/pdf/Annotation.tsx | 14 +- src/client/views/pdf/PDFMenu.tsx | 2 +- src/client/views/pdf/PDFViewer.tsx | 66 ++-- .../views/presentationview/PresElementBox.tsx | 12 +- src/client/views/search/FilterBox.tsx | 44 +-- src/client/views/search/IconButton.tsx | 2 +- src/client/views/search/NaviconButton.tsx | 22 +- src/client/views/search/SearchBox.tsx | 20 +- src/client/views/search/SearchItem.tsx | 26 +- src/client/views/search/ToggleBar.tsx | 3 +- src/debug/Viewer.tsx | 4 +- src/mobile/ImageUpload.tsx | 10 +- src/new_fields/Doc.ts | 94 +++--- src/new_fields/List.ts | 4 +- src/new_fields/RichTextUtils.ts | 76 ++--- src/new_fields/Schema.ts | 2 +- src/new_fields/ScriptField.ts | 10 +- src/server/ActionUtilities.ts | 29 +- src/server/ApiManagers/DownloadManager.ts | 3 +- src/server/ApiManagers/GeneralGoogleManager.ts | 17 +- 
src/server/ApiManagers/GooglePhotosManager.ts | 4 +- src/server/ApiManagers/PDFManager.ts | 12 +- src/server/ApiManagers/SearchManager.ts | 12 +- src/server/ApiManagers/UploadManager.ts | 16 +- src/server/ApiManagers/UtilManager.ts | 8 +- src/server/DashUploadUtils.ts | 12 +- src/server/Initialization.ts | 15 +- src/server/RouteManager.ts | 16 +- src/server/Websocket/Websocket.ts | 15 +- src/server/apis/google/GoogleApiServerUtils.ts | 8 +- src/server/authentication/config/passport.ts | 2 +- .../authentication/controllers/user_controller.ts | 5 +- .../authentication/models/current_user_utils.ts | 6 +- src/server/database.ts | 4 +- src/server/downsize.ts | 2 +- src/server/index.ts | 11 +- src/typings/index.d.ts | 1 + test/test.ts | 18 +- tslint.json | 58 ++-- 141 files changed, 2467 insertions(+), 2429 deletions(-) (limited to 'src') diff --git a/package.json b/package.json index 3725d76eb..499aefdb5 100644 --- a/package.json +++ b/package.json @@ -127,6 +127,7 @@ "child_process": "^1.0.2", "class-transformer": "^0.2.0", "color": "^3.1.2", + "colors": "^1.4.0", "connect-flash": "^0.1.1", "connect-mongo": "^2.0.3", "cookie-parser": "^1.4.4", @@ -228,4 +229,4 @@ "xoauth2": "^1.2.0", "youtube": "^0.1.0" } -} +} \ No newline at end of file diff --git a/src/Utils.ts b/src/Utils.ts index 2543743a4..7401ef981 100644 --- a/src/Utils.ts +++ b/src/Utils.ts @@ -54,7 +54,7 @@ export namespace Utils { } export function CopyText(text: string) { - var textArea = document.createElement("textarea"); + const textArea = document.createElement("textarea"); textArea.value = text; document.body.appendChild(textArea); textArea.focus(); @@ -66,14 +66,14 @@ export namespace Utils { } export function fromRGBAstr(rgba: string) { - let rm = rgba.match(/rgb[a]?\(([ 0-9]+)/); - let r = rm ? Number(rm[1]) : 0; - let gm = rgba.match(/rgb[a]?\([ 0-9]+,([ 0-9]+)/); - let g = gm ? Number(gm[1]) : 0; - let bm = rgba.match(/rgb[a]?\([ 0-9]+,[ 0-9]+,([ 0-9]+)/); - let b = bm ? Number(bm[1]) : 0; - let am = rgba.match(/rgba?\([ 0-9]+,[ 0-9]+,[ 0-9]+,([ .0-9]+)/); - let a = am ? Number(am[1]) : 1; + const rm = rgba.match(/rgb[a]?\(([ 0-9]+)/); + const r = rm ? Number(rm[1]) : 0; + const gm = rgba.match(/rgb[a]?\([ 0-9]+,([ 0-9]+)/); + const g = gm ? Number(gm[1]) : 0; + const bm = rgba.match(/rgb[a]?\([ 0-9]+,[ 0-9]+,([ 0-9]+)/); + const b = bm ? Number(bm[1]) : 0; + const am = rgba.match(/rgba?\([ 0-9]+,[ 0-9]+,[ 0-9]+,([ .0-9]+)/); + const a = am ? 
Number(am[1]) : 1; return { r: r, g: g, b: b, a: a }; } @@ -86,10 +86,10 @@ export namespace Utils { // s /= 100; // l /= 100; - let c = (1 - Math.abs(2 * l - 1)) * s, + const c = (1 - Math.abs(2 * l - 1)) * s, x = c * (1 - Math.abs((h / 60) % 2 - 1)), - m = l - c / 2, - r = 0, + m = l - c / 2; + let r = 0, g = 0, b = 0; if (0 <= h && h < 60) { @@ -118,10 +118,10 @@ export namespace Utils { b /= 255; // Find greatest and smallest channel values - let cmin = Math.min(r, g, b), + const cmin = Math.min(r, g, b), cmax = Math.max(r, g, b), - delta = cmax - cmin, - h = 0, + delta = cmax - cmin; + let h = 0, s = 0, l = 0; // Calculate hue @@ -173,11 +173,11 @@ export namespace Utils { function project(px: number, py: number, ax: number, ay: number, bx: number, by: number) { if (ax === bx && ay === by) return { point: { x: ax, y: ay }, left: false, dot: 0, t: 0 }; - var atob = { x: bx - ax, y: by - ay }; - var atop = { x: px - ax, y: py - ay }; - var len = atob.x * atob.x + atob.y * atob.y; + const atob = { x: bx - ax, y: by - ay }; + const atop = { x: px - ax, y: py - ay }; + const len = atob.x * atob.x + atob.y * atob.y; var dot = atop.x * atob.x + atop.y * atob.y; - var t = Math.min(1, Math.max(0, dot / len)); + const t = Math.min(1, Math.max(0, dot / len)); dot = (bx - ax) * (py - ay) - (by - ay) * (px - ax); @@ -195,38 +195,38 @@ export namespace Utils { export function closestPtBetweenRectangles(l: number, t: number, w: number, h: number, l1: number, t1: number, w1: number, h1: number, x: number, y: number) { - var r = l + w, + const r = l + w, b = t + h; - var r1 = l1 + w1, + const r1 = l1 + w1, b1 = t1 + h1; - let hsegs = [[l, r, t, l1, r1, t1], [l, r, b, l1, r1, t1], [l, r, t, l1, r1, b1], [l, r, b, l1, r1, b1]]; - let vsegs = [[l, t, b, l1, t1, b1], [r, t, b, l1, t1, b1], [l, t, b, r1, t1, b1], [r, t, b, r1, t1, b1]]; - let res = hsegs.reduce((closest, seg) => { - let res = distanceBetweenHorizontalLines(seg[0], seg[1], seg[2], seg[3], seg[4], seg[5]); + const hsegs = [[l, r, t, l1, r1, t1], [l, r, b, l1, r1, t1], [l, r, t, l1, r1, b1], [l, r, b, l1, r1, b1]]; + const vsegs = [[l, t, b, l1, t1, b1], [r, t, b, l1, t1, b1], [l, t, b, r1, t1, b1], [r, t, b, r1, t1, b1]]; + const res = hsegs.reduce((closest, seg) => { + const res = distanceBetweenHorizontalLines(seg[0], seg[1], seg[2], seg[3], seg[4], seg[5]); return (res[0] < closest[0]) ? res : closest; }, [Number.MAX_VALUE, []] as [number, number[]]); - let fres = vsegs.reduce((closest, seg) => { - let res = distanceBetweenVerticalLines(seg[0], seg[1], seg[2], seg[3], seg[4], seg[5]); + const fres = vsegs.reduce((closest, seg) => { + const res = distanceBetweenVerticalLines(seg[0], seg[1], seg[2], seg[3], seg[4], seg[5]); return (res[0] < closest[0]) ? res : closest; }, res); - let near = project(x, y, fres[1][0], fres[1][1], fres[1][2], fres[1][3]); + const near = project(x, y, fres[1][0], fres[1][1], fres[1][2], fres[1][3]); return project(near.point.x, near.point.y, fres[1][0], fres[1][1], fres[1][2], fres[1][3]); } export function getNearestPointInPerimeter(l: number, t: number, w: number, h: number, x: number, y: number) { - var r = l + w, + const r = l + w, b = t + h; - var x = clamp(x, l, r), + x = clamp(x, l, r), y = clamp(y, t, b); - var dl = Math.abs(x - l), + const dl = Math.abs(x - l), dr = Math.abs(x - r), dt = Math.abs(y - t), db = Math.abs(y - b); - var m = Math.min(dl, dr, dt, db); + const m = Math.min(dl, dr, dt, db); return (m === dt) ? [x, t] : (m === db) ? 
[x, b] : @@ -234,7 +234,7 @@ export namespace Utils { } export function GetClipboardText(): string { - var textArea = document.createElement("textarea"); + const textArea = document.createElement("textarea"); document.body.appendChild(textArea); textArea.focus(); textArea.select(); @@ -257,7 +257,7 @@ export namespace Utils { if (logFilter !== undefined && logFilter !== message.type) { return; } - let idString = (message.id || "").padStart(36, ' '); + const idString = (message.id || "").padStart(36, ' '); prefix = prefix.padEnd(16, ' '); console.log(`${prefix}: ${idString}, ${receiving ? 'receiving' : 'sending'} ${messageName} with data ${JSON.stringify(message)} `); } @@ -309,18 +309,18 @@ export function OmitKeys(obj: any, keys: string[], addKeyFunc?: (dup: any) => vo } export function WithKeys(obj: any, keys: string[], addKeyFunc?: (dup: any) => void) { - var dup: any = {}; + const dup: any = {}; keys.forEach(key => dup[key] = obj[key]); addKeyFunc && addKeyFunc(dup); return dup; } export function timenow() { - var now = new Date(); + const now = new Date(); let ampm = 'am'; let h = now.getHours(); let m: any = now.getMinutes(); - let s: any = now.getSeconds(); + const s: any = now.getSeconds(); if (h >= 12) { if (h > 12) h -= 12; ampm = 'pm'; @@ -331,8 +331,8 @@ export function timenow() { export function aggregateBounds(boundsList: { x: number, y: number, width: number, height: number }[]) { return boundsList.reduce((bounds, b) => { - var [sptX, sptY] = [b.x, b.y]; - let [bptX, bptY] = [sptX + b.width, sptY + b.height]; + const [sptX, sptY] = [b.x, b.y]; + const [bptX, bptY] = [sptX + b.width, sptY + b.height]; return { x: Math.min(sptX, bounds.x), y: Math.min(sptY, bounds.y), r: Math.max(bptX, bounds.r), b: Math.max(bptY, bounds.b) @@ -371,10 +371,11 @@ export type Without = Pick>; export type Predicate = (entry: [K, V]) => boolean; export function DeepCopy(source: Map, predicate?: Predicate) { - let deepCopy = new Map(); - let entries = source.entries(), next = entries.next(); + const deepCopy = new Map(); + const entries = source.entries(); + let next = entries.next(); while (!next.done) { - let entry = next.value; + const entry = next.value; if (!predicate || predicate(entry)) { deepCopy.set(entry[0], entry[1]); } @@ -427,13 +428,13 @@ export function smoothScroll(duration: number, element: HTMLElement, to: number) animateScroll(); } export function addStyleSheet(styleType: string = "text/css") { - let style = document.createElement("style"); + const style = document.createElement("style"); style.type = styleType; - var sheets = document.head.appendChild(style); + const sheets = document.head.appendChild(style); return (sheets as any).sheet; } export function addStyleSheetRule(sheet: any, selector: any, css: any) { - var propText = typeof css === "string" ? css : Object.keys(css).map(p => p + ":" + (p === "content" ? "'" + css[p] + "'" : css[p])).join(";"); + const propText = typeof css === "string" ? css : Object.keys(css).map(p => p + ":" + (p === "content" ? "'" + css[p] + "'" : css[p])).join(";"); return sheet.insertRule("." 
+ selector + "{" + propText + "}", sheet.cssRules.length); } export function removeStyleSheetRule(sheet: any, rule: number) { diff --git a/src/client/DocServer.ts b/src/client/DocServer.ts index 2cec1046b..e4b183715 100644 --- a/src/client/DocServer.ts +++ b/src/client/DocServer.ts @@ -1,5 +1,5 @@ import * as OpenSocket from 'socket.io-client'; -import { MessageStore, Diff, YoutubeQueryTypes } from "./../server/Message"; +import { MessageStore, YoutubeQueryTypes } from "./../server/Message"; import { Opt, Doc } from '../new_fields/Doc'; import { Utils, emptyFunction } from '../Utils'; import { SerializationHelper } from './util/SerializationHelper'; @@ -148,7 +148,7 @@ export namespace DocServer { // an initial pass through the cache to determine whether the document needs to be fetched, // is already in the process of being fetched or already exists in the // cache - let cached = _cache[id]; + const cached = _cache[id]; if (cached === undefined) { // NOT CACHED => we'll have to send a request to the server @@ -195,7 +195,7 @@ export namespace DocServer { } export async function getYoutubeChannels() { - let apiKey = await Utils.EmitCallback(_socket, MessageStore.YoutubeApiQuery, { type: YoutubeQueryTypes.Channels }); + const apiKey = await Utils.EmitCallback(_socket, MessageStore.YoutubeApiQuery, { type: YoutubeQueryTypes.Channels }); return apiKey; } @@ -255,7 +255,7 @@ export namespace DocServer { for (const field of fields) { if (field !== undefined) { // deserialize - let prom = SerializationHelper.Deserialize(field).then(deserialized => { + const prom = SerializationHelper.Deserialize(field).then(deserialized => { fieldMap[field.id] = deserialized; //overwrite or delete any promises (that we inserted as flags @@ -411,7 +411,7 @@ export namespace DocServer { } let _RespondToUpdate = _respondToUpdateImpl; - let _respondToDelete = _respondToDeleteImpl; + const _respondToDelete = _respondToDeleteImpl; function respondToUpdate(diff: any) { _RespondToUpdate(diff); diff --git a/src/client/Network.ts b/src/client/Network.ts index f9ef27267..ccf60f199 100644 --- a/src/client/Network.ts +++ b/src/client/Network.ts @@ -8,7 +8,7 @@ export namespace Networking { } export async function PostToServer(relativeRoute: string, body?: any) { - let options = { + const options = { uri: Utils.prepend(relativeRoute), method: "POST", body, diff --git a/src/client/apis/GoogleAuthenticationManager.tsx b/src/client/apis/GoogleAuthenticationManager.tsx index ae77c4b7b..ce1277667 100644 --- a/src/client/apis/GoogleAuthenticationManager.tsx +++ b/src/client/apis/GoogleAuthenticationManager.tsx @@ -30,7 +30,7 @@ export default class GoogleAuthenticationManager extends React.Component<{}> { } public fetchOrGenerateAccessToken = async () => { - let response = await Networking.FetchFromServer("/readGoogleAccessToken"); + const response = await Networking.FetchFromServer("/readGoogleAccessToken"); // if this is an authentication url, activate the UI to register the new access token if (new RegExp(AuthenticationUrl).test(response)) { this.isOpen = true; diff --git a/src/client/apis/google_docs/GoogleApiClientUtils.ts b/src/client/apis/google_docs/GoogleApiClientUtils.ts index 26c7f8d2e..d2a79f189 100644 --- a/src/client/apis/google_docs/GoogleApiClientUtils.ts +++ b/src/client/apis/google_docs/GoogleApiClientUtils.ts @@ -1,4 +1,4 @@ -import { docs_v1, slides_v1 } from "googleapis"; +import { docs_v1 } from "googleapis"; import { Opt } from "../../../new_fields/Doc"; import { isArray } from "util"; import { EditorState } 
from "prosemirror-state"; @@ -94,7 +94,7 @@ export namespace GoogleApiClientUtils { export type ExtractResult = { text: string, paragraphs: DeconstructedParagraph[] }; export const extractText = (document: docs_v1.Schema$Document, removeNewlines = false): ExtractResult => { - let paragraphs = extractParagraphs(document); + const paragraphs = extractParagraphs(document); let text = paragraphs.map(paragraph => paragraph.contents.filter(content => !("inlineObjectId" in content)).map(run => run as docs_v1.Schema$TextRun).join("")).join(""); text = text.substring(0, text.length - 1); removeNewlines && text.ReplaceAll("\n", ""); @@ -107,14 +107,14 @@ export namespace GoogleApiClientUtils { const fragments: DeconstructedParagraph[] = []; if (document.body && document.body.content) { for (const element of document.body.content) { - let runs: ContentArray = []; + const runs: ContentArray = []; let bullet: Opt; if (element.paragraph) { if (element.paragraph.elements) { for (const inner of element.paragraph.elements) { if (inner) { if (inner.textRun) { - let run = inner.textRun; + const run = inner.textRun; (run.content || !filterEmpty) && runs.push(inner.textRun); } else if (inner.inlineObjectElement) { runs.push(inner.inlineObjectElement); @@ -182,8 +182,8 @@ export namespace GoogleApiClientUtils { export const read = async (options: ReadOptions): Promise> => { return retrieve({ documentId: options.documentId }).then(document => { if (document) { - let title = document.title!; - let body = Utils.extractText(document, options.removeNewlines).text; + const title = document.title!; + const body = Utils.extractText(document, options.removeNewlines).text; return { title, body }; } }); @@ -192,7 +192,7 @@ export namespace GoogleApiClientUtils { export const readLines = async (options: ReadOptions): Promise> => { return retrieve({ documentId: options.documentId }).then(document => { if (document) { - let title = document.title; + const title = document.title; let bodyLines = Utils.extractText(document).text.split("\n"); options.removeNewlines && (bodyLines = bodyLines.filter(line => line.length)); return { title, bodyLines }; @@ -201,7 +201,7 @@ export namespace GoogleApiClientUtils { }; export const setStyle = async (options: UpdateOptions) => { - let replies: any = await update({ + const replies: any = await update({ documentId: options.documentId, requests: options.requests }); @@ -221,7 +221,7 @@ export namespace GoogleApiClientUtils { let index = options.index; const mode = options.mode; if (!(index && mode === WriteMode.Insert)) { - let schema = await retrieve({ documentId }); + const schema = await retrieve({ documentId }); if (!schema || !(index = Utils.endOf(schema))) { return undefined; } @@ -248,7 +248,7 @@ export namespace GoogleApiClientUtils { return undefined; } requests.push(...options.content.requests); - let replies: any = await update({ documentId: documentId, requests }); + const replies: any = await update({ documentId: documentId, requests }); if ("errors" in replies) { console.log("Write operation failed:"); console.log(replies.errors.map((error: any) => error.message)); diff --git a/src/client/apis/google_docs/GooglePhotosClientUtils.ts b/src/client/apis/google_docs/GooglePhotosClientUtils.ts index bf8897061..966d8053a 100644 --- a/src/client/apis/google_docs/GooglePhotosClientUtils.ts +++ b/src/client/apis/google_docs/GooglePhotosClientUtils.ts @@ -128,10 +128,10 @@ export namespace GooglePhotos { export const CollectionFromSearch = async (constructor: CollectionConstructor, 
requested: Opt>): Promise => { await GoogleAuthenticationManager.Instance.fetchOrGenerateAccessToken(); - let response = await Query.ContentSearch(requested); - let uploads = await Transactions.WriteMediaItemsToServer(response); + const response = await Query.ContentSearch(requested); + const uploads = await Transactions.WriteMediaItemsToServer(response); const children = uploads.map((upload: Transactions.UploadInformation) => { - let document = Docs.Create.ImageDocument(Utils.fileUrl(upload.fileNames.clean)); + const document = Docs.Create.ImageDocument(Utils.fileUrl(upload.fileNames.clean)); document.fillColumn = true; document.contentSize = upload.contentSize; return document; @@ -157,12 +157,12 @@ export namespace GooglePhotos { const images = (await DocListCastAsync(collection.data))!.map(Doc.GetProto); images && images.forEach(image => tagMapping.set(image[Id], ContentCategories.NONE)); const values = Object.values(ContentCategories); - for (let value of values) { + for (const value of values) { if (value !== ContentCategories.NONE) { const results = await ContentSearch({ included: [value] }); if (results.mediaItems) { const ids = results.mediaItems.map(item => item.id); - for (let id of ids) { + for (const id of ids) { const image = await Cast(idMapping[id], Doc); if (image) { const key = image[Id]; @@ -220,9 +220,9 @@ export namespace GooglePhotos { export const AlbumSearch = async (albumId: string, pageSize = 100): Promise => { const photos = await endpoint(); - let mediaItems: MediaItem[] = []; + const mediaItems: MediaItem[] = []; let nextPageTokenStored: Opt = undefined; - let found = 0; + const found = 0; do { const response: any = await photos.mediaItems.search(albumId, pageSize, nextPageTokenStored); mediaItems.push(...response.mediaItems); @@ -332,7 +332,7 @@ export namespace GooglePhotos { album = await Create.Album(album.title); } const media: MediaInput[] = []; - for (let source of sources) { + for (const source of sources) { const data = Cast(Doc.GetProto(source).data, ImageField); if (!data) { return; diff --git a/src/client/apis/youtube/YoutubeBox.tsx b/src/client/apis/youtube/YoutubeBox.tsx index bed812852..fd3d9e2f1 100644 --- a/src/client/apis/youtube/YoutubeBox.tsx +++ b/src/client/apis/youtube/YoutubeBox.tsx @@ -48,44 +48,44 @@ export class YoutubeBox extends React.Component { */ async componentWillMount() { //DocServer.getYoutubeChannels(); - let castedSearchBackUp = Cast(this.props.Document.cachedSearchResults, Doc); - let awaitedBackUp = await castedSearchBackUp; - let castedDetailBackUp = Cast(this.props.Document.cachedDetails, Doc); - let awaitedDetails = await castedDetailBackUp; + const castedSearchBackUp = Cast(this.props.Document.cachedSearchResults, Doc); + const awaitedBackUp = await castedSearchBackUp; + const castedDetailBackUp = Cast(this.props.Document.cachedDetails, Doc); + const awaitedDetails = await castedDetailBackUp; if (awaitedBackUp) { - let jsonList = await DocListCastAsync(awaitedBackUp.json); - let jsonDetailList = await DocListCastAsync(awaitedDetails!.json); + const jsonList = await DocListCastAsync(awaitedBackUp.json); + const jsonDetailList = await DocListCastAsync(awaitedDetails!.json); if (jsonList!.length !== 0) { runInAction(() => this.searchResultsFound = true); let index = 0; //getting the necessary information from backUps and building templates that will be used to map in render - for (let video of jsonList!) 
{ - - let videoId = await Cast(video.id, Doc); - let id = StrCast(videoId!.videoId); - let snippet = await Cast(video.snippet, Doc); - let videoTitle = this.filterYoutubeTitleResult(StrCast(snippet!.title)); - let thumbnail = await Cast(snippet!.thumbnails, Doc); - let thumbnailMedium = await Cast(thumbnail!.medium, Doc); - let thumbnailUrl = StrCast(thumbnailMedium!.url); - let videoDescription = StrCast(snippet!.description); - let pusblishDate = (this.roundPublishTime(StrCast(snippet!.publishedAt)))!; - let channelTitle = StrCast(snippet!.channelTitle); + for (const video of jsonList!) { + + const videoId = await Cast(video.id, Doc); + const id = StrCast(videoId!.videoId); + const snippet = await Cast(video.snippet, Doc); + const videoTitle = this.filterYoutubeTitleResult(StrCast(snippet!.title)); + const thumbnail = await Cast(snippet!.thumbnails, Doc); + const thumbnailMedium = await Cast(thumbnail!.medium, Doc); + const thumbnailUrl = StrCast(thumbnailMedium!.url); + const videoDescription = StrCast(snippet!.description); + const pusblishDate = (this.roundPublishTime(StrCast(snippet!.publishedAt)))!; + const channelTitle = StrCast(snippet!.channelTitle); let duration: string = ""; let viewCount: string = ""; if (jsonDetailList!.length !== 0) { - let contentDetails = await Cast(jsonDetailList![index].contentDetails, Doc); - let statistics = await Cast(jsonDetailList![index].statistics, Doc); + const contentDetails = await Cast(jsonDetailList![index].contentDetails, Doc); + const statistics = await Cast(jsonDetailList![index].statistics, Doc); duration = this.convertIsoTimeToDuration(StrCast(contentDetails!.duration)); viewCount = this.abbreviateViewCount(parseInt(StrCast(statistics!.viewCount)))!; } index = index + 1; - let newTemplate: VideoTemplate = { videoId: id, videoTitle: videoTitle, thumbnailUrl: thumbnailUrl, publishDate: pusblishDate, channelTitle: channelTitle, videoDescription: videoDescription, duration: duration, viewCount: viewCount }; + const newTemplate: VideoTemplate = { videoId: id, videoTitle: videoTitle, thumbnailUrl: thumbnailUrl, publishDate: pusblishDate, channelTitle: channelTitle, videoDescription: videoDescription, duration: duration, viewCount: viewCount }; runInAction(() => this.curVideoTemplates.push(newTemplate)); } } @@ -115,7 +115,7 @@ export class YoutubeBox extends React.Component { */ onEnterKeyDown = (e: React.KeyboardEvent) => { if (e.keyCode === 13) { - let submittedTitle = this.YoutubeSearchElement!.value; + const submittedTitle = this.YoutubeSearchElement!.value; this.YoutubeSearchElement!.value = ""; this.YoutubeSearchElement!.blur(); DocServer.getYoutubeVideos(submittedTitle, this.processesVideoResults); @@ -184,23 +184,23 @@ export class YoutubeBox extends React.Component { * difference between today's date and that date, in terms of "ago" to imitate youtube. 
*/ roundPublishTime = (publishTime: string) => { - let date = new Date(publishTime).getTime(); - let curDate = new Date().getTime(); - let timeDif = curDate - date; - let totalSeconds = timeDif / 1000; - let totalMin = totalSeconds / 60; - let totalHours = totalMin / 60; - let totalDays = totalHours / 24; - let totalMonths = totalDays / 30.417; - let totalYears = totalMonths / 12; - - - let truncYears = Math.trunc(totalYears); - let truncMonths = Math.trunc(totalMonths); - let truncDays = Math.trunc(totalDays); - let truncHours = Math.trunc(totalHours); - let truncMin = Math.trunc(totalMin); - let truncSec = Math.trunc(totalSeconds); + const date = new Date(publishTime).getTime(); + const curDate = new Date().getTime(); + const timeDif = curDate - date; + const totalSeconds = timeDif / 1000; + const totalMin = totalSeconds / 60; + const totalHours = totalMin / 60; + const totalDays = totalHours / 24; + const totalMonths = totalDays / 30.417; + const totalYears = totalMonths / 12; + + + const truncYears = Math.trunc(totalYears); + const truncMonths = Math.trunc(totalMonths); + const truncDays = Math.trunc(totalDays); + const truncHours = Math.trunc(totalHours); + const truncMin = Math.trunc(totalMin); + const truncSec = Math.trunc(totalSeconds); let pluralCase = ""; @@ -230,7 +230,7 @@ export class YoutubeBox extends React.Component { */ convertIsoTimeToDuration = (isoDur: string) => { - let convertedTime = isoDur.replace(/D|H|M/g, ":").replace(/P|T|S/g, "").split(":"); + const convertedTime = isoDur.replace(/D|H|M/g, ":").replace(/P|T|S/g, "").split(":"); if (1 === convertedTime.length) { 2 !== convertedTime[0].length && (convertedTime[0] = "0" + convertedTime[0]), convertedTime[0] = "0:" + convertedTime[0]; @@ -269,10 +269,10 @@ export class YoutubeBox extends React.Component { if (this.searchResults.length !== 0) { return
      {this.searchResults.map((video, index) => { - let filteredTitle = this.filterYoutubeTitleResult(video.snippet.title); - let channelTitle = video.snippet.channelTitle; - let videoDescription = video.snippet.description; - let pusblishDate = this.roundPublishTime(video.snippet.publishedAt); + const filteredTitle = this.filterYoutubeTitleResult(video.snippet.title); + const channelTitle = video.snippet.channelTitle; + const videoDescription = video.snippet.description; + const pusblishDate = this.roundPublishTime(video.snippet.publishedAt); let duration; let viewCount; if (this.videoDetails.length !== 0) { @@ -331,26 +331,26 @@ export class YoutubeBox extends React.Component { */ @action embedVideoOnClick = (videoId: string, filteredTitle: string) => { - let embeddedUrl = "https://www.youtube.com/embed/" + videoId; + const embeddedUrl = "https://www.youtube.com/embed/" + videoId; this.selectedVideoUrl = embeddedUrl; - let addFunction = this.props.addDocument!; - let newVideoX = NumCast(this.props.Document.x); - let newVideoY = NumCast(this.props.Document.y) + NumCast(this.props.Document.height); + const addFunction = this.props.addDocument!; + const newVideoX = NumCast(this.props.Document.x); + const newVideoY = NumCast(this.props.Document.y) + NumCast(this.props.Document.height); addFunction(Docs.Create.VideoDocument(embeddedUrl, { title: filteredTitle, width: 400, height: 315, x: newVideoX, y: newVideoY })); this.videoClicked = true; } render() { - let content = + const content =
      this.YoutubeSearchElement = e!} /> {this.renderSearchResultsOrVideo()}
      ; - let frozen = !this.props.isSelected() || DocumentDecorations.Instance.Interacting; + const frozen = !this.props.isSelected() || DocumentDecorations.Instance.Interacting; - let classname = "webBox-cont" + (this.props.isSelected() && !InkingControl.Instance.selectedTool && !DocumentDecorations.Instance.Interacting ? "-interactive" : ""); + const classname = "webBox-cont" + (this.props.isSelected() && !InkingControl.Instance.selectedTool && !DocumentDecorations.Instance.Interacting ? "-interactive" : ""); return ( <>
      diff --git a/src/client/cognitive_services/CognitiveServices.ts b/src/client/cognitive_services/CognitiveServices.ts index 5a7f5e991..02eff3b25 100644 --- a/src/client/cognitive_services/CognitiveServices.ts +++ b/src/client/cognitive_services/CognitiveServices.ts @@ -1,5 +1,5 @@ import * as request from "request-promise"; -import { Doc, Field, Opt } from "../../new_fields/Doc"; +import { Doc, Field } from "../../new_fields/Doc"; import { Cast } from "../../new_fields/Types"; import { Docs } from "../documents/Documents"; import { Utils } from "../../Utils"; @@ -101,14 +101,14 @@ export namespace CognitiveServices { export namespace Appliers { export const ProcessImage: AnalysisApplier = async (target: Doc, keys: string[], url: string, service: Service, converter: Converter) => { - let batch = UndoManager.StartBatch("Image Analysis"); + const batch = UndoManager.StartBatch("Image Analysis"); - let storageKey = keys[0]; + const storageKey = keys[0]; if (!url || await Cast(target[storageKey], Doc)) { return; } let toStore: any; - let results = await ExecuteQuery(service, Manager, url); + const results = await ExecuteQuery(service, Manager, url); if (!results) { toStore = "Cognitive Services could not process the given image URL."; } else { @@ -131,36 +131,32 @@ export namespace CognitiveServices { export namespace Inking { - export const Manager: APIManager = { - - converter: (inkData: InkData): string => { - let entries = inkData.entries(), next = entries.next(); - let strokes: AzureStrokeData[] = [], id = 0; - while (!next.done) { - strokes.push({ - id: id++, - points: next.value[1].pathData.map(point => `${point.x},${point.y}`).join(","), - language: "en-US" - }); - next = entries.next(); - } + export const Manager: APIManager = { + + converter: (inkData: InkData[]): string => { + let id = 0; + const strokes: AzureStrokeData[] = inkData.map(points => ({ + id: id++, + points: points.map(({ x, y }) => `${x},${y}`).join(","), + language: "en-US" + })); return JSON.stringify({ version: 1, language: "en-US", unit: "mm", - strokes: strokes + strokes }); }, requester: async (apiKey: string, body: string) => { - let xhttp = new XMLHttpRequest(); - let serverAddress = "https://api.cognitive.microsoft.com"; - let endpoint = serverAddress + "/inkrecognizer/v1.0-preview/recognize"; + const xhttp = new XMLHttpRequest(); + const serverAddress = "https://api.cognitive.microsoft.com"; + const endpoint = serverAddress + "/inkrecognizer/v1.0-preview/recognize"; - let promisified = (resolve: any, reject: any) => { + const promisified = (resolve: any, reject: any) => { xhttp.onreadystatechange = function () { if (this.readyState === 4) { - let result = xhttp.responseText; + const result = xhttp.responseText; switch (this.status) { case 200: return resolve(result); @@ -184,15 +180,15 @@ export namespace CognitiveServices { export namespace Appliers { - export const ConcatenateHandwriting: AnalysisApplier = async (target: Doc, keys: string[], inkData: InkData) => { - let batch = UndoManager.StartBatch("Ink Analysis"); + export const ConcatenateHandwriting: AnalysisApplier = async (target: Doc, keys: string[], inkData: InkData[]) => { + const batch = UndoManager.StartBatch("Ink Analysis"); let results = await ExecuteQuery(Service.Handwriting, Manager, inkData); if (results) { results.recognitionUnits && (results = results.recognitionUnits); target[keys[0]] = Docs.Get.DocumentHierarchyFromJson(results, "Ink Analysis"); - let recognizedText = results.map((item: any) => item.recognizedText); - let 
individualWords = recognizedText.filter((text: string) => text && text.split(" ").length === 1); + const recognizedText = results.map((item: any) => item.recognizedText); + const individualWords = recognizedText.filter((text: string) => text && text.split(" ").length === 1); target[keys[1]] = individualWords.join(" "); } diff --git a/src/client/documents/Documents.ts b/src/client/documents/Documents.ts index d1e3ea708..e0f2858ba 100644 --- a/src/client/documents/Documents.ts +++ b/src/client/documents/Documents.ts @@ -35,10 +35,10 @@ import { CollectionDockingView } from "../views/collections/CollectionDockingVie import { LinkManager } from "../util/LinkManager"; import { DocumentManager } from "../util/DocumentManager"; import DirectoryImportBox from "../util/Import & Export/DirectoryImportBox"; -import { Scripting, CompileScript } from "../util/Scripting"; +import { Scripting } from "../util/Scripting"; import { ButtonBox } from "../views/nodes/ButtonBox"; import { FontIconBox } from "../views/nodes/FontIconBox"; -import { SchemaHeaderField, RandomPastel } from "../../new_fields/SchemaHeaderField"; +import { SchemaHeaderField } from "../../new_fields/SchemaHeaderField"; import { PresBox } from "../views/nodes/PresBox"; import { ComputedField, ScriptField } from "../../new_fields/ScriptField"; import { ProxyField } from "../../new_fields/Proxy"; @@ -50,8 +50,8 @@ import { ColorBox } from "../views/nodes/ColorBox"; import { DocuLinkBox } from "../views/nodes/DocuLinkBox"; import { InkingStroke } from "../views/InkingStroke"; import { InkField } from "../../new_fields/InkField"; -var requestImageSize = require('../util/request-image-size'); -var path = require('path'); +const requestImageSize = require('../util/request-image-size'); +const path = require('path'); export interface DocumentOptions { x?: number; @@ -239,16 +239,16 @@ export namespace Docs { ProxyField.initPlugin(); ComputedField.initPlugin(); // non-guid string ids for each document prototype - let prototypeIds = Object.values(DocumentType).filter(type => type !== DocumentType.NONE).map(type => type + suffix); + const prototypeIds = Object.values(DocumentType).filter(type => type !== DocumentType.NONE).map(type => type + suffix); // fetch the actual prototype documents from the server - let actualProtos = await DocServer.GetRefFields(prototypeIds); + const actualProtos = await DocServer.GetRefFields(prototypeIds); // update this object to include any default values: DocumentOptions for all prototypes prototypeIds.map(id => { - let existing = actualProtos[id] as Doc; - let type = id.replace(suffix, "") as DocumentType; + const existing = actualProtos[id] as Doc; + const type = id.replace(suffix, "") as DocumentType; // get or create prototype of the specified type... 
- let target = existing || buildPrototype(type, id); + const target = existing || buildPrototype(type, id); // ...and set it if not undefined (can be undefined only if TemplateMap does not contain // an entry dedicated to the given DocumentType) target && PrototypeMap.set(type, target); @@ -287,17 +287,17 @@ export namespace Docs { */ function buildPrototype(type: DocumentType, prototypeId: string): Opt { // load template from type - let template = TemplateMap.get(type); + const template = TemplateMap.get(type); if (!template) { return undefined; } - let layout = template.layout; + const layout = template.layout; // create title - let upper = suffix.toUpperCase(); - let title = prototypeId.toUpperCase().replace(upper, `_${upper}`); + const upper = suffix.toUpperCase(); + const title = prototypeId.toUpperCase().replace(upper, `_${upper}`); // synthesize the default options, the type and title from computed values and // whatever options pertain to this specific prototype - let options = { title, type, baseProto: true, ...defaultOptions, ...(template.options || {}) }; + const options = { title, type, baseProto: true, ...defaultOptions, ...(template.options || {}) }; options.layout = layout.view.LayoutString(layout.dataField); return Doc.assign(new Doc(prototypeId, true), { ...options }); } @@ -343,8 +343,8 @@ export namespace Docs { protoProps.isPrototype = true; - let dataDoc = MakeDataDelegate(proto, protoProps, data); - let viewDoc = Doc.MakeDelegate(dataDoc, delegId); + const dataDoc = MakeDataDelegate(proto, protoProps, data); + const viewDoc = Doc.MakeDelegate(dataDoc, delegId); AudioBox.ActiveRecordings.map(d => DocUtils.MakeLink({ doc: viewDoc }, { doc: d }, "audio link", "link to audio: " + d.title)); @@ -370,16 +370,16 @@ export namespace Docs { } export function ImageDocument(url: string, options: DocumentOptions = {}) { - let imgField = new ImageField(new URL(url)); - let inst = InstanceFromProto(Prototypes.get(DocumentType.IMG), imgField, { title: path.basename(url), ...options }); + const imgField = new ImageField(new URL(url)); + const inst = InstanceFromProto(Prototypes.get(DocumentType.IMG), imgField, { title: path.basename(url), ...options }); let target = imgField.url.href; if (new RegExp(window.location.origin).test(target)) { - let extension = path.extname(target); + const extension = path.extname(target); target = `${target.substring(0, target.length - extension.length)}_o${extension}`; } requestImageSize(target) .then((size: any) => { - let aspect = size.height / size.width; + const aspect = size.height / size.width; if (!inst.nativeWidth) { inst.nativeWidth = size.width; } @@ -423,7 +423,7 @@ export namespace Docs { } export function InkDocument(color: string, tool: number, strokeWidth: number, points: { x: number, y: number }[], options: DocumentOptions = {}) { - let doc = InstanceFromProto(Prototypes.get(DocumentType.INK), new InkField(points), options); + const doc = InstanceFromProto(Prototypes.get(DocumentType.INK), new InkField(points), options); doc.color = color; doc.strokeWidth = strokeWidth; doc.tool = tool; @@ -439,12 +439,12 @@ export namespace Docs { } export async function DBDocument(url: string, options: DocumentOptions = {}, columnOptions: DocumentOptions = {}) { - let schemaName = options.title ? options.title : "-no schema-"; - let ctlog = await Gateway.Instance.GetSchema(url, schemaName); + const schemaName = options.title ? 
options.title : "-no schema-"; + const ctlog = await Gateway.Instance.GetSchema(url, schemaName); if (ctlog && ctlog.schemas) { - let schema = ctlog.schemas[0]; - let schemaDoc = Docs.Create.TreeDocument([], { ...options, nativeWidth: undefined, nativeHeight: undefined, width: 150, height: 100, title: schema.displayName! }); - let schemaDocuments = Cast(schemaDoc.data, listSpec(Doc), []); + const schema = ctlog.schemas[0]; + const schemaDoc = Docs.Create.TreeDocument([], { ...options, nativeWidth: undefined, nativeHeight: undefined, width: 150, height: 100, title: schema.displayName! }); + const schemaDocuments = Cast(schemaDoc.data, listSpec(Doc), []); if (!schemaDocuments) { return; } @@ -455,8 +455,8 @@ export namespace Docs { if (field instanceof Doc) { docs.push(field); } else { - var atmod = new ColumnAttributeModel(attr); - let histoOp = new HistogramOperation(schema.displayName!, + const atmod = new ColumnAttributeModel(attr); + const histoOp = new HistogramOperation(schema.displayName!, new AttributeTransformationModel(atmod, AggregateFunction.None), new AttributeTransformationModel(atmod, AggregateFunction.Count), new AttributeTransformationModel(atmod, AggregateFunction.Count)); @@ -523,7 +523,7 @@ export namespace Docs { } export function DockDocument(documents: Array, config: string, options: DocumentOptions, id?: string) { - let inst = InstanceFromProto(Prototypes.get(DocumentType.COL), new List(documents), { ...options, viewType: CollectionViewType.Docking, dockingConfig: config }, id); + const inst = InstanceFromProto(Prototypes.get(DocumentType.COL), new List(documents), { ...options, viewType: CollectionViewType.Docking, dockingConfig: config }, id); Doc.GetProto(inst).data = new List(documents); return inst; } @@ -538,7 +538,7 @@ export namespace Docs { }; export function StandardCollectionDockingDocument(configs: Array, options: DocumentOptions, id?: string, type: string = "row") { - let layoutConfig = { + const layoutConfig = { content: [ { type: type, @@ -603,7 +603,8 @@ export namespace Docs { * might involve arbitrary recursion (since toField might itself call convertObject) */ const convertObject = (object: any, title?: string): Doc => { - let target = new Doc(), result: Opt; + const target = new Doc(); + let result: Opt; Object.keys(object).map(key => (result = toField(object[key], key)) && (target[key] = result)); title && !target.title && (target.title = title); return target; @@ -617,7 +618,8 @@ export namespace Docs { * might involve arbitrary recursion (since toField might itself call convertList) */ const convertList = (list: Array): List => { - let target = new List(), result: Opt; + const target = new List(); + let result: Opt; list.map(item => (result = toField(item)) && target.push(result)); return target; }; @@ -658,11 +660,11 @@ export namespace Docs { } if (type.indexOf("html") !== -1) { if (path.includes(window.location.hostname)) { - let s = path.split('/'); - let id = s[s.length - 1]; + const s = path.split('/'); + const id = s[s.length - 1]; return DocServer.GetRefField(id).then(field => { if (field instanceof Doc) { - let alias = Doc.MakeAlias(field); + const alias = Doc.MakeAlias(field); alias.x = options.x || 0; alias.y = options.y || 0; alias.width = options.width || 300; @@ -699,9 +701,9 @@ export namespace DocUtils { DocListCastAsync(promoteDoc.links).then(links => { links && links.map(async link => { if (link) { - let a1 = await Cast(link.anchor1, Doc); + const a1 = await Cast(link.anchor1, Doc); if (a1 && Doc.AreProtosEqual(a1, 
promoteDoc)) link.anchor1 = copy; - let a2 = await Cast(link.anchor2, Doc); + const a2 = await Cast(link.anchor2, Doc); if (a2 && Doc.AreProtosEqual(a2, promoteDoc)) link.anchor2 = copy; LinkManager.Instance.deleteLink(link); LinkManager.Instance.addLink(link); @@ -714,11 +716,11 @@ export namespace DocUtils { } export function MakeLink(source: { doc: Doc, ctx?: Doc }, target: { doc: Doc, ctx?: Doc }, title: string = "", description: string = "", id?: string) { - let sv = DocumentManager.Instance.getDocumentView(source.doc); + const sv = DocumentManager.Instance.getDocumentView(source.doc); if (sv && sv.props.ContainingCollectionDoc === target.doc) return; if (target.doc === CurrentUserUtils.UserDocument) return undefined; - let linkDocProto = new Doc(id, true); + const linkDocProto = new Doc(id, true); UndoManager.RunInBatch(() => { linkDocProto.type = DocumentType.LINK; diff --git a/src/client/northstar/dash-fields/HistogramField.ts b/src/client/northstar/dash-fields/HistogramField.ts index e6f32272e..f3365e73d 100644 --- a/src/client/northstar/dash-fields/HistogramField.ts +++ b/src/client/northstar/dash-fields/HistogramField.ts @@ -10,7 +10,7 @@ import { Deserializable } from "../../util/SerializationHelper"; import { Copy, ToScriptString } from "../../../new_fields/FieldSymbols"; function serialize(field: HistogramField) { - let obj = OmitKeys(field, ['Links', 'BrushLinks', 'Result', 'BrushColors', 'FilterModels', 'FilterOperand']).omit; + const obj = OmitKeys(field, ['Links', 'BrushLinks', 'Result', 'BrushColors', 'FilterModels', 'FilterOperand']).omit; return obj; } @@ -19,7 +19,7 @@ function deserialize(jp: any) { let Y: AttributeTransformationModel | undefined; let V: AttributeTransformationModel | undefined; - let schema = CurrentUserUtils.GetNorthstarSchema(jp.SchemaName); + const schema = CurrentUserUtils.GetNorthstarSchema(jp.SchemaName); if (schema) { CurrentUserUtils.GetAllNorthstarColumnAttributes(schema).map(attr => { if (attr.displayName === jp.X.AttributeModel.Attribute.DisplayName) { @@ -52,8 +52,8 @@ export class HistogramField extends ObjectField { } [Copy]() { - let y = this.HistoOp; - let z = this.HistoOp.Copy; + // const y = this.HistoOp; + // const z = this.HistoOp.Copy; return new HistogramField(HistogramOperation.Duplicate(this.HistoOp)); } diff --git a/src/client/northstar/model/binRanges/QuantitativeVisualBinRange.ts b/src/client/northstar/model/binRanges/QuantitativeVisualBinRange.ts index c579c8e5f..7bc097e1d 100644 --- a/src/client/northstar/model/binRanges/QuantitativeVisualBinRange.ts +++ b/src/client/northstar/model/binRanges/QuantitativeVisualBinRange.ts @@ -37,7 +37,7 @@ export class QuantitativeVisualBinRange extends VisualBinRange { } public GetBins(): number[] { - let bins = new Array(); + const bins = new Array(); for (let v: number = this.DataBinRange.minValue!; v < this.DataBinRange.maxValue!; v += this.DataBinRange.step!) 
{ bins.push(v); @@ -46,8 +46,8 @@ export class QuantitativeVisualBinRange extends VisualBinRange { } public static Initialize(dataMinValue: number, dataMaxValue: number, targetBinNumber: number, isIntegerRange: boolean): QuantitativeVisualBinRange { - let extent = QuantitativeVisualBinRange.getExtent(dataMinValue, dataMaxValue, targetBinNumber, isIntegerRange); - let dataBinRange = new QuantitativeBinRange(); + const extent = QuantitativeVisualBinRange.getExtent(dataMinValue, dataMaxValue, targetBinNumber, isIntegerRange); + const dataBinRange = new QuantitativeBinRange(); dataBinRange.minValue = extent[0]; dataBinRange.maxValue = extent[1]; dataBinRange.step = extent[2]; @@ -60,10 +60,10 @@ export class QuantitativeVisualBinRange extends VisualBinRange { // dataMin -= 0.1; dataMax += 0.1; } - let span = dataMax - dataMin; + const span = dataMax - dataMin; let step = Math.pow(10, Math.floor(Math.log10(span / m))); - let err = m / span * step; + const err = m / span * step; if (err <= .15) { step *= 10; @@ -78,9 +78,9 @@ export class QuantitativeVisualBinRange extends VisualBinRange { if (isIntegerRange) { step = Math.ceil(step); } - let ret: number[] = new Array(3); - let minDivStep = Math.floor(dataMin / step); - let maxDivStep = Math.floor(dataMax / step); + const ret: number[] = new Array(3); + const minDivStep = Math.floor(dataMin / step); + const maxDivStep = Math.floor(dataMax / step); ret[0] = minDivStep * step; // Math.floor(Math.Round(dataMin, 8)/step)*step; ret[1] = maxDivStep * step + step; // Math.floor(Math.Round(dataMax, 8)/step)*step + step; ret[2] = step; diff --git a/src/client/northstar/operations/BaseOperation.ts b/src/client/northstar/operations/BaseOperation.ts index 0d1361ebf..013f2244e 100644 --- a/src/client/northstar/operations/BaseOperation.ts +++ b/src/client/northstar/operations/BaseOperation.ts @@ -44,12 +44,12 @@ export abstract class BaseOperation { } } - let operationParameters = this.CreateOperationParameters(); + const operationParameters = this.CreateOperationParameters(); if (this.Result) { this.Result.progress = 0; } // bcz: used to set Result to undefined, but that causes the display to blink this.Error = ""; - let salt = Math.random().toString(); + const salt = Math.random().toString(); this.RequestSalt = salt; if (!operationParameters) { @@ -59,27 +59,27 @@ export abstract class BaseOperation { this.ComputationStarted = true; //let start = performance.now(); - let promise = Gateway.Instance.StartOperation(operationParameters.toJSON()); + const promise = Gateway.Instance.StartOperation(operationParameters.toJSON()); promise.catch(err => { action(() => { this.Error = err; console.error(err); }); }); - let operationReference = await promise; + const operationReference = await promise; if (operationReference) { this.OperationReference = operationReference; - let resultParameters = new ResultParameters(); + const resultParameters = new ResultParameters(); resultParameters.operationReference = operationReference; - let pollPromise = new PollPromise(salt, operationReference); + const pollPromise = new PollPromise(salt, operationReference); BaseOperation._currentOperations.set(this.Id, pollPromise); pollPromise.Start(async () => { - let result = await Gateway.Instance.GetResult(resultParameters.toJSON()); + const result = await Gateway.Instance.GetResult(resultParameters.toJSON()); if (result instanceof ErrorResult) { throw new Error((result).message); } diff --git a/src/client/northstar/utils/MathUtil.ts b/src/client/northstar/utils/MathUtil.ts index 
4b44f40c3..5def5e704 100644 --- a/src/client/northstar/utils/MathUtil.ts +++ b/src/client/northstar/utils/MathUtil.ts @@ -92,37 +92,37 @@ export class MathUtil { public static DistToLineSegment(v: PIXIPoint, w: PIXIPoint, p: PIXIPoint) { // Return minimum distance between line segment vw and point p - var l2 = MathUtil.DistSquared(v, w); // i.e. |w-v|^2 - avoid a sqrt + const l2 = MathUtil.DistSquared(v, w); // i.e. |w-v|^2 - avoid a sqrt if (l2 === 0.0) return MathUtil.Dist(p, v); // v === w case // Consider the line extending the segment, parameterized as v + t (w - v). // We find projection of point p onto the line. // It falls where t = [(p-v) . (w-v)] / |w-v|^2 // We clamp t from [0,1] to handle points outside the segment vw. - var dot = MathUtil.Dot( + const dot = MathUtil.Dot( MathUtil.SubtractPoint(p, v), MathUtil.SubtractPoint(w, v)) / l2; - var t = Math.max(0, Math.min(1, dot)); + const t = Math.max(0, Math.min(1, dot)); // Projection falls on the segment - var projection = MathUtil.AddPoint(v, + const projection = MathUtil.AddPoint(v, MathUtil.MultiplyConstant( MathUtil.SubtractPoint(w, v), t)); return MathUtil.Dist(p, projection); } public static LineSegmentIntersection(ps1: PIXIPoint, pe1: PIXIPoint, ps2: PIXIPoint, pe2: PIXIPoint): PIXIPoint | undefined { - var a1 = pe1.y - ps1.y; - var b1 = ps1.x - pe1.x; + const a1 = pe1.y - ps1.y; + const b1 = ps1.x - pe1.x; - var a2 = pe2.y - ps2.y; - var b2 = ps2.x - pe2.x; + const a2 = pe2.y - ps2.y; + const b2 = ps2.x - pe2.x; - var delta = a1 * b2 - a2 * b1; + const delta = a1 * b2 - a2 * b1; if (delta === 0) { return undefined; } - var c2 = a2 * ps2.x + b2 * ps2.y; - var c1 = a1 * ps1.x + b1 * ps1.y; - var invdelta = 1 / delta; + const c2 = a2 * ps2.x + b2 * ps2.y; + const c1 = a1 * ps1.x + b1 * ps1.y; + const invdelta = 1 / delta; return new PIXIPoint((b2 * c1 - b1 * c2) * invdelta, (a1 * c2 - a2 * c1) * invdelta); } @@ -144,13 +144,13 @@ export class MathUtil { } public static LinePIXIRectangleIntersection(lineFrom: PIXIPoint, lineTo: PIXIPoint, rect: PIXIRectangle): Array { - var r1 = new PIXIPoint(rect.left, rect.top); - var r2 = new PIXIPoint(rect.right, rect.top); - var r3 = new PIXIPoint(rect.right, rect.bottom); - var r4 = new PIXIPoint(rect.left, rect.bottom); - var ret = new Array(); - var dist = this.Dist(lineFrom, lineTo); - var inter = this.LineSegmentIntersection(lineFrom, lineTo, r1, r2); + const r1 = new PIXIPoint(rect.left, rect.top); + const r2 = new PIXIPoint(rect.right, rect.top); + const r3 = new PIXIPoint(rect.right, rect.bottom); + const r4 = new PIXIPoint(rect.left, rect.bottom); + const ret = new Array(); + const dist = this.Dist(lineFrom, lineTo); + let inter = this.LineSegmentIntersection(lineFrom, lineTo, r1, r2); if (inter && this.PointInPIXIRectangle(inter, rect) && this.Dist(inter, lineFrom) < dist && this.Dist(inter, lineTo) < dist) { ret.push(inter); @@ -190,7 +190,7 @@ export class MathUtil { } public static Normalize(p1: PIXIPoint) { - var d = this.Length(p1); + const d = this.Length(p1); return new PIXIPoint(p1.x / d, p1.y / d); } @@ -236,8 +236,8 @@ export class MathUtil { } public static Combinations(chars: T[]) { - let result = new Array(); - let f = (prefix: any, chars: any) => { + const result = new Array(); + const f = (prefix: any, chars: any) => { for (let i = 0; i < chars.length; i++) { result.push(prefix.concat(chars[i])); f(prefix.concat(chars[i]), chars.slice(i + 1)); diff --git a/src/client/util/DictationManager.ts b/src/client/util/DictationManager.ts index 6bbd3d0ed..3d8f2d234 100644 
--- a/src/client/util/DictationManager.ts
+++ b/src/client/util/DictationManager.ts
@@ -11,7 +11,6 @@ import { Cast, CastCtor } from "../../new_fields/Types";
import { listSpec } from "../../new_fields/Schema";
import { AudioField, ImageField } from "../../new_fields/URLField";
import { HistogramField } from "../northstar/dash-fields/HistogramField";
-import { MainView } from "../views/MainView";
import { Utils } from "../../Utils";
import { RichTextField } from "../../new_fields/RichTextField";
import { DictationOverlay } from "../views/DictationOverlay";
@@ -48,7 +47,7 @@ export namespace DictationManager {
export const Infringed = "unable to process: dictation manager still involved in previous session";
const browser = (() => {
- let identifier = navigator.userAgent.toLowerCase();
+ const identifier = navigator.userAgent.toLowerCase();
if (identifier.indexOf("safari") >= 0) {
return "Safari";
}
@@ -90,7 +89,7 @@ export namespace DictationManager {
export const listen = async (options?: Partial) => {
let results: string | undefined;
- let overlay = options !== undefined && options.useOverlay;
+ const overlay = options !== undefined && options.useOverlay;
if (overlay) {
DictationOverlay.Instance.dictationOverlayVisible = true;
DictationOverlay.Instance.isListening = { interim: false };
@@ -102,7 +101,7 @@ export namespace DictationManager {
Utils.CopyText(results);
if (overlay) {
DictationOverlay.Instance.isListening = false;
- let execute = options && options.tryExecute;
+ const execute = options && options.tryExecute;
DictationOverlay.Instance.dictatedPhrase = execute ? results.toLowerCase() : results;
DictationOverlay.Instance.dictationSuccess = execute ? await DictationManager.Commands.execute(results) : true;
}
@@ -131,12 +130,12 @@ export namespace DictationManager {
}
isListening = true;
- let handler = options ? options.interimHandler : undefined;
- let continuous = options ? options.continuous : undefined;
- let indefinite = continuous && continuous.indefinite;
- let language = options ? options.language : undefined;
- let intra = options && options.delimiters ? options.delimiters.intra : undefined;
- let inter = options && options.delimiters ? options.delimiters.inter : undefined;
+ const handler = options ? options.interimHandler : undefined;
+ const continuous = options ? options.continuous : undefined;
+ const indefinite = continuous && continuous.indefinite;
+ const language = options ? options.language : undefined;
+ const intra = options && options.delimiters ? options.delimiters.intra : undefined;
+ const inter = options && options.delimiters ? options.delimiters.inter : undefined;
recognizer.onstart = () => console.log("initiating speech recognition session...");
recognizer.interimResults = handler !== undefined;
@@ -177,7 +176,7 @@ export namespace DictationManager {
recognizer.start();
};
- let complete = () => {
+ const complete = () => {
if (indefinite) {
current && sessionResults.push(current);
sessionResults.length && resolve(sessionResults.join(inter || interSession));
@@ -213,8 +212,8 @@ export namespace DictationManager {
};
const synthesize = (e: SpeechRecognitionEvent, delimiter?: string) => {
- let results = e.results;
- let transcripts: string[] = [];
+ const results = e.results;
+ const transcripts: string[] = [];
for (let i = 0; i < results.length; i++) {
transcripts.push(results.item(i).item(0).transcript.trim());
}
@@ -238,18 +237,18 @@ export namespace DictationManager {
export const execute = async (phrase: string) => {
return UndoManager.RunInBatch(async () => {
- let targets = SelectionManager.SelectedDocuments();
+ const targets = SelectionManager.SelectedDocuments();
if (!targets || !targets.length) {
return;
}
phrase = phrase.toLowerCase();
- let entry = Independent.get(phrase);
+ const entry = Independent.get(phrase);
if (entry) {
let success = false;
- let restrictTo = entry.restrictTo;
- for (let target of targets) {
+ const restrictTo = entry.restrictTo;
+ for (const target of targets) {
if (!restrictTo || validate(target, restrictTo)) {
await entry.action(target);
success = true;
@@ -258,14 +257,14 @@ export namespace DictationManager {
return success;
}
- for (let entry of Dependent) {
- let regex = entry.expression;
- let matches = regex.exec(phrase);
+ for (const entry of Dependent) {
+ const regex = entry.expression;
+ const matches = regex.exec(phrase);
regex.lastIndex = 0;
if (matches !== null) {
let success = false;
- let restrictTo = entry.restrictTo;
- for (let target of targets) {
+ const restrictTo = entry.restrictTo;
+ for (const target of targets) {
if (!restrictTo || validate(target, restrictTo)) {
await entry.action(target, matches);
success = true;
@@ -289,7 +288,7 @@ export namespace DictationManager {
]);
const tryCast = (view: DocumentView, type: DocumentType) => {
- let ctor = ConstructorMap.get(type);
+ const ctor = ConstructorMap.get(type);
if (!ctor) {
return false;
}
@@ -297,7 +296,7 @@ export namespace DictationManager {
};
const validate = (target: DocumentView, types: DocumentType[]) => {
- for (let type of types) {
+ for (const type of types) {
if (tryCast(target, type)) {
return true;
}
@@ -306,11 +305,11 @@ export namespace DictationManager {
};
const interpretNumber = (number: string) => {
- let initial = parseInt(number);
+ const initial = parseInt(number);
if (!isNaN(initial)) {
return initial;
}
- let converted = interpreter.wordsToNumbers(number, { fuzzy: true });
+ const converted = interpreter.wordsToNumbers(number, { fuzzy: true });
if (converted === null) {
return NaN;
}
@@ -326,20 +325,20 @@ export namespace DictationManager {
["open fields", {
action: (target: DocumentView) => {
- let kvp = Docs.Create.KVPDocument(target.props.Document, { width: 300, height: 300 });
+ const kvp = Docs.Create.KVPDocument(target.props.Document, { width: 300, height: 300 });
target.props.addDocTab(kvp, target.props.DataDoc, "onRight");
}
}],
["new outline", {
action: (target: DocumentView) => {
- let newBox = Docs.Create.TextDocument({ width: 400, height: 200, title: "My Outline" });
+ const newBox = Docs.Create.TextDocument({ width: 400, height: 200, title: "My Outline" });
newBox.autoHeight = true;
- let proto = newBox.proto!;
- let prompt = "Press alt + r to start dictating here...";
- let head = 3;
- let anchor = head + prompt.length;
- let proseMirrorState = `{"doc":{"type":"doc","content":[{"type":"bullet_list","content":[{"type":"list_item","content":[{"type":"paragraph","content":[{"type":"text","text":"${prompt}"}]}]}]}]},"selection":{"type":"text","anchor":${anchor},"head":${head}}}`;
+ const proto = newBox.proto!;
+ const prompt = "Press alt + r to start dictating here...";
+ const head = 3;
+ const anchor = head + prompt.length;
+ const proseMirrorState = `{"doc":{"type":"doc","content":[{"type":"bullet_list","content":[{"type":"list_item","content":[{"type":"paragraph","content":[{"type":"text","text":"${prompt}"}]}]}]}]},"selection":{"type":"text","anchor":${anchor},"head":${head}}}`;
proto.data = new RichTextField(proseMirrorState);
proto.backgroundColor = "#eeffff";
target.props.addDocTab(newBox, proto, "onRight");
@@ -353,10 +352,10 @@ export namespace DictationManager {
{
expression: /create (\w+) documents of type (image|nested collection)/g,
action: (target: DocumentView, matches: RegExpExecArray) => {
- let count = interpretNumber(matches[1]);
- let what = matches[2];
- let dataDoc = Doc.GetProto(target.props.Document);
- let fieldKey = "data";
+ const count = interpretNumber(matches[1]);
+ const what = matches[2];
+ const dataDoc = Doc.GetProto(target.props.Document);
+ const fieldKey = "data";
if (isNaN(count)) {
return;
}
@@ -379,7 +378,7 @@ export namespace DictationManager {
{
expression: /view as (freeform|stacking|masonry|schema|tree)/g,
action: (target: DocumentView, matches: RegExpExecArray) => {
- let mode = CollectionViewType.valueOf(matches[1]);
+ const mode = CollectionViewType.valueOf(matches[1]);
mode && (target.props.Document.viewType = mode);
},
restrictTo: [DocumentType.COL]
diff --git a/src/client/util/DocumentManager.ts b/src/client/util/DocumentManager.ts
index 346e88f40..d491cd1b1 100644
--- a/src/client/util/DocumentManager.ts
+++ b/src/client/util/DocumentManager.ts
@@ -33,7 +33,7 @@ export class DocumentManager {
//gets all views
public getDocumentViewsById(id: string) {
- let toReturn: DocumentView[] = [];
+ const toReturn: DocumentView[] = [];
DocumentManager.Instance.DocumentViews.map(view => {
if (view.props.Document[Id] === id) {
toReturn.push(view);
@@ -41,7 +41,7 @@ export class DocumentManager {
});
if (toReturn.length === 0) {
DocumentManager.Instance.DocumentViews.map(view => {
- let doc = view.props.Document.proto;
+ const doc = view.props.Document.proto;
if (doc && doc[Id] && doc[Id] === id) {
toReturn.push(view);
}
@@ -57,9 +57,9 @@ export class DocumentManager {
public getDocumentViewById(id: string, preferredCollection?: CollectionView): DocumentView | undefined {
let toReturn: DocumentView | undefined;
- let passes = preferredCollection ? [preferredCollection, undefined] : [undefined];
+ const passes = preferredCollection ? [preferredCollection, undefined] : [undefined];
- for (let pass of passes) {
+ for (const pass of passes) {
DocumentManager.Instance.DocumentViews.map(view => {
if (view.props.Document[Id] === id && (!pass || view.props.ContainingCollectionView === preferredCollection)) {
toReturn = view;
@@ -68,7 +68,7 @@ export class DocumentManager {
});
if (!toReturn) {
DocumentManager.Instance.DocumentViews.map(view => {
- let doc = view.props.Document.proto;
+ const doc = view.props.Document.proto;
if (doc && doc[Id] === id && (!pass || view.props.ContainingCollectionView === preferredCollection)) {
toReturn = view;
}
@@ -90,7 +90,7 @@ export class DocumentManager {
return views.length ? views[0] : undefined;
}
public getDocumentViews(toFind: Doc): DocumentView[] {
- let toReturn: DocumentView[] = [];
+ const toReturn: DocumentView[] = [];
DocumentManager.Instance.DocumentViews.map(view => Doc.AreProtosEqual(view.props.Document, toFind) && toReturn.push(view));
@@ -100,17 +100,17 @@ export class DocumentManager {
@computed
public get LinkedDocumentViews() {
- let pairs = DocumentManager.Instance.DocumentViews.filter(dv =>
+ const pairs = DocumentManager.Instance.DocumentViews.filter(dv =>
(dv.isSelected() || Doc.IsBrushed(dv.props.Document)) // draw links from DocumentViews that are selected or brushed OR
|| DocumentManager.Instance.DocumentViews.some(dv2 => { // Documentviews which
- let rest = DocListCast(dv2.props.Document.links).some(l => Doc.AreProtosEqual(l, dv.props.Document));// are link doc anchors
- let init = (dv2.isSelected() || Doc.IsBrushed(dv2.props.Document)) && dv2.Document.type !== DocumentType.AUDIO; // on a view that is selected or brushed
+ const rest = DocListCast(dv2.props.Document.links).some(l => Doc.AreProtosEqual(l, dv.props.Document));// are link doc anchors
+ const init = (dv2.isSelected() || Doc.IsBrushed(dv2.props.Document)) && dv2.Document.type !== DocumentType.AUDIO; // on a view that is selected or brushed
return init && rest;
})
).reduce((pairs, dv) => {
- let linksList = LinkManager.Instance.getAllRelatedLinks(dv.props.Document);
+ const linksList = LinkManager.Instance.getAllRelatedLinks(dv.props.Document);
pairs.push(...linksList.reduce((pairs, link) => {
- let linkToDoc = link && LinkManager.Instance.getOppositeAnchor(link, dv.props.Document);
+ const linkToDoc = link && LinkManager.Instance.getOppositeAnchor(link, dv.props.Document);
linkToDoc && DocumentManager.Instance.getDocumentViews(linkToDoc).map(docView1 => {
if (dv.props.Document.type !== DocumentType.LINK || dv.props.layoutKey !== docView1.props.layoutKey) {
pairs.push({ a: dv, b: docView1, l: link });
@@ -125,7 +125,7 @@ export class DocumentManager {
}
public jumpToDocument = async (targetDoc: Doc, willZoom: boolean, dockFunc?: (doc: Doc) => void, docContext?: Doc, linkId?: string, closeContextIfNotFound: boolean = false): Promise => {
- let highlight = () => {
+ const highlight = () => {
const finalDocView = DocumentManager.Instance.getFirstDocumentView(targetDoc);
finalDocView && (finalDocView.Document.scrollToLinkID = linkId);
finalDocView && Doc.linkFollowHighlight(finalDocView.props.Document);
@@ -199,12 +199,12 @@ export class DocumentManager {
@action
zoomIntoScale = (docDelegate: Doc, scale: number) => {
- let docView = DocumentManager.Instance.getDocumentView(Doc.GetProto(docDelegate));
+ const docView = DocumentManager.Instance.getDocumentView(Doc.GetProto(docDelegate));
docView && docView.props.zoomToScale(scale);
}
getScaleOfDocView = (docDelegate: Doc) => {
- let doc = Doc.GetProto(docDelegate);
+ const doc = Doc.GetProto(docDelegate);
const docView = DocumentManager.Instance.getDocumentView(doc);
if (docView) {
diff --git a/src/client/util/DragManager.ts b/src/client/util/DragManager.ts
index bbc29585c..b681387d1 100644
--- a/src/client/util/DragManager.ts
+++ b/src/client/util/DragManager.ts
@@ -1,7 +1,6 @@
import { action, runInAction } from "mobx";
import { Doc, Field } from "../../new_fields/Doc";
-import { Cast, StrCast, ScriptCast } from "../../new_fields/Types";
-import { URLField } from "../../new_fields/URLField";
+import { Cast, ScriptCast } from "../../new_fields/Types";
import { emptyFunction } from "../../Utils";
import { CollectionDockingView } from "../views/collections/CollectionDockingView";
import * as globalCssVariables from "../views/globalCssVariables.scss";
@@ -27,14 +26,14 @@ export function SetupDrag(
dontHideOnDrop?: boolean,
dragStarted?: () => void
) {
- let onRowMove = async (e: PointerEvent) => {
+ const onRowMove = async (e: PointerEvent) => {
e.stopPropagation();
e.preventDefault();
document.removeEventListener("pointermove", onRowMove);
document.removeEventListener('pointerup', onRowUp);
- let doc = await docFunc();
- var dragData = new DragManager.DocumentDragData([doc]);
+ const doc = await docFunc();
+ const dragData = new DragManager.DocumentDragData([doc]);
dragData.dropAction = dropAction;
dragData.moveDocument = moveFunc;
dragData.options = options;
@@ -42,11 +41,11 @@ export function SetupDrag(
DragManager.StartDocumentDrag([_reference.current!], dragData, e.x, e.y);
dragStarted && dragStarted();
};
- let onRowUp = (): void => {
+ const onRowUp = (): void => {
document.removeEventListener("pointermove", onRowMove);
document.removeEventListener('pointerup', onRowUp);
};
- let onItemDown = async (e: React.PointerEvent) => {
+ const onItemDown = async (e: React.PointerEvent) => {
if (e.button === 0) {
e.stopPropagation();
if (e.shiftKey && CollectionDockingView.Instance) {
@@ -74,11 +73,11 @@ function moveLinkedDocument(doc: Doc, targetCollection: Doc, addDocument: (doc:
}
export async function DragLinkAsDocument(dragEle: HTMLElement, x: number, y: number, linkDoc: Doc, sourceDoc: Doc) {
- let draggeddoc = LinkManager.Instance.getOppositeAnchor(linkDoc, sourceDoc);
+ const draggeddoc = LinkManager.Instance.getOppositeAnchor(linkDoc, sourceDoc);
if (draggeddoc) {
- let moddrag = await Cast(draggeddoc.annotationOn, Doc);
- let dragdocs = moddrag ? [moddrag] : [draggeddoc];
- let dragData = new DragManager.DocumentDragData(dragdocs);
+ const moddrag = await Cast(draggeddoc.annotationOn, Doc);
+ const dragdocs = moddrag ? [moddrag] : [draggeddoc];
+ const dragData = new DragManager.DocumentDragData(dragdocs);
dragData.moveDocument = moveLinkedDocument;
DragManager.StartLinkedDocumentDrag([dragEle], dragData, x, y, {
handlers: {
@@ -90,26 +89,26 @@ export async function DragLinkAsDocument(dragEle: HTMLElement, x: number, y: num
}
export async function DragLinksAsDocuments(dragEle: HTMLElement, x: number, y: number, sourceDoc: Doc, singleLink?: Doc) {
- let srcTarg = sourceDoc.proto;
+ const srcTarg = sourceDoc.proto;
let draggedDocs: Doc[] = [];
if (srcTarg) {
- let linkDocs = singleLink ? [singleLink] : LinkManager.Instance.getAllRelatedLinks(srcTarg);
+ const linkDocs = singleLink ? [singleLink] : LinkManager.Instance.getAllRelatedLinks(srcTarg);
if (linkDocs) {
draggedDocs = linkDocs.map(link => {
- let opp = LinkManager.Instance.getOppositeAnchor(link, sourceDoc);
+ const opp = LinkManager.Instance.getOppositeAnchor(link, sourceDoc);
if (opp) return opp;
}) as Doc[];
}
}
if (draggedDocs.length) {
- let moddrag: Doc[] = [];
+ const moddrag: Doc[] = [];
for (const draggedDoc of draggedDocs) {
- let doc = await Cast(draggedDoc.annotationOn, Doc);
+ const doc = await Cast(draggedDoc.annotationOn, Doc);
if (doc) moddrag.push(doc);
}
- let dragdocs = moddrag.length ? moddrag : draggedDocs;
- let dragData = new DragManager.DocumentDragData(dragdocs);
+ const dragdocs = moddrag.length ? moddrag : draggedDocs;
+ const dragData = new DragManager.DocumentDragData(dragdocs);
dragData.moveDocument = moveLinkedDocument;
DragManager.StartLinkedDocumentDrag([dragEle], dragData, x, y, {
handlers: {
@@ -254,11 +253,11 @@ export namespace DragManager {
}
export function StartButtonDrag(eles: HTMLElement[], script: string, title: string, vars: { [name: string]: Field }, params: string[], initialize: (button: Doc) => void, downX: number, downY: number, options?: DragOptions) {
- let dragData = new DragManager.DocumentDragData([]);
+ const dragData = new DragManager.DocumentDragData([]);
runInAction(() => StartDragFunctions.map(func => func()));
StartDrag(eles, dragData, downX, downY, options, options && options.finishDrag ? options.finishDrag : (dropData: { [id: string]: any }) => {
- let bd = Docs.Create.ButtonDocument({ width: 150, height: 50, title: title });
+ const bd = Docs.Create.ButtonDocument({ width: 150, height: 50, title: title });
bd.onClick = ScriptField.MakeScript(script);
params.map(p => Object.keys(vars).indexOf(p) !== -1 && (Doc.GetProto(bd)[p] = new PrefetchProxy(vars[p] as Doc)));
initialize && initialize(bd);
@@ -273,11 +272,11 @@ export namespace DragManager {
runInAction(() => StartDragFunctions.map(func => func()));
StartDrag(eles, dragData, downX, downY, options, (dropData: { [id: string]: any }) => {
- let droppedDocuments: Doc[] = dragData.draggedDocuments.reduce((droppedDocs: Doc[], d) => {
- let dvs = DocumentManager.Instance.getDocumentViews(d);
+ const droppedDocuments: Doc[] = dragData.draggedDocuments.reduce((droppedDocs: Doc[], d) => {
+ const dvs = DocumentManager.Instance.getDocumentViews(d);
if (dvs.length) {
- let containingView = SelectionManager.SelectedDocuments()[0] ? SelectionManager.SelectedDocuments()[0].props.ContainingCollectionView : undefined;
- let inContext = dvs.filter(dv => dv.props.ContainingCollectionView === containingView);
+ const containingView = SelectionManager.SelectedDocuments()[0] ? SelectionManager.SelectedDocuments()[0].props.ContainingCollectionView : undefined;
+ const inContext = dvs.filter(dv => dv.props.ContainingCollectionView === containingView);
if (inContext.length) {
inContext.forEach(dv => droppedDocs.push(dv.props.Document));
} else {
@@ -336,26 +335,26 @@ export namespace DragManager {
DragManager.Root().appendChild(dragDiv);
}
SelectionManager.SetIsDragging(true);
- let scaleXs: number[] = [];
- let scaleYs: number[] = [];
- let xs: number[] = [];
- let ys: number[] = [];
+ const scaleXs: number[] = [];
+ const scaleYs: number[] = [];
+ const xs: number[] = [];
+ const ys: number[] = [];
const docs = dragData instanceof DocumentDragData ? dragData.draggedDocuments : dragData instanceof AnnotationDragData ? [dragData.dragDocument] : [];
- let dragElements = eles.map(ele => {
+ const dragElements = eles.map(ele => {
const w = ele.offsetWidth, h = ele.offsetHeight;
const rect = ele.getBoundingClientRect();
const scaleX = rect.width / w, scaleY = rect.height / h;
- let x = rect.left,
+ const x = rect.left,
y = rect.top;
xs.push(x);
ys.push(y);
scaleXs.push(scaleX);
scaleYs.push(scaleY);
- let dragElement = ele.cloneNode(true) as HTMLElement;
+ const dragElement = ele.cloneNode(true) as HTMLElement;
dragElement.style.opacity = "0.7";
dragElement.style.borderRadius = getComputedStyle(ele).borderRadius;
dragElement.style.position = "absolute";
@@ -372,25 +371,25 @@ export namespace DragManager {
dragElement.style.height = `${rect.height / scaleY}px`;
if (docs.length) {
- var pdfBox = dragElement.getElementsByTagName("canvas");
- var pdfBoxSrc = ele.getElementsByTagName("canvas");
+ const pdfBox = dragElement.getElementsByTagName("canvas");
+ const pdfBoxSrc = ele.getElementsByTagName("canvas");
Array.from(pdfBox).map((pb, i) => pb.getContext('2d')!.drawImage(pdfBoxSrc[i], 0, 0));
- var pdfView = dragElement.getElementsByClassName("pdfViewer-viewer");
- var pdfViewSrc = ele.getElementsByClassName("pdfViewer-viewer");
- let tops = Array.from(pdfViewSrc).map(p => p.scrollTop);
- let oldopacity = dragElement.style.opacity;
+ const pdfView = dragElement.getElementsByClassName("pdfViewer-viewer");
+ const pdfViewSrc = ele.getElementsByClassName("pdfViewer-viewer");
+ const tops = Array.from(pdfViewSrc).map(p => p.scrollTop);
+ const oldopacity = dragElement.style.opacity;
dragElement.style.opacity = "0";
setTimeout(() => {
dragElement.style.opacity = oldopacity;
Array.from(pdfView).map((v, i) => v.scrollTo({ top: tops[i] }));
}, 0);
}
- let set = dragElement.getElementsByTagName('*');
+ const set = dragElement.getElementsByTagName('*');
if (dragElement.hasAttribute("style")) (dragElement as any).style.pointerEvents = "none";
// tslint:disable-next-line: prefer-for-of
for (let i = 0; i < set.length; i++) {
if (set[i].hasAttribute("style")) {
- let s = set[i];
+ const s = set[i];
(s as any).style.pointerEvents = "none";
}
}
@@ -429,8 +428,8 @@ export namespace DragManager {
}, dragData.droppedDocuments);
}
//TODO: Why can't we use e.movementX and e.movementY?
- let moveX = e.pageX - lastX;
- let moveY = e.pageY - lastY;
+ const moveX = e.pageX - lastX;
+ const moveY = e.pageY - lastY;
lastX = e.pageX;
lastY = e.pageY;
dragElements.map((dragElement, i) => (dragElement.style.transform =
@@ -438,11 +437,11 @@ export namespace DragManager {
);
};
- let hideDragShowOriginalElements = () => {
+ const hideDragShowOriginalElements = () => {
dragElements.map(dragElement => dragElement.parentNode === dragDiv && dragDiv.removeChild(dragElement));
eles.map(ele => ele.hidden = false);
};
- let endDrag = () => {
+ const endDrag = () => {
document.removeEventListener("pointermove", moveHandler, true);
document.removeEventListener("pointerup", upHandler);
if (options) {
@@ -466,17 +465,17 @@ export namespace DragManager {
}
function dispatchDrag(dragEles: HTMLElement[], e: PointerEvent, dragData: { [index: string]: any }, options?: DragOptions, finishDrag?: (dragData: { [index: string]: any }) => void) {
- let removed = dragData.dontHideOnDrop ? [] : dragEles.map(dragEle => {
+ const removed = dragData.dontHideOnDrop ? [] : dragEles.map(dragEle => {
// let parent = dragEle.parentElement;
// if (parent) parent.removeChild(dragEle);
- let ret = [dragEle, dragEle.style.width, dragEle.style.height];
+ const ret = [dragEle, dragEle.style.width, dragEle.style.height];
dragEle.style.width = "0";
dragEle.style.height = "0";
return ret;
});
const target = document.elementFromPoint(e.x, e.y);
removed.map(r => {
- let dragEle = r[0] as HTMLElement;
+ const dragEle = r[0] as HTMLElement;
dragEle.style.width = r[1] as string;
dragEle.style.height = r[2] as string;
// let parent = r[1];
diff --git a/src/client/util/DropConverter.ts b/src/client/util/DropConverter.ts
index 6b53333d7..b2c720d5d 100644
--- a/src/client/util/DropConverter.ts
+++ b/src/client/util/DropConverter.ts
@@ -9,10 +9,10 @@ import { ScriptField } from "../../new_fields/ScriptField";
function makeTemplate(doc: Doc): boolean {
- let layoutDoc = doc.layout instanceof Doc && doc.layout.isTemplateField ? doc.layout : doc;
- let layout = StrCast(layoutDoc.layout).match(/fieldKey={"[^"]*"}/)![0];
- let fieldKey = layout.replace('fieldKey={"', "").replace(/"}$/, "");
- let docs = DocListCast(layoutDoc[fieldKey]);
+ const layoutDoc = doc.layout instanceof Doc && doc.layout.isTemplateField ? doc.layout : doc;
+ const layout = StrCast(layoutDoc.layout).match(/fieldKey={"[^"]*"}/)![0];
+ const fieldKey = layout.replace('fieldKey={"', "").replace(/"}$/, "");
+ const docs = DocListCast(layoutDoc[fieldKey]);
let any = false;
docs.map(d => {
if (!StrCast(d.title).startsWith("-")) {
@@ -28,7 +28,7 @@ export function convertDropDataToButtons(data: DragManager.DocumentDragData) {
data && data.draggedDocuments.map((doc, i) => {
let dbox = doc;
if (!doc.onDragStart && !doc.onClick && doc.viewType !== CollectionViewType.Linear) {
- let layoutDoc = doc.layout instanceof Doc && doc.layout.isTemplateField ? doc.layout : doc;
+ const layoutDoc = doc.layout instanceof Doc && doc.layout.isTemplateField ? doc.layout : doc;
if (layoutDoc.type === DocumentType.COL) {
layoutDoc.isTemplateDoc = makeTemplate(layoutDoc);
} else {
diff --git a/src/client/util/History.ts b/src/client/util/History.ts
index 1c51236cb..545e8acb4 100644
--- a/src/client/util/History.ts
+++ b/src/client/util/History.ts
@@ -1,4 +1,4 @@
-import { Doc, Opt, Field } from "../../new_fields/Doc";
+import { Doc } from "../../new_fields/Doc";
import { DocServer } from "../DocServer";
import { MainView } from "../views/MainView";
import * as qs from 'query-string';
@@ -53,7 +53,7 @@ export namespace HistoryUtil {
}
export function getState(): ParsedUrl {
- let state = copyState(history.state);
+ const state = copyState(history.state);
state.initializers = state.initializers || {};
return state;
}
@@ -160,7 +160,7 @@ export namespace HistoryUtil {
const pathname = location.pathname.substring(1);
const search = location.search;
const opts = search.length ? qs.parse(search, { sort: false }) : {};
- let pathnameSplit = pathname.split("/");
+ const pathnameSplit = pathname.split("/");
const type = pathnameSplit[0];
diff --git a/src/client/util/Import & Export/DirectoryImportBox.tsx b/src/client/util/Import & Export/DirectoryImportBox.tsx
index 104d9e099..e6a215b2c 100644
--- a/src/client/util/Import & Export/DirectoryImportBox.tsx
+++ b/src/client/util/Import & Export/DirectoryImportBox.tsx
@@ -1,7 +1,7 @@
import "fs";
import React = require("react");
import { Doc, DocListCast, DocListCastAsync, Opt } from "../../../new_fields/Doc";
-import { action, observable, autorun, runInAction, computed, reaction, IReactionDisposer } from "mobx";
+import { action, observable, runInAction, computed, reaction, IReactionDisposer } from "mobx";
import { FieldViewProps, FieldView } from "../../views/nodes/FieldView";
import Measure, { ContentRect } from "react-measure";
import { library } from '@fortawesome/fontawesome-svg-core';
@@ -48,7 +48,7 @@ export default class DirectoryImportBox extends React.Component
constructor(props: FieldViewProps) {
super(props);
library.add(faTag, faPlus);
- let doc = this.props.Document;
+ const doc = this.props.Document;
this.editingMetadata = this.editingMetadata || false;
this.persistent = this.persistent || false;
!Cast(doc.data, listSpec(Doc)) && (doc.data = new List());
@@ -78,16 +78,16 @@ export default class DirectoryImportBox extends React.Component
this.phase = "Initializing download...";
});
- let docs: Doc[] = [];
+ const docs: Doc[] = [];
- let files = e.target.files;
+ const files = e.target.files;
if (!files || files.length === 0) return;
- let directory = (files.item(0) as any).webkitRelativePath.split("/", 1)[0];
+ const directory = (files.item(0) as any).webkitRelativePath.split("/", 1)[0];
- let validated: File[] = [];
+ const validated: File[] = [];
for (let i = 0; i < files.length; i++) {
- let file = files.item(i);
+ const file = files.item(i);
if (file && !unsupported.includes(file.type)) {
const ext = path.extname(file.name).toLowerCase();
if (AcceptibleMedia.imageFormats.includes(ext)) {
@@ -101,8 +101,8 @@ export default class DirectoryImportBox extends React.Component
this.completed = 0;
});
- let sizes: number[] = [];
- let modifiedDates: number[] = [];
+ const sizes: number[] = [];
+ const modifiedDates: number[] = [];
runInAction(() => this.phase = `Internal: uploading ${this.quota - this.completed} files to Dash...`);
@@ -136,26 +136,26 @@ export default class DirectoryImportBox extends React.Component
}));
for (let i = 0; i < docs.length; i++) {
- let doc = docs[i];
+ const doc = docs[i];
doc.size = sizes[i];
doc.modified = modifiedDates[i];
this.entries.forEach(entry => {
- let target = entry.onDataDoc ? Doc.GetProto(doc) : doc;
+ const target = entry.onDataDoc ? Doc.GetProto(doc) : doc;
target[entry.key] = entry.value;
});
}
- let doc = this.props.Document;
- let height: number = NumCast(doc.height) || 0;
- let offset: number = this.persistent ? (height === 0 ? 0 : height + 30) : 0;
- let options: DocumentOptions = {
+ const doc = this.props.Document;
+ const height: number = NumCast(doc.height) || 0;
+ const offset: number = this.persistent ? (height === 0 ? 0 : height + 30) : 0;
+ const options: DocumentOptions = {
title: `Import of ${directory}`,
width: 1105,
height: 500,
x: NumCast(doc.x),
y: NumCast(doc.y) + offset
};
- let parent = this.props.ContainingCollectionView;
+ const parent = this.props.ContainingCollectionView;
if (parent) {
let importContainer: Doc;
if (docs.length < 50) {
@@ -194,18 +194,18 @@ export default class DirectoryImportBox extends React.Component
@action
preserveCentering = (rect: ContentRect) => {
- let bounds = rect.offset!;
+ const bounds = rect.offset!;
if (bounds.width === 0 || bounds.height === 0) {
return;
}
- let offset = this.dimensions / 2;
+ const offset = this.dimensions / 2;
this.left = bounds.width / 2 - offset;
this.top = bounds.height / 2 - offset;
}
@action
addMetadataEntry = async () => {
- let entryDoc = new Doc();
+ const entryDoc = new Doc();
entryDoc.checked = false;
entryDoc.key = keyPlaceholder;
entryDoc.value = valuePlaceholder;
@@ -214,7 +214,7 @@ export default class DirectoryImportBox extends React.Component
@action
remove = async (entry: ImportMetadataEntry) => {
- let metadata = await DocListCastAsync(this.props.Document.data);
+ const metadata = await DocListCastAsync(this.props.Document.data);
if (metadata) {
let index = this.entries.indexOf(entry);
if (index !== -1) {
@@ -228,18 +228,18 @@ export default class DirectoryImportBox extends React.Component
}
render() {
- let dimensions = 50;
- let entries = DocListCast(this.props.Document.data);
- let isEditing = this.editingMetadata;
- let completed = this.completed;
- let quota = this.quota;
- let uploading = this.uploading;
- let showRemoveLabel = this.removeHover;
- let persistent = this.persistent;
+ const dimensions = 50;
+ const entries = DocListCast(this.props.Document.data);
+ const isEditing = this.editingMetadata;
+ const completed = this.completed;
+ const quota = this.quota;
+ const uploading = this.uploading;
+ const showRemoveLabel = this.removeHover;
+ const persistent = this.persistent;
let percent = `${completed / quota * 100}`;
percent = percent.split(".")[0];
percent = percent.startsWith("100") ? "99" : percent;
- let marginOffset = (percent.length === 1 ? 5 : 0) - 1.6;
+ const marginOffset = (percent.length === 1 ? 5 : 0) - 1.6;
const message = {this.phase};
const centerPiece = this.phase.includes("Google Photos") ?
}
render() {
- let keyValueStyle: React.CSSProperties = {
+ const keyValueStyle: React.CSSProperties = {
paddingLeft: 10,
width: "50%",
opacity: this.valid ?
1 : 0.5, diff --git a/src/client/util/InteractionUtils.ts b/src/client/util/InteractionUtils.ts index b7738e862..0c3de66ed 100644 --- a/src/client/util/InteractionUtils.ts +++ b/src/client/util/InteractionUtils.ts @@ -29,8 +29,8 @@ export namespace InteractionUtils { * @param pts - n-arbitrary long list of points */ export function CenterPoint(pts: React.Touch[]): { X: number, Y: number } { - let centerX = pts.map(pt => pt.clientX).reduce((a, b) => a + b, 0) / pts.length; - let centerY = pts.map(pt => pt.clientY).reduce((a, b) => a + b, 0) / pts.length; + const centerX = pts.map(pt => pt.clientX).reduce((a, b) => a + b, 0) / pts.length; + const centerY = pts.map(pt => pt.clientY).reduce((a, b) => a + b, 0) / pts.length; return { X: centerX, Y: centerY }; } @@ -42,9 +42,9 @@ export namespace InteractionUtils { * @param oldPoint2 - previous point 2 */ export function Pinching(pt1: React.Touch, pt2: React.Touch, oldPoint1: React.Touch, oldPoint2: React.Touch): number { - let threshold = 4; - let oldDist = TwoPointEuclidist(oldPoint1, oldPoint2); - let newDist = TwoPointEuclidist(pt1, pt2); + const threshold = 4; + const oldDist = TwoPointEuclidist(oldPoint1, oldPoint2); + const newDist = TwoPointEuclidist(pt1, pt2); /** if they have the same sign, then we are either pinching in or out. * threshold it by 10 (it has to be pinching by at least threshold to be a valid pinch) @@ -64,12 +64,12 @@ export namespace InteractionUtils { * @param oldPoint2 - previous point 2 */ export function Pinning(pt1: React.Touch, pt2: React.Touch, oldPoint1: React.Touch, oldPoint2: React.Touch): number { - let threshold = 4; + const threshold = 4; - let pt1Dist = TwoPointEuclidist(oldPoint1, pt1); - let pt2Dist = TwoPointEuclidist(oldPoint2, pt2); + const pt1Dist = TwoPointEuclidist(oldPoint1, pt1); + const pt2Dist = TwoPointEuclidist(oldPoint2, pt2); - let pinching = Pinching(pt1, pt2, oldPoint1, oldPoint2); + const pinching = Pinching(pt1, pt2, oldPoint1, oldPoint2); if (pinching !== 0) { if ((pt1Dist < threshold && pt2Dist > threshold) || (pt1Dist > threshold && pt2Dist < threshold)) { diff --git a/src/client/util/LinkManager.ts b/src/client/util/LinkManager.ts index eedc4967d..fb6f27478 100644 --- a/src/client/util/LinkManager.ts +++ b/src/client/util/LinkManager.ts @@ -38,16 +38,16 @@ export class LinkManager { } public getAllLinks(): Doc[] { - let ldoc = LinkManager.Instance.LinkManagerDoc; + const ldoc = LinkManager.Instance.LinkManagerDoc; if (ldoc) { - let docs = DocListCast(ldoc.allLinks); + const docs = DocListCast(ldoc.allLinks); return docs; } return []; } public addLink(linkDoc: Doc): boolean { - let linkList = LinkManager.Instance.getAllLinks(); + const linkList = LinkManager.Instance.getAllLinks(); linkList.push(linkDoc); if (LinkManager.Instance.LinkManagerDoc) { LinkManager.Instance.LinkManagerDoc.allLinks = new List(linkList); @@ -57,8 +57,8 @@ export class LinkManager { } public deleteLink(linkDoc: Doc): boolean { - let linkList = LinkManager.Instance.getAllLinks(); - let index = LinkManager.Instance.getAllLinks().indexOf(linkDoc); + const linkList = LinkManager.Instance.getAllLinks(); + const index = LinkManager.Instance.getAllLinks().indexOf(linkDoc); if (index > -1) { linkList.splice(index, 1); if (LinkManager.Instance.LinkManagerDoc) { @@ -71,23 +71,23 @@ export class LinkManager { // finds all links that contain the given anchor public getAllRelatedLinks(anchor: Doc): Doc[] {//List { - let related = LinkManager.Instance.getAllLinks().filter(link => { - let protomatch1 = 
Doc.AreProtosEqual(anchor, Cast(link.anchor1, Doc, null)); - let protomatch2 = Doc.AreProtosEqual(anchor, Cast(link.anchor2, Doc, null)); + const related = LinkManager.Instance.getAllLinks().filter(link => { + const protomatch1 = Doc.AreProtosEqual(anchor, Cast(link.anchor1, Doc, null)); + const protomatch2 = Doc.AreProtosEqual(anchor, Cast(link.anchor2, Doc, null)); return protomatch1 || protomatch2 || Doc.AreProtosEqual(link, anchor); }); return related; } public deleteAllLinksOnAnchor(anchor: Doc) { - let related = LinkManager.Instance.getAllRelatedLinks(anchor); + const related = LinkManager.Instance.getAllRelatedLinks(anchor); related.forEach(linkDoc => LinkManager.Instance.deleteLink(linkDoc)); } public addGroupType(groupType: string): boolean { if (LinkManager.Instance.LinkManagerDoc) { LinkManager.Instance.LinkManagerDoc[groupType] = new List([]); - let groupTypes = LinkManager.Instance.getAllGroupTypes(); + const groupTypes = LinkManager.Instance.getAllGroupTypes(); groupTypes.push(groupType); LinkManager.Instance.LinkManagerDoc.allGroupTypes = new List(groupTypes); return true; @@ -99,8 +99,8 @@ export class LinkManager { public deleteGroupType(groupType: string): boolean { if (LinkManager.Instance.LinkManagerDoc) { if (LinkManager.Instance.LinkManagerDoc[groupType]) { - let groupTypes = LinkManager.Instance.getAllGroupTypes(); - let index = groupTypes.findIndex(type => type.toUpperCase() === groupType.toUpperCase()); + const groupTypes = LinkManager.Instance.getAllGroupTypes(); + const index = groupTypes.findIndex(type => type.toUpperCase() === groupType.toUpperCase()); if (index > -1) groupTypes.splice(index, 1); LinkManager.Instance.LinkManagerDoc.allGroupTypes = new List(groupTypes); LinkManager.Instance.LinkManagerDoc[groupType] = undefined; @@ -146,8 +146,8 @@ export class LinkManager { } public addGroupToAnchor(linkDoc: Doc, anchor: Doc, groupDoc: Doc, replace: boolean = false) { - let groups = LinkManager.Instance.getAnchorGroups(linkDoc, anchor); - let index = groups.findIndex(gDoc => { + const groups = LinkManager.Instance.getAnchorGroups(linkDoc, anchor); + const index = groups.findIndex(gDoc => { return StrCast(groupDoc.type).toUpperCase() === StrCast(gDoc.type).toUpperCase(); }); if (index > -1 && replace) { @@ -161,32 +161,32 @@ export class LinkManager { // removes group doc of given group type only from given anchor on given link public removeGroupFromAnchor(linkDoc: Doc, anchor: Doc, groupType: string) { - let groups = LinkManager.Instance.getAnchorGroups(linkDoc, anchor); - let newGroups = groups.filter(groupDoc => StrCast(groupDoc.type).toUpperCase() !== groupType.toUpperCase()); + const groups = LinkManager.Instance.getAnchorGroups(linkDoc, anchor); + const newGroups = groups.filter(groupDoc => StrCast(groupDoc.type).toUpperCase() !== groupType.toUpperCase()); LinkManager.Instance.setAnchorGroups(linkDoc, anchor, newGroups); } // returns map of group type to anchor's links in that group type public getRelatedGroupedLinks(anchor: Doc): Map> { - let related = this.getAllRelatedLinks(anchor); - let anchorGroups = new Map>(); + const related = this.getAllRelatedLinks(anchor); + const anchorGroups = new Map>(); related.forEach(link => { - let groups = LinkManager.Instance.getAnchorGroups(link, anchor); + const groups = LinkManager.Instance.getAnchorGroups(link, anchor); if (groups.length > 0) { groups.forEach(groupDoc => { - let groupType = StrCast(groupDoc.type); + const groupType = StrCast(groupDoc.type); if (groupType === "") { - let group = 
anchorGroups.get("*"); + const group = anchorGroups.get("*"); anchorGroups.set("*", group ? [...group, link] : [link]); } else { - let group = anchorGroups.get(groupType); + const group = anchorGroups.get(groupType); anchorGroups.set(groupType, group ? [...group, link] : [link]); } }); } else { // if link is in no groups then put it in default group - let group = anchorGroups.get("*"); + const group = anchorGroups.get("*"); anchorGroups.set("*", group ? [...group, link] : [link]); } @@ -212,11 +212,11 @@ export class LinkManager { // returns a list of all metadata docs associated with the given group type public getAllMetadataDocsInGroup(groupType: string): Array { - let md: Doc[] = []; - let allLinks = LinkManager.Instance.getAllLinks(); + const md: Doc[] = []; + const allLinks = LinkManager.Instance.getAllLinks(); allLinks.forEach(linkDoc => { - let anchor1Groups = LinkManager.Instance.getAnchorGroups(linkDoc, Cast(linkDoc.anchor1, Doc, null)); - let anchor2Groups = LinkManager.Instance.getAnchorGroups(linkDoc, Cast(linkDoc.anchor2, Doc, null)); + const anchor1Groups = LinkManager.Instance.getAnchorGroups(linkDoc, Cast(linkDoc.anchor1, Doc, null)); + const anchor2Groups = LinkManager.Instance.getAnchorGroups(linkDoc, Cast(linkDoc.anchor2, Doc, null)); anchor1Groups.forEach(groupDoc => { if (StrCast(groupDoc.type).toUpperCase() === groupType.toUpperCase()) { const meta = Cast(groupDoc.metadata, Doc, null); meta && md.push(meta); } }); anchor2Groups.forEach(groupDoc => { if (StrCast(groupDoc.type).toUpperCase() === groupType.toUpperCase()) { const meta = Cast(groupDoc.metadata, Doc, null); meta && md.push(meta); } }); }); @@ -225,8 +225,8 @@ export class LinkManager { // checks if a link with the given anchors exists public doesLinkExist(anchor1: Doc, anchor2: Doc): boolean { - let allLinks = LinkManager.Instance.getAllLinks(); - let index = allLinks.findIndex(linkDoc => { + const allLinks = LinkManager.Instance.getAllLinks(); + const index = allLinks.findIndex(linkDoc => { return (Doc.AreProtosEqual(Cast(linkDoc.anchor1, Doc, null), anchor1) && Doc.AreProtosEqual(Cast(linkDoc.anchor2, Doc, null), anchor2)) || (Doc.AreProtosEqual(Cast(linkDoc.anchor1, Doc, null), anchor2) && Doc.AreProtosEqual(Cast(linkDoc.anchor2, Doc, null), anchor1)); }); @@ -237,8 +237,8 @@ export class LinkManager { //TODO This should probably return undefined if there isn't an opposite anchor //TODO This should also await the return value of the anchor so we don't filter out promises public getOppositeAnchor(linkDoc: Doc, anchor: Doc): Doc | undefined { - let a1 = Cast(linkDoc.anchor1, Doc, null); - let a2 = Cast(linkDoc.anchor2, Doc, null); + const a1 = Cast(linkDoc.anchor1, Doc, null); + const a2 = Cast(linkDoc.anchor2, Doc, null); if (Doc.AreProtosEqual(anchor, a1)) return a2; if (Doc.AreProtosEqual(anchor, a2)) return a1; if (Doc.AreProtosEqual(anchor, linkDoc)) return linkDoc; diff --git a/src/client/util/ProsemirrorExampleTransfer.ts b/src/client/util/ProsemirrorExampleTransfer.ts index 003ff6272..f1fa6f11d 100644 --- a/src/client/util/ProsemirrorExampleTransfer.ts +++ b/src/client/util/ProsemirrorExampleTransfer.ts @@ -4,7 +4,7 @@ import { undoInputRule } from "prosemirror-inputrules"; import { Schema } from "prosemirror-model"; import { liftListItem, sinkListItem } from "./prosemirrorPatches.js"; import { splitListItem, wrapInList, } from "prosemirror-schema-list"; -import { EditorState, Transaction, TextSelection, NodeSelection } from "prosemirror-state"; +import { EditorState, Transaction, TextSelection } 
from "prosemirror-state"; import { TooltipTextMenu } from "./TooltipTextMenu"; const mac = typeof navigator !== "undefined" ? /Mac/.test(navigator.platform) : false; @@ -15,22 +15,22 @@ export let updateBullets = (tx2: Transaction, schema: Schema, mapStyle?: string) let fontSize: number | undefined = undefined; tx2.doc.descendants((node: any, offset: any, index: any) => { if (node.type === schema.nodes.ordered_list || node.type === schema.nodes.list_item) { - let path = (tx2.doc.resolve(offset) as any).path; + const path = (tx2.doc.resolve(offset) as any).path; let depth = Array.from(path).reduce((p: number, c: any) => p + (c.hasOwnProperty("type") && c.type === schema.nodes.ordered_list ? 1 : 0), 0); if (node.type === schema.nodes.ordered_list) depth++; fontSize = depth === 1 && node.attrs.setFontSize ? Number(node.attrs.setFontSize) : fontSize; - let fsize = fontSize && node.type === schema.nodes.ordered_list ? Math.max(6, fontSize - (depth - 1) * 4) : undefined; + const fsize = fontSize && node.type === schema.nodes.ordered_list ? Math.max(6, fontSize - (depth - 1) * 4) : undefined; tx2.setNodeMarkup(offset, node.type, { ...node.attrs, mapStyle: mapStyle ? mapStyle : node.attrs.mapStyle, bulletStyle: depth, inheritedFontSize: fsize }, node.marks); } }); return tx2; }; export default function buildKeymap>(schema: S, mapKeys?: KeyMap): KeyMap { - let keys: { [key: string]: any } = {}, type; + const keys: { [key: string]: any } = {}; function bind(key: string, cmd: any) { if (mapKeys) { - let mapped = mapKeys[key]; + const mapped = mapKeys[key]; if (mapped === false) return; if (mapped) key = mapped; } @@ -79,7 +79,7 @@ export default function buildKeymap>(schema: S, mapKeys?: // }); - let cmd = chainCommands(exitCode, (state, dispatch) => { + const cmd = chainCommands(exitCode, (state, dispatch) => { if (dispatch) { dispatch(state.tr.replaceSelectionWith(schema.nodes.hard_break.create()).scrollIntoView()); return true; @@ -99,7 +99,7 @@ export default function buildKeymap>(schema: S, mapKeys?: bind("Shift-Ctrl-" + i, setBlockType(schema.nodes.heading, { level: i })); } - let hr = schema.nodes.horizontal_rule; + const hr = schema.nodes.horizontal_rule; bind("Mod-_", (state: EditorState, dispatch: (tx: Transaction) => void) => { dispatch(state.tr.replaceSelectionWith(hr.create()).scrollIntoView()); return true; @@ -108,18 +108,18 @@ export default function buildKeymap>(schema: S, mapKeys?: bind("Mod-s", TooltipTextMenu.insertStar); bind("Tab", (state: EditorState, dispatch: (tx: Transaction) => void) => { - var ref = state.selection; - var range = ref.$from.blockRange(ref.$to); - var marks = state.storedMarks || (state.selection.$to.parentOffset && state.selection.$from.marks()); + const ref = state.selection; + const range = ref.$from.blockRange(ref.$to); + const marks = state.storedMarks || (state.selection.$to.parentOffset && state.selection.$from.marks()); if (!sinkListItem(schema.nodes.list_item)(state, (tx2: Transaction) => { - let tx3 = updateBullets(tx2, schema); + const tx3 = updateBullets(tx2, schema); marks && tx3.ensureMarks([...marks]); marks && tx3.setStoredMarks([...marks]); dispatch(tx3); })) { // couldn't sink into an existing list, so wrap in a new one - let newstate = state.applyTransaction(state.tr.setSelection(TextSelection.create(state.doc, range!.start, range!.end))); + const newstate = state.applyTransaction(state.tr.setSelection(TextSelection.create(state.doc, range!.start, range!.end))); if (!wrapInList(schema.nodes.ordered_list)(newstate.state, (tx2: Transaction) 
=> { - let tx3 = updateBullets(tx2, schema); + const tx3 = updateBullets(tx2, schema); // when promoting to a list, assume list will format things so don't copy the stored marks. marks && tx3.ensureMarks([...marks]); marks && tx3.setStoredMarks([...marks]); @@ -131,10 +131,10 @@ export default function buildKeymap>(schema: S, mapKeys?: }); bind("Shift-Tab", (state: EditorState, dispatch: (tx: Transaction) => void) => { - var marks = state.storedMarks || (state.selection.$to.parentOffset && state.selection.$from.marks()); + const marks = state.storedMarks || (state.selection.$to.parentOffset && state.selection.$from.marks()); if (!liftListItem(schema.nodes.list_item)(state.tr, (tx2: Transaction) => { - let tx3 = updateBullets(tx2, schema); + const tx3 = updateBullets(tx2, schema); marks && tx3.ensureMarks([...marks]); marks && tx3.setStoredMarks([...marks]); dispatch(tx3); @@ -143,14 +143,14 @@ export default function buildKeymap>(schema: S, mapKeys?: } }); - let splitMetadata = (marks: any, tx: Transaction) => { + const splitMetadata = (marks: any, tx: Transaction) => { marks && tx.ensureMarks(marks.filter((val: any) => val.type !== schema.marks.metadata && val.type !== schema.marks.metadataKey && val.type !== schema.marks.metadataVal)); marks && tx.setStoredMarks(marks.filter((val: any) => val.type !== schema.marks.metadata && val.type !== schema.marks.metadataKey && val.type !== schema.marks.metadataVal)); return tx; }; - bind("Enter", (state: EditorState, dispatch: (tx: Transaction) => void) => { - var marks = state.storedMarks || (state.selection.$to.parentOffset && state.selection.$from.marks()); - if (!splitListItem(schema.nodes.list_item)(state, (tx3: Transaction) => dispatch(tx3))) { + bind("Enter", (state: EditorState, dispatch: (tx: Transaction>) => void) => { + const marks = state.storedMarks || (state.selection.$to.parentOffset && state.selection.$from.marks()); + if (!splitListItem(schema.nodes.list_item)(state, dispatch)) { if (!splitBlockKeepMarks(state, (tx3: Transaction) => { splitMetadata(marks, tx3); if (!liftListItem(schema.nodes.list_item)(tx3, dispatch as ((tx: Transaction>) => void))) { @@ -163,18 +163,18 @@ export default function buildKeymap>(schema: S, mapKeys?: return true; }); bind("Space", (state: EditorState, dispatch: (tx: Transaction) => void) => { - var marks = state.storedMarks || (state.selection.$to.parentOffset && state.selection.$from.marks()); + const marks = state.storedMarks || (state.selection.$to.parentOffset && state.selection.$from.marks()); dispatch(splitMetadata(marks, state.tr)); return false; }); bind(":", (state: EditorState, dispatch: (tx: Transaction) => void) => { - let range = state.selection.$from.blockRange(state.selection.$to, (node: any) => { + const range = state.selection.$from.blockRange(state.selection.$to, (node: any) => { return !node.marks || !node.marks.find((m: any) => m.type === schema.marks.metadata); }); - let path = (state.doc.resolve(state.selection.from - 1) as any).path; - let spaceSeparator = path[path.length - 3].childCount > 1 ? 0 : -1; - let textsel = TextSelection.create(state.doc, range!.end - path[path.length - 3].lastChild.nodeSize + spaceSeparator, range!.end); - let text = range ? state.doc.textBetween(textsel.from, textsel.to) : ""; + const path = (state.doc.resolve(state.selection.from - 1) as any).path; + const spaceSeparator = path[path.length - 3].childCount > 1 ? 
0 : -1; + const textsel = TextSelection.create(state.doc, range!.end - path[path.length - 3].lastChild.nodeSize + spaceSeparator, range!.end); + const text = range ? state.doc.textBetween(textsel.from, textsel.to) : ""; let whitespace = text.length - 1; for (; whitespace >= 0 && text[whitespace] !== " "; whitespace--) { } if (text.endsWith(":")) { diff --git a/src/client/util/RichTextRules.ts b/src/client/util/RichTextRules.ts index f4c44e5ce..1a637df32 100644 --- a/src/client/util/RichTextRules.ts +++ b/src/client/util/RichTextRules.ts @@ -62,109 +62,109 @@ export const inpRules = { new InputRule( new RegExp(/^#([0-9]+)\s$/), (state, match, start, end) => { - let size = Number(match[1]); - let ruleProvider = FormattedTextBox.InputBoxOverlay!.props.ruleProvider; - let heading = NumCast(FormattedTextBox.InputBoxOverlay!.props.Document.heading); + const size = Number(match[1]); + const ruleProvider = FormattedTextBox.InputBoxOverlay!.props.ruleProvider; + const heading = NumCast(FormattedTextBox.InputBoxOverlay!.props.Document.heading); if (ruleProvider && heading) { - (Cast(FormattedTextBox.InputBoxOverlay!.props.Document, Doc) as Doc).heading = Number(match[1]); + (Cast(FormattedTextBox.InputBoxOverlay!.props.Document, Doc) as Doc).heading = size; return state.tr.deleteRange(start, end); } - return state.tr.deleteRange(start, end).addStoredMark(schema.marks.pFontSize.create({ fontSize: Number(match[1]) })); + return state.tr.deleteRange(start, end).addStoredMark(schema.marks.pFontSize.create({ fontSize: size })); }), new InputRule( new RegExp(/t/), (state, match, start, end) => { if (state.selection.to === state.selection.from) return null; - let node = (state.doc.resolve(start) as any).nodeAfter; + const node = (state.doc.resolve(start) as any).nodeAfter; return node ? state.tr.addMark(start, end, schema.marks.user_tag.create({ userid: Doc.CurrentUserEmail, tag: "todo", modified: Math.round(Date.now() / 1000 / 60) })) : state.tr; }), new InputRule( new RegExp(/i/), (state, match, start, end) => { if (state.selection.to === state.selection.from) return null; - let node = (state.doc.resolve(start) as any).nodeAfter; + const node = (state.doc.resolve(start) as any).nodeAfter; return node ? state.tr.addMark(start, end, schema.marks.user_tag.create({ userid: Doc.CurrentUserEmail, tag: "ignore", modified: Math.round(Date.now() / 1000 / 60) })) : state.tr; }), new InputRule( new RegExp(/\!/), (state, match, start, end) => { if (state.selection.to === state.selection.from) return null; - let node = (state.doc.resolve(start) as any).nodeAfter; + const node = (state.doc.resolve(start) as any).nodeAfter; return node ? state.tr.addMark(start, end, schema.marks.user_tag.create({ userid: Doc.CurrentUserEmail, tag: "important", modified: Math.round(Date.now() / 1000 / 60) })) : state.tr; }), new InputRule( new RegExp(/\x/), (state, match, start, end) => { if (state.selection.to === state.selection.from) return null; - let node = (state.doc.resolve(start) as any).nodeAfter; + const node = (state.doc.resolve(start) as any).nodeAfter; return node ? 
state.tr.addMark(start, end, schema.marks.user_tag.create({ userid: Doc.CurrentUserEmail, tag: "disagree", modified: Math.round(Date.now() / 1000 / 60) })) : state.tr; }), new InputRule( new RegExp(/^\^\^\s$/), (state, match, start, end) => { - let node = (state.doc.resolve(start) as any).nodeAfter; - let sm = state.storedMarks || undefined; - let ruleProvider = FormattedTextBox.InputBoxOverlay!.props.ruleProvider; - let heading = NumCast(FormattedTextBox.InputBoxOverlay!.props.Document.heading); + const node = (state.doc.resolve(start) as any).nodeAfter; + const sm = state.storedMarks || undefined; + const ruleProvider = FormattedTextBox.InputBoxOverlay!.props.ruleProvider; + const heading = NumCast(FormattedTextBox.InputBoxOverlay!.props.Document.heading); if (ruleProvider && heading) { ruleProvider["ruleAlign_" + heading] = "center"; return node ? state.tr.deleteRange(start, end).setStoredMarks([...node.marks, ...(sm ? sm : [])]) : state.tr; } - let replaced = node ? state.tr.replaceRangeWith(start, end, schema.nodes.paragraph.create({ align: "center" })).setStoredMarks([...node.marks, ...(sm ? sm : [])]) : + const replaced = node ? state.tr.replaceRangeWith(start, end, schema.nodes.paragraph.create({ align: "center" })).setStoredMarks([...node.marks, ...(sm ? sm : [])]) : state.tr; return replaced.setSelection(new TextSelection(replaced.doc.resolve(end - 2))); }), new InputRule( new RegExp(/^\[\[\s$/), (state, match, start, end) => { - let node = (state.doc.resolve(start) as any).nodeAfter; - let sm = state.storedMarks || undefined; - let ruleProvider = FormattedTextBox.InputBoxOverlay!.props.ruleProvider; - let heading = NumCast(FormattedTextBox.InputBoxOverlay!.props.Document.heading); + const node = (state.doc.resolve(start) as any).nodeAfter; + const sm = state.storedMarks || undefined; + const ruleProvider = FormattedTextBox.InputBoxOverlay!.props.ruleProvider; + const heading = NumCast(FormattedTextBox.InputBoxOverlay!.props.Document.heading); if (ruleProvider && heading) { ruleProvider["ruleAlign_" + heading] = "left"; return node ? state.tr.deleteRange(start, end).setStoredMarks([...node.marks, ...(sm ? sm : [])]) : state.tr; } - let replaced = node ? state.tr.replaceRangeWith(start, end, schema.nodes.paragraph.create({ align: "left" })).setStoredMarks([...node.marks, ...(sm ? sm : [])]) : + const replaced = node ? state.tr.replaceRangeWith(start, end, schema.nodes.paragraph.create({ align: "left" })).setStoredMarks([...node.marks, ...(sm ? sm : [])]) : state.tr; return replaced.setSelection(new TextSelection(replaced.doc.resolve(end - 2))); }), new InputRule( new RegExp(/^\]\]\s$/), (state, match, start, end) => { - let node = (state.doc.resolve(start) as any).nodeAfter; - let sm = state.storedMarks || undefined; - let ruleProvider = FormattedTextBox.InputBoxOverlay!.props.ruleProvider; - let heading = NumCast(FormattedTextBox.InputBoxOverlay!.props.Document.heading); + const node = (state.doc.resolve(start) as any).nodeAfter; + const sm = state.storedMarks || undefined; + const ruleProvider = FormattedTextBox.InputBoxOverlay!.props.ruleProvider; + const heading = NumCast(FormattedTextBox.InputBoxOverlay!.props.Document.heading); if (ruleProvider && heading) { ruleProvider["ruleAlign_" + heading] = "right"; return node ? state.tr.deleteRange(start, end).setStoredMarks([...node.marks, ...(sm ? sm : [])]) : state.tr; } - let replaced = node ? state.tr.replaceRangeWith(start, end, schema.nodes.paragraph.create({ align: "right" })).setStoredMarks([...node.marks, ...(sm ? 
sm : [])]) : + const replaced = node ? state.tr.replaceRangeWith(start, end, schema.nodes.paragraph.create({ align: "right" })).setStoredMarks([...node.marks, ...(sm ? sm : [])]) : state.tr; return replaced.setSelection(new TextSelection(replaced.doc.resolve(end - 2))); }), new InputRule( new RegExp(/##\s$/), (state, match, start, end) => { - let target = Docs.Create.TextDocument({ width: 75, height: 35, autoHeight: true, fontSize: 9, title: "inline comment" }); - let node = (state.doc.resolve(start) as any).nodeAfter; - let newNode = schema.nodes.dashComment.create({ docid: target[Id] }); - let dashDoc = schema.nodes.dashDoc.create({ width: 75, height: 35, title: "dashDoc", docid: target[Id], float: "right" }); - let sm = state.storedMarks || undefined; - let replaced = node ? state.tr.insert(start, newNode).replaceRangeWith(start + 1, end + 1, dashDoc).setStoredMarks([...node.marks, ...(sm ? sm : [])]) : + const target = Docs.Create.TextDocument({ width: 75, height: 35, autoHeight: true, fontSize: 9, title: "inline comment" }); + const node = (state.doc.resolve(start) as any).nodeAfter; + const newNode = schema.nodes.dashComment.create({ docid: target[Id] }); + const dashDoc = schema.nodes.dashDoc.create({ width: 75, height: 35, title: "dashDoc", docid: target[Id], float: "right" }); + const sm = state.storedMarks || undefined; + const replaced = node ? state.tr.insert(start, newNode).replaceRangeWith(start + 1, end + 1, dashDoc).setStoredMarks([...node.marks, ...(sm ? sm : [])]) : state.tr; return replaced;//.setSelection(new NodeSelection(replaced.doc.resolve(end))); }), new InputRule( new RegExp(/\(\(/), (state, match, start, end) => { - let node = (state.doc.resolve(start) as any).nodeAfter; - let sm = state.storedMarks || undefined; - let mark = state.schema.marks.highlight.create(); - let selected = state.tr.setSelection(new TextSelection(state.doc.resolve(start), state.doc.resolve(end))).addMark(start, end, mark); - let content = selected.selection.content(); - let replaced = node ? selected.replaceRangeWith(start, start, + const node = (state.doc.resolve(start) as any).nodeAfter; + const sm = state.storedMarks || undefined; + const mark = state.schema.marks.highlight.create(); + const selected = state.tr.setSelection(new TextSelection(state.doc.resolve(start), state.doc.resolve(end))).addMark(start, end, mark); + const content = selected.selection.content(); + const replaced = node ? selected.replaceRangeWith(start, start, schema.nodes.star.create({ visibility: true, text: content, textslice: content.toJSON() })).setStoredMarks([...node.marks, ...(sm ? sm : [])]) : state.tr; return replaced.setSelection(new TextSelection(replaced.doc.resolve(end + 1))); @@ -172,14 +172,14 @@ export const inpRules = { new InputRule( new RegExp(/\)\)/), (state, match, start, end) => { - let mark = state.schema.marks.highlight.create(); + const mark = state.schema.marks.highlight.create(); return state.tr.removeStoredMark(mark); }), new InputRule( new RegExp(/\^f\s$/), (state, match, start, end) => { - let newNode = schema.nodes.footnote.create({}); - let tr = state.tr; + const newNode = schema.nodes.footnote.create({}); + const tr = state.tr; tr.deleteRange(start, end).replaceSelectionWith(newNode); // replace insertion with a footnote. 
return tr.setSelection(new NodeSelection( // select the footnote node to open its display tr.doc.resolve( // get the location of the footnote node by subtracting the nodesize of the footnote from the current insertion point anchor (which will be immediately after the footnote node) diff --git a/src/client/util/RichTextSchema.tsx b/src/client/util/RichTextSchema.tsx index 522232e9f..cb03892f3 100644 --- a/src/client/util/RichTextSchema.tsx +++ b/src/client/util/RichTextSchema.tsx @@ -1,4 +1,4 @@ -import { action, observable, runInAction, reaction, IReactionDisposer } from "mobx"; +import { reaction, IReactionDisposer } from "mobx"; import { baseKeymap, toggleMark } from "prosemirror-commands"; import { redo, undo } from "prosemirror-history"; import { keymap } from "prosemirror-keymap"; @@ -257,9 +257,9 @@ export const nodes: { [index: string]: NodeSpec } = { if (node.attrs.mapStyle === "bullet") return ['ul', 0]; const decMap = bs ? "decimal" + bs : ""; const multiMap = bs === 1 ? "decimal1" : bs === 2 ? "upper-alpha" : bs === 3 ? "lower-roman" : bs === 4 ? "lower-alpha" : ""; - let map = node.attrs.mapStyle === "decimal" ? decMap : multiMap; - let fsize = node.attrs.setFontSize ? node.attrs.setFontSize : node.attrs.inheritedFontSize; - let ffam = node.attrs.setFontFamily; + const map = node.attrs.mapStyle === "decimal" ? decMap : multiMap; + const fsize = node.attrs.setFontSize ? node.attrs.setFontSize : node.attrs.inheritedFontSize; + const ffam = node.attrs.setFontFamily; return node.attrs.visibility ? ['ol', { class: `${map}-ol`, style: `list-style: none; font-size: ${fsize}; font-family: ${ffam}` }, 0] : ['ol', { class: `${map}-ol`, style: `list-style: none; font-size: ${fsize}; font-family: ${ffam}` }]; } @@ -287,7 +287,7 @@ export const nodes: { [index: string]: NodeSpec } = { const bs = node.attrs.bulletStyle; const decMap = bs ? "decimal" + bs : ""; const multiMap = bs === 1 ? "decimal1" : bs === 2 ? "upper-alpha" : bs === 3 ? "lower-roman" : bs === 4 ? "lower-alpha" : ""; - let map = node.attrs.mapStyle === "decimal" ? decMap : node.attrs.mapStyle === "multi" ? multiMap : ""; + const map = node.attrs.mapStyle === "decimal" ? decMap : node.attrs.mapStyle === "multi" ? multiMap : ""; return node.attrs.visibility ? ["li", { class: `${map}` }, 0] : ["li", { class: `${map}` }, "..."]; //return ["li", { class: `${map}` }, 0]; } @@ -432,7 +432,7 @@ export const marks: { [index: string]: MarkSpec } = { tag: "span", getAttrs: (p: any) => { if (typeof (p) !== "string") { - let style = getComputedStyle(p); + const style = getComputedStyle(p); if (style.textDecoration === "underline") return null; if (p.parentElement.outerHTML.indexOf("text-decoration: underline") !== -1 && p.parentElement.outerHTML.indexOf("text-decoration-style: dotted") !== -1) { @@ -457,7 +457,7 @@ export const marks: { [index: string]: MarkSpec } = { tag: "span", getAttrs: (p: any) => { if (typeof (p) !== "string") { - let style = getComputedStyle(p); + const style = getComputedStyle(p); if (style.textDecoration === "underline" || p.parentElement.outerHTML.indexOf("text-decoration-style:line") !== -1) { return null; } @@ -493,11 +493,11 @@ export const marks: { [index: string]: MarkSpec } = { }, group: "inline", toDOM(node: any) { - let uid = node.attrs.userid.replace(".", "").replace("@", ""); - let min = Math.round(node.attrs.modified / 12); - let hr = Math.round(min / 60); - let day = Math.round(hr / 60 / 24); - let remote = node.attrs.userid !== Doc.CurrentUserEmail ? 
" userMark-remote" : ""; + const uid = node.attrs.userid.replace(".", "").replace("@", ""); + const min = Math.round(node.attrs.modified / 12); + const hr = Math.round(min / 60); + const day = Math.round(hr / 60 / 24); + const remote = node.attrs.userid !== Doc.CurrentUserEmail ? " userMark-remote" : ""; return node.attrs.opened ? ['span', { class: "userMark-" + uid + remote + " userMark-min-" + min + " userMark-hr-" + hr + " userMark-day-" + day }, 0] : ['span', { class: "userMark-" + uid + remote + " userMark-min-" + min + " userMark-hr-" + hr + " userMark-day-" + day }, ['span', 0]]; @@ -513,7 +513,7 @@ export const marks: { [index: string]: MarkSpec } = { }, group: "inline", toDOM(node: any) { - let uid = node.attrs.userid.replace(".", "").replace("@", ""); + const uid = node.attrs.userid.replace(".", "").replace("@", ""); return node.attrs.opened ? ['span', { class: "userTag-" + uid + " userTag-" + node.attrs.tag }, 0] : ['span', { class: "userTag-" + uid + " userTag-" + node.attrs.tag }, ['span', 0]]; @@ -534,7 +534,7 @@ export const marks: { [index: string]: MarkSpec } = { }, parseDOM: [{ tag: "span", getAttrs(dom: any) { - let cstyle = getComputedStyle(dom); + const cstyle = getComputedStyle(dom); if (cstyle.font) { if (cstyle.font.indexOf("Times New Roman") !== -1) return { family: "Times New Roman" }; if (cstyle.font.indexOf("Arial") !== -1) return { family: "Arial" }; @@ -599,7 +599,7 @@ export class ImageResizeView { this._handle.style.display = "none"; this._handle.style.bottom = "-10px"; this._handle.style.right = "-10px"; - let self = this; + const self = this; this._img.onclick = function (e: any) { e.stopPropagation(); e.preventDefault(); @@ -620,8 +620,8 @@ export class ImageResizeView { this._handle.onpointerdown = function (e: any) { e.preventDefault(); e.stopPropagation(); - let wid = Number(getComputedStyle(self._img).width.replace(/px/, "")); - let hgt = Number(getComputedStyle(self._img).height.replace(/px/, "")); + const wid = Number(getComputedStyle(self._img).width.replace(/px/, "")); + const hgt = Number(getComputedStyle(self._img).height.replace(/px/, "")); const startX = e.pageX; const startWidth = parseFloat(node.attrs.width); const onpointermove = (e: any) => { @@ -634,7 +634,7 @@ export class ImageResizeView { const onpointerup = () => { document.removeEventListener("pointermove", onpointermove); document.removeEventListener("pointerup", onpointerup); - let pos = view.state.selection.from; + const pos = view.state.selection.from; view.dispatch(view.state.tr.setNodeMarkup(getPos(), null, { ...node.attrs, width: self._outer.style.width, height: self._outer.style.height })); view.dispatch(view.state.tr.setSelection(new NodeSelection(view.state.doc.resolve(pos)))); }; @@ -671,19 +671,19 @@ export class DashDocCommentView { this._collapsed.id = "DashDocCommentView-" + node.attrs.docid; this._view = view; this._collapsed.onpointerdown = (e: any) => { - let node = view.state.doc.nodeAt(getPos() + 1); + const node = view.state.doc.nodeAt(getPos() + 1); view.dispatch(view.state.tr. setNodeMarkup(getPos() + 1, undefined, { ...node.attrs, hidden: node.attrs.hidden ? 
false : true })); // update the attrs setTimeout(() => node.attrs.hidden && DocServer.GetRefField(node.attrs.docid).then(async dashDoc => dashDoc instanceof Doc && Doc.linkFollowHighlight(dashDoc)), 100); - } + }; this._collapsed.onpointerenter = (e: any) => { - let node = view.state.doc.nodeAt(getPos() + 1); + const node = view.state.doc.nodeAt(getPos() + 1); DocServer.GetRefField(node.attrs.docid).then(async dashDoc => dashDoc instanceof Doc && Doc.linkFollowHighlight(dashDoc)); e.preventDefault(); e.stopPropagation(); }; this._collapsed.onpointerleave = (e: any) => { - let node = view.state.doc.nodeAt(getPos() + 1); + const node = view.state.doc.nodeAt(getPos() + 1); DocServer.GetRefField(node.attrs.docid).then(async dashDoc => dashDoc instanceof Doc && Doc.linkFollowUnhighlight()); e.preventDefault(); e.stopPropagation(); @@ -701,7 +701,7 @@ export class DashDocView { _textBox: FormattedTextBox; getDocTransform = () => { - let { scale, translateX, translateY } = Utils.GetScreenTransform(this._outer); + const { scale, translateX, translateY } = Utils.GetScreenTransform(this._outer); return new Transform(-translateX, -translateY, 1).scale(1 / this.contentScaling() / scale); } contentScaling = () => NumCast(this._dashDoc!.nativeWidth) > 0 && !this._dashDoc!.ignoreAspect ? this._dashDoc![WidthSym]() / NumCast(this._dashDoc!.nativeWidth) : 1; @@ -721,24 +721,24 @@ export class DashDocView { this._dashSpan.style.position = "absolute"; this._dashSpan.style.display = "inline-block"; this._dashSpan.style.borderWidth = "4"; - let removeDoc = () => { - let pos = getPos(); - let ns = new NodeSelection(view.state.doc.resolve(pos)); + const removeDoc = () => { + const pos = getPos(); + const ns = new NodeSelection(view.state.doc.resolve(pos)); view.dispatch(view.state.tr.setSelection(ns).deleteSelection()); return true; }; this._dashSpan.onpointerleave = () => { - let ele = document.getElementById("DashDocCommentView-" + node.attrs.docid); + const ele = document.getElementById("DashDocCommentView-" + node.attrs.docid); if (ele) { (ele as HTMLDivElement).style.backgroundColor = ""; } - } + }; this._dashSpan.onpointerenter = () => { - let ele = document.getElementById("DashDocCommentView-" + node.attrs.docid); + const ele = document.getElementById("DashDocCommentView-" + node.attrs.docid); if (ele) { (ele as HTMLDivElement).style.backgroundColor = "orange"; } - } + }; DocServer.GetRefField(node.attrs.docid).then(async dashDoc => { if (dashDoc instanceof Doc) { self._dashDoc = dashDoc; @@ -777,7 +777,7 @@ export class DashDocView { />, this._dashSpan); } }); - let self = this; + const self = this; this._dashSpan.onkeydown = function (e: any) { e.stopPropagation(); }; this._dashSpan.onkeypress = function (e: any) { e.stopPropagation(); }; this._dashSpan.onwheel = function (e: any) { e.preventDefault(); }; @@ -830,7 +830,7 @@ export class FootnoteView { } open() { // Append a tooltip to the outer node - let tooltip = this.dom.appendChild(document.createElement("div")); + const tooltip = this.dom.appendChild(document.createElement("div")); tooltip.className = "footnote-tooltip"; // And put a sub-ProseMirror into that this.innerView = new EditorView(tooltip, { @@ -885,14 +885,14 @@ export class FootnoteView { this.dom.textContent = ""; } dispatchInner(tr: any) { - let { state, transactions } = this.innerView.state.applyTransaction(tr); + const { state, transactions } = this.innerView.state.applyTransaction(tr); this.innerView.updateState(state); if (!tr.getMeta("fromOutside")) { - let outerTr = 
this.outerView.state.tr, offsetMap = StepMap.offset(this.getPos() + 1); - for (let transaction of transactions) { - let steps = transaction.steps; - for (let step of steps) { + const outerTr = this.outerView.state.tr, offsetMap = StepMap.offset(this.getPos() + 1); + for (const transaction of transactions) { + const steps = transaction.steps; + for (const step of steps) { outerTr.step(step.map(offsetMap)); } } @@ -903,11 +903,11 @@ export class FootnoteView { if (!node.sameMarkup(this.node)) return false; this.node = node; if (this.innerView) { - let state = this.innerView.state; - let start = node.content.findDiffStart(state.doc.content); + const state = this.innerView.state; + const start = node.content.findDiffStart(state.doc.content); if (start !== null) { let { a: endA, b: endB } = node.content.findDiffEnd(state.doc.content); - let overlap = start - Math.min(endA, endB); + const overlap = start - Math.min(endA, endB); if (overlap > 0) { endA += overlap; endB += overlap; } this.innerView.dispatch( state.tr @@ -967,10 +967,10 @@ export class SummarizedView { className = (visible: boolean) => "formattedTextBox-summarizer" + (visible ? "" : "-collapsed"); updateSummarizedText(start?: any) { - let mark = this._view.state.schema.marks.highlight.create(); + const mark = this._view.state.schema.marks.highlight.create(); let endPos = start; - let visited = new Set(); + const visited = new Set(); for (let i: number = start + 1; i < this._view.state.doc.nodeSize - 1; i++) { let skip = false; this._view.state.doc.nodesBetween(start, i, (node: Node, pos: number, parent: Node, index: number) => { @@ -999,7 +999,7 @@ export const schema = new Schema({ nodes, marks }); const fromJson = schema.nodeFromJSON; schema.nodeFromJSON = (json: any) => { - let node = fromJson(json); + const node = fromJson(json); if (json.type === "star") { node.attrs.text = Slice.fromJSON(schema, node.attrs.textslice); } diff --git a/src/client/util/Scripting.ts b/src/client/util/Scripting.ts index ff4451824..0fa96963e 100644 --- a/src/client/util/Scripting.ts +++ b/src/client/util/Scripting.ts @@ -94,16 +94,16 @@ function Run(script: string | undefined, customParams: string[], diagnostics: an return { compiled: false, errors: diagnostics }; } - let paramNames = Object.keys(scriptingGlobals); - let params = paramNames.map(key => scriptingGlobals[key]); + const paramNames = Object.keys(scriptingGlobals); + const params = paramNames.map(key => scriptingGlobals[key]); // let fieldTypes = [Doc, ImageField, PdfField, VideoField, AudioField, List, RichTextField, ScriptField, ComputedField, CompileScript]; // let paramNames = ["Docs", ...fieldTypes.map(fn => fn.name)]; // let params: any[] = [Docs, ...fieldTypes]; - let compiledFunction = new Function(...paramNames, `return ${script}`); - let { capturedVariables = {} } = options; - let run = (args: { [name: string]: any } = {}, onError?: (e: any) => void, errorVal?: any): ScriptResult => { - let argsArray: any[] = []; - for (let name of customParams) { + const compiledFunction = new Function(...paramNames, `return ${script}`); + const { capturedVariables = {} } = options; + const run = (args: { [name: string]: any } = {}, onError?: (e: any) => void, errorVal?: any): ScriptResult => { + const argsArray: any[] = []; + for (const name of customParams) { if (name === "this") { continue; } @@ -113,7 +113,7 @@ function Run(script: string | undefined, customParams: string[], diagnostics: an argsArray.push(capturedVariables[name]); } } - let thisParam = args.this || capturedVariables.this; + 
const thisParam = args.this || capturedVariables.this; let batch: { end(): void } | undefined = undefined; try { if (!options.editable) { @@ -146,7 +146,7 @@ class ScriptingCompilerHost { // getSourceFile(fileName: string, languageVersion: ts.ScriptTarget, onError?: ((message: string) => void) | undefined, shouldCreateNewSourceFile?: boolean | undefined): ts.SourceFile | undefined { getSourceFile(fileName: string, languageVersion: any, onError?: ((message: string) => void) | undefined, shouldCreateNewSourceFile?: boolean | undefined): any | undefined { - let contents = this.readFile(fileName); + const contents = this.readFile(fileName); if (contents !== undefined) { return ts.createSourceFile(fileName, contents, languageVersion, true); } @@ -180,7 +180,7 @@ class ScriptingCompilerHost { return this.files.some(file => file.fileName === fileName); } readFile(fileName: string): string | undefined { - let file = this.files.find(file => file.fileName === fileName); + const file = this.files.find(file => file.fileName === fileName); if (file) { return file.content; } @@ -218,7 +218,7 @@ export function CompileScript(script: string, options: ScriptOptions = {}): Comp if (options.globals) { Scripting.setScriptingGlobals(options.globals); } - let host = new ScriptingCompilerHost; + const host = new ScriptingCompilerHost; if (options.traverser) { const sourceFile = ts.createSourceFile('script.ts', script, ts.ScriptTarget.ES2015, true); const onEnter = typeof options.traverser === "object" ? options.traverser.onEnter : options.traverser; @@ -240,7 +240,7 @@ export function CompileScript(script: string, options: ScriptOptions = {}): Comp script = printer.printFile(transformed[0]); result.dispose(); } - let paramNames: string[] = []; + const paramNames: string[] = []; if ("this" in params || "this" in capturedVariables) { paramNames.push("this"); } @@ -248,7 +248,7 @@ export function CompileScript(script: string, options: ScriptOptions = {}): Comp if (key === "this") continue; paramNames.push(key); } - let paramList = paramNames.map(key => { + const paramList = paramNames.map(key => { const val = params[key]; return `${key}: ${val}`; }); @@ -258,18 +258,18 @@ export function CompileScript(script: string, options: ScriptOptions = {}): Comp paramNames.push(key); paramList.push(`${key}: ${typeof val === "object" ? Object.getPrototypeOf(val).constructor.name : typeof val}`); } - let paramString = paramList.join(", "); - let funcScript = `(function(${paramString})${requiredType ? `: ${requiredType}` : ''} { + const paramString = paramList.join(", "); + const funcScript = `(function(${paramString})${requiredType ? `: ${requiredType}` : ''} { ${addReturn ? 
`return ${script};` : script} })`; host.writeFile("file.ts", funcScript); if (typecheck) host.writeFile('node_modules/typescript/lib/lib.d.ts', typescriptlib); - let program = ts.createProgram(["file.ts"], {}, host); - let testResult = program.emit(); - let outputText = host.readFile("file.js"); + const program = ts.createProgram(["file.ts"], {}, host); + const testResult = program.emit(); + const outputText = host.readFile("file.js"); - let diagnostics = ts.getPreEmitDiagnostics(program).concat(testResult.diagnostics); + const diagnostics = ts.getPreEmitDiagnostics(program).concat(testResult.diagnostics); const result = Run(outputText, paramNames, diagnostics, script, options); diff --git a/src/client/util/SearchUtil.ts b/src/client/util/SearchUtil.ts index 2cf13680a..7a9176bec 100644 --- a/src/client/util/SearchUtil.ts +++ b/src/client/util/SearchUtil.ts @@ -34,36 +34,36 @@ export namespace SearchUtil { export function Search(query: string, returnDocs: false, options?: SearchParams): Promise; export async function Search(query: string, returnDocs: boolean, options: SearchParams = {}) { query = query || "*"; //If we just have a filter query, search for * as the query - let result: IdSearchResult = JSON.parse(await rp.get(Utils.prepend("/search"), { + const result: IdSearchResult = JSON.parse(await rp.get(Utils.prepend("/search"), { qs: { ...options, q: query }, })); if (!returnDocs) { return result; } - let { ids, numFound, highlighting } = result; + const { ids, highlighting } = result; - let txtresult = query !== "*" && JSON.parse(await rp.get(Utils.prepend("/textsearch"), { + const txtresult = query !== "*" && JSON.parse(await rp.get(Utils.prepend("/textsearch"), { qs: { ...options, q: query }, })); - let fileids = txtresult ? txtresult.ids : []; - let newIds: string[] = []; - let newLines: string[][] = []; + const fileids = txtresult ? 
txtresult.ids : []; + const newIds: string[] = []; + const newLines: string[][] = []; await Promise.all(fileids.map(async (tr: string, i: number) => { - let docQuery = "fileUpload_t:" + tr.substr(0, 7); //If we just have a filter query, search for * as the query - let docResult = JSON.parse(await rp.get(Utils.prepend("/search"), { qs: { ...options, q: docQuery } })); + const docQuery = "fileUpload_t:" + tr.substr(0, 7); //If we just have a filter query, search for * as the query + const docResult = JSON.parse(await rp.get(Utils.prepend("/search"), { qs: { ...options, q: docQuery } })); newIds.push(...docResult.ids); newLines.push(...docResult.ids.map((dr: any) => txtresult.lines[i])); })); - let theDocs: Doc[] = []; - let theLines: string[][] = []; + const theDocs: Doc[] = []; + const theLines: string[][] = []; const textDocMap = await DocServer.GetRefFields(newIds); const textDocs = newIds.map((id: string) => textDocMap[id]).map(doc => doc as Doc); for (let i = 0; i < textDocs.length; i++) { - let testDoc = textDocs[i]; + const testDoc = textDocs[i]; if (testDoc instanceof Doc && testDoc.type !== DocumentType.KVP && theDocs.findIndex(d => Doc.AreProtosEqual(d, testDoc)) === -1) { theDocs.push(Doc.GetProto(testDoc)); theLines.push(newLines[i].map(line => line.replace(query, query.toUpperCase()))); @@ -73,7 +73,7 @@ export namespace SearchUtil { const docMap = await DocServer.GetRefFields(ids); const docs = ids.map((id: string) => docMap[id]).map(doc => doc as Doc); for (let i = 0; i < ids.length; i++) { - let testDoc = docs[i]; + const testDoc = docs[i]; if (testDoc instanceof Doc && testDoc.type !== DocumentType.KVP && (options.allowAliases || theDocs.findIndex(d => Doc.AreProtosEqual(d, testDoc)) === -1)) { theDocs.push(testDoc); theLines.push([]); diff --git a/src/client/util/SerializationHelper.ts b/src/client/util/SerializationHelper.ts index ff048f647..1f6b939d3 100644 --- a/src/client/util/SerializationHelper.ts +++ b/src/client/util/SerializationHelper.ts @@ -1,7 +1,6 @@ -import { PropSchema, serialize, deserialize, custom, setDefaultModelSchema, getDefaultModelSchema, primitive, SKIP } from "serializr"; -import { Field, Doc } from "../../new_fields/Doc"; +import { PropSchema, serialize, deserialize, custom, setDefaultModelSchema, getDefaultModelSchema } from "serializr"; +import { Field } from "../../new_fields/Doc"; import { ClientUtils } from "./ClientUtils"; -import { emptyFunction } from "../../Utils"; let serializing = 0; export function afterDocDeserialize(cb: (err: any, val: any) => void, err: any, newValue: any) { @@ -65,8 +64,8 @@ export namespace SerializationHelper { } } -let serializationTypes: { [name: string]: { ctor: { new(): any }, afterDeserialize?: (obj: any) => void | Promise } } = {}; -let reverseMap: { [ctor: string]: string } = {}; +const serializationTypes: { [name: string]: { ctor: { new(): any }, afterDeserialize?: (obj: any) => void | Promise } } = {}; +const reverseMap: { [ctor: string]: string } = {}; export interface DeserializableOpts { (constructor: { new(...args: any[]): any }): void; diff --git a/src/client/util/SharingManager.tsx b/src/client/util/SharingManager.tsx index cc1d628b1..7496ac73c 100644 --- a/src/client/util/SharingManager.tsx +++ b/src/client/util/SharingManager.tsx @@ -9,7 +9,6 @@ import { Utils } from "../../Utils"; import "./SharingManager.scss"; import { Id } from "../../new_fields/FieldSymbols"; import { observer } from "mobx-react"; -import { MainView } from "../views/MainView"; import { FontAwesomeIcon } from 
"@fortawesome/react-fontawesome"; import { library } from '@fortawesome/fontawesome-svg-core'; import * as fa from '@fortawesome/free-solid-svg-icons'; @@ -103,10 +102,10 @@ export default class SharingManager extends React.Component<{}> { } populateUsers = async () => { - let userList = await RequestPromise.get(Utils.prepend("/getUsers")); + const userList = await RequestPromise.get(Utils.prepend("/getUsers")); const raw = JSON.parse(userList) as User[]; const evaluating = raw.map(async user => { - let isCandidate = user.email !== Doc.CurrentUserEmail; + const isCandidate = user.email !== Doc.CurrentUserEmail; if (isCandidate) { const userDocument = await DocServer.GetRefField(user.userDocumentId); if (userDocument instanceof Doc) { @@ -130,7 +129,7 @@ export default class SharingManager extends React.Component<{}> { if (state === SharingPermissions.None) { const metadata = (await DocCastAsync(manager[key])); if (metadata) { - let sharedAlias = (await DocCastAsync(metadata.sharedAlias))!; + const sharedAlias = (await DocCastAsync(metadata.sharedAlias))!; Doc.RemoveDocFromList(notificationDoc, storage, sharedAlias); manager[key] = undefined; } @@ -145,7 +144,7 @@ export default class SharingManager extends React.Component<{}> { } private setExternalSharing = (state: string) => { - let sharingDoc = this.sharingDoc; + const sharingDoc = this.sharingDoc; if (!sharingDoc) { return; } @@ -156,7 +155,7 @@ export default class SharingManager extends React.Component<{}> { if (!this.targetDoc) { return undefined; } - let baseUrl = Utils.prepend("/doc/" + this.targetDoc[Id]); + const baseUrl = Utils.prepend("/doc/" + this.targetDoc[Id]); return `${baseUrl}?sharing=true`; } @@ -178,7 +177,7 @@ export default class SharingManager extends React.Component<{}> { } private focusOn = (contents: string) => { - let title = this.targetDoc ? StrCast(this.targetDoc.title) : ""; + const title = this.targetDoc ? 
StrCast(this.targetDoc.title) : ""; return ( { fontSizeBtns.push(this.dropdownFontSizeBtn(String(mark.attrs.fontSize), "color: black; width: 50px;", mark, this.view, this.changeToFontSize)); }); - let newfontSizeDom = (new Dropdown(fontSizeBtns, { + const newfontSizeDom = (new Dropdown(fontSizeBtns, { label: label, css: "color:black; min-width: 60px;" }) as MenuItem).render(this.view).dom; @@ -312,12 +312,12 @@ export class TooltipTextMenu { //label of dropdown will change to given label updateFontStyleDropdown(label: string) { //font STYLES - let fontBtns: MenuItem[] = []; + const fontBtns: MenuItem[] = []; this.fontStyles.forEach((mark) => { fontBtns.push(this.dropdownFontFamilyBtn(mark.attrs.family, "color: black; font-family: " + mark.attrs.family + ", sans-serif; width: 125px;", mark, this.view, this.changeToFontFamily)); }); - let newfontStyleDom = (new Dropdown(fontBtns, { + const newfontStyleDom = (new Dropdown(fontBtns, { label: label, css: "color:black; width: 125px;" }) as MenuItem).render(this.view).dom; @@ -339,19 +339,19 @@ export class TooltipTextMenu { this.linkText.style.overflow = "hidden"; this.linkText.style.color = "white"; this.linkText.onpointerdown = (e: PointerEvent) => { e.stopPropagation(); }; - let linkBtn = document.createElement("div"); + const linkBtn = document.createElement("div"); linkBtn.textContent = ">>"; linkBtn.style.width = "10px"; linkBtn.style.height = "10px"; linkBtn.style.color = "white"; linkBtn.style.cssFloat = "left"; linkBtn.onpointerdown = (e: PointerEvent) => { - let node = this.view.state.selection.$from.nodeAfter; - let link = node && node.marks.find(m => m.type.name === "link"); + const node = this.view.state.selection.$from.nodeAfter; + const link = node && node.marks.find(m => m.type.name === "link"); if (link) { - let href: string = link.attrs.href; + const href: string = link.attrs.href; if (href.indexOf(Utils.prepend("/doc/")) === 0) { - let docid = href.replace(Utils.prepend("/doc/"), ""); + const docid = href.replace(Utils.prepend("/doc/"), ""); DocServer.GetRefField(docid).then(action((f: Opt) => { if (f instanceof Doc) { if (DocumentManager.Instance.getDocumentView(f)) { @@ -374,23 +374,23 @@ export class TooltipTextMenu { this.linkDrag.id = "link-drag"; this.linkDrag.onpointerdown = (e: PointerEvent) => { if (!this.editorProps) return; - let dragData = new DragManager.LinkDragData(this.editorProps.Document); + const dragData = new DragManager.LinkDragData(this.editorProps.Document); dragData.dontClearTextBox = true; // hack to get source context -sy - let docView = DocumentManager.Instance.getDocumentView(this.editorProps.Document); + const docView = DocumentManager.Instance.getDocumentView(this.editorProps.Document); e.stopPropagation(); - let ctrlKey = e.ctrlKey; + const ctrlKey = e.ctrlKey; DragManager.StartLinkDrag(this.linkDrag!, dragData, e.clientX, e.clientY, { handlers: { dragComplete: action(() => { if (dragData.linkDocument) { - let linkDoc = dragData.linkDocument; - let proto = Doc.GetProto(linkDoc); + const linkDoc = dragData.linkDocument; + const proto = Doc.GetProto(linkDoc); if (proto && docView) { proto.sourceContext = docView.props.ContainingCollectionDoc; } - let text = this.makeLink(linkDoc, StrCast(linkDoc.anchor2.title), ctrlKey ? "onRight" : "inTab"); + const text = this.makeLink(linkDoc, StrCast(linkDoc.anchor2.title), ctrlKey ? "onRight" : "inTab"); if (linkDoc instanceof Doc && linkDoc.anchor2 instanceof Doc) { proto.title = text === "" ? 
proto.title : text + " to " + linkDoc.anchor2.title; // TODODO open to more descriptive descriptions of following in text link } @@ -406,8 +406,8 @@ export class TooltipTextMenu { this.tooltip.appendChild(this.linkEditor); } - let node = this.view.state.selection.$from.nodeAfter; - let link = node && node.marks.find(m => m.type.name === "link"); + const node = this.view.state.selection.$from.nodeAfter; + const link = node && node.marks.find(m => m.type.name === "link"); this.linkText.textContent = link ? link.attrs.href : "-empty-"; this.linkText.onkeydown = (e: KeyboardEvent) => { @@ -420,19 +420,19 @@ export class TooltipTextMenu { } async getTextLinkTargetTitle() { - let node = this.view.state.selection.$from.nodeAfter; - let link = node && node.marks.find(m => m.type.name === "link"); + const node = this.view.state.selection.$from.nodeAfter; + const link = node && node.marks.find(m => m.type.name === "link"); if (link) { - let href = link.attrs.href; + const href = link.attrs.href; if (href) { if (href.indexOf(Utils.prepend("/doc/")) === 0) { const linkclicked = href.replace(Utils.prepend("/doc/"), "").split("?")[0]; if (linkclicked) { - let linkDoc = await DocServer.GetRefField(linkclicked); + const linkDoc = await DocServer.GetRefField(linkclicked); if (linkDoc instanceof Doc) { - let anchor1 = await Cast(linkDoc.anchor1, Doc); - let anchor2 = await Cast(linkDoc.anchor2, Doc); - let currentDoc = SelectionManager.SelectedDocuments().length && SelectionManager.SelectedDocuments()[0].props.Document; + const anchor1 = await Cast(linkDoc.anchor1, Doc); + const anchor2 = await Cast(linkDoc.anchor2, Doc); + const currentDoc = SelectionManager.SelectedDocuments().length && SelectionManager.SelectedDocuments()[0].props.Document; if (currentDoc && anchor1 && anchor2) { if (Doc.AreProtosEqual(currentDoc, anchor1)) { return StrCast(anchor2.title); @@ -453,18 +453,18 @@ export class TooltipTextMenu { } async createLinkDropdown() { - let targetTitle = await this.getTextLinkTargetTitle(); - let input = document.createElement("input"); + const targetTitle = await this.getTextLinkTargetTitle(); + const input = document.createElement("input"); // menu item for input for hyperlink url // TODO: integrate search to allow users to search for a doc to link to - let linkInfo = new MenuItem({ + const linkInfo = new MenuItem({ title: "", execEvent: "", class: "button-setting-disabled", css: "", render() { - let p = document.createElement("p"); + const p = document.createElement("p"); p.textContent = "Linked to:"; input.type = "text"; @@ -475,7 +475,7 @@ export class TooltipTextMenu { input.focus(); }; - let div = document.createElement("div"); + const div = document.createElement("div"); div.appendChild(p); div.appendChild(input); return div; @@ -487,13 +487,13 @@ export class TooltipTextMenu { }); // menu item to update/apply the hyperlink to the selected text - let linkApply = new MenuItem({ + const linkApply = new MenuItem({ title: "", execEvent: "", class: "", css: "", render() { - let button = document.createElement("button"); + const button = document.createElement("button"); button.className = "link-url-button"; button.textContent = "Apply hyperlink"; return button; @@ -507,17 +507,17 @@ export class TooltipTextMenu { // menu item to remove the link // TODO: allow this to be undoable - let self = this; - let deleteLink = new MenuItem({ + const self = this; + const deleteLink = new MenuItem({ title: "Delete link", execEvent: "", class: "separated-button", css: "", render() { - let button = 
document.createElement("button"); + const button = document.createElement("button"); button.textContent = "Remove link"; - let wrapper = document.createElement("div"); + const wrapper = document.createElement("div"); wrapper.appendChild(button); return wrapper; }, @@ -525,15 +525,15 @@ export class TooltipTextMenu { async run() { self.deleteLink(); // update link dropdown - let dropdown = await self.createLinkDropdown(); - let newLinkDropdowndom = dropdown.render(self.view).dom; + const dropdown = await self.createLinkDropdown(); + const newLinkDropdowndom = dropdown.render(self.view).dom; self._linkDropdownDom && self.tooltip.replaceChild(newLinkDropdowndom, self._linkDropdownDom); self._linkDropdownDom = newLinkDropdowndom; } }); - let linkDropdown = new Dropdown(targetTitle ? [linkInfo, linkApply, deleteLink] : [linkInfo, linkApply], { class: "buttonSettings-dropdown" }) as MenuItem; + const linkDropdown = new Dropdown(targetTitle ? [linkInfo, linkApply, deleteLink] : [linkInfo, linkApply], { class: "buttonSettings-dropdown" }) as MenuItem; return linkDropdown; } @@ -542,10 +542,10 @@ export class TooltipTextMenu { // } makeLink = (targetDoc: Doc, title: string, location: string): string => { - let link = this.view.state.schema.marks.link.create({ href: Utils.prepend("/doc/" + targetDoc[Id]), title: title, location: location }); + const link = this.view.state.schema.marks.link.create({ href: Utils.prepend("/doc/" + targetDoc[Id]), title: title, location: location }); this.view.dispatch(this.view.state.tr.removeMark(this.view.state.selection.from, this.view.state.selection.to, this.view.state.schema.marks.link). addMark(this.view.state.selection.from, this.view.state.selection.to, link)); - let node = this.view.state.selection.$from.nodeAfter; + const node = this.view.state.selection.$from.nodeAfter; if (node && node.text) { return node.text; } @@ -562,9 +562,9 @@ export class TooltipTextMenu { } deleteLink = () => { - let node = this.view.state.selection.$from.nodeAfter; - let link = node && node.marks.find(m => m.type === this.view.state.schema.marks.link); - let href = link!.attrs.href; + const node = this.view.state.selection.$from.nodeAfter; + const link = node && node.marks.find(m => m.type === this.view.state.schema.marks.link); + const href = link!.attrs.href; if (href) { if (href.indexOf(Utils.prepend("/doc/")) === 0) { const linkclicked = href.replace(Utils.prepend("/doc/"), "").split("?")[0]; @@ -599,7 +599,7 @@ export class TooltipTextMenu { } createLink() { - let markType = schema.marks.link; + const markType = schema.marks.link; return new MenuItem({ title: "Add or remove link", label: "Add or remove link", @@ -613,8 +613,8 @@ export class TooltipTextMenu { let curLink = ""; if (this.markActive(state, markType)) { - let { from, $from, to, empty } = state.selection; - let node = state.doc.nodeAt(from); + const { from, $from, to, empty } = state.selection; + const node = state.doc.nodeAt(from); node && node.marks.map(m => { m.type === markType && (curLink = m.attrs.href); }); @@ -649,7 +649,7 @@ export class TooltipTextMenu { if (listTypeBtn) { this.tooltip.removeChild(listTypeBtn); } //Make a dropdown of all list types - let toAdd: MenuItem[] = []; + const toAdd: MenuItem[] = []; this.listTypeToIcon.forEach((icon, type) => { toAdd.push(this.dropdownNodeBtn(icon, "color: black; width: 40px;", type, this.view, this.listTypes, this.changeToNodeType)); }); @@ -683,22 +683,22 @@ export class TooltipTextMenu { public static insertStar(state: EditorState, dispatch: any) { if 
(state.selection.empty) return false; - let mark = state.schema.marks.highlight.create(); - let tr = state.tr; + const mark = state.schema.marks.highlight.create(); + const tr = state.tr; tr.addMark(state.selection.from, state.selection.to, mark); - let content = tr.selection.content(); - let newNode = state.schema.nodes.star.create({ visibility: false, text: content, textslice: content.toJSON() }); + const content = tr.selection.content(); + const newNode = state.schema.nodes.star.create({ visibility: false, text: content, textslice: content.toJSON() }); dispatch && dispatch(tr.replaceSelectionWith(newNode).removeMark(tr.selection.from - 1, tr.selection.from, mark)); return true; } public static insertComment(state: EditorState, dispatch: any) { if (state.selection.empty) return false; - let mark = state.schema.marks.highlight.create(); - let tr = state.tr; + const mark = state.schema.marks.highlight.create(); + const tr = state.tr; tr.addMark(state.selection.from, state.selection.to, mark); - let content = tr.selection.content(); - let newNode = state.schema.nodes.star.create({ visibility: false, text: content, textslice: content.toJSON() }); + const content = tr.selection.content(); + const newNode = state.schema.nodes.star.create({ visibility: false, text: content, textslice: content.toJSON() }); dispatch && dispatch(tr.replaceSelectionWith(newNode).removeMark(tr.selection.from - 1, tr.selection.from, mark)); return true; } @@ -710,17 +710,17 @@ export class TooltipTextMenu { class: "menuicon", execEvent: "", render() { - let svg = document.createElementNS("http://www.w3.org/2000/svg", "svg"); + const svg = document.createElementNS("http://www.w3.org/2000/svg", "svg"); svg.setAttribute("viewBox", "-100 -100 650 650"); - let path = document.createElementNS('http://www.w3.org/2000/svg', "path"); + const path = document.createElementNS('http://www.w3.org/2000/svg', "path"); path.setAttributeNS(null, "d", "M0 479.98L99.92 512l35.45-35.45-67.04-67.04L0 479.98zm124.61-240.01a36.592 36.592 0 0 0-10.79 38.1l13.05 42.83-50.93 50.94 96.23 96.23 50.86-50.86 42.74 13.08c13.73 4.2 28.65-.01 38.15-10.78l35.55-41.64-173.34-173.34-41.52 35.44zm403.31-160.7l-63.2-63.2c-20.49-20.49-53.38-21.52-75.12-2.35L190.55 183.68l169.77 169.78L530.27 154.4c19.18-21.74 18.15-54.63-2.35-75.13z"); svg.appendChild(path); - let color = document.createElement("div"); + const color = document.createElement("div"); color.className = "buttonColor"; color.style.backgroundColor = TooltipTextMenuManager.Instance.highlight.toString(); - let wrapper = document.createElement("div"); + const wrapper = document.createElement("div"); wrapper.id = "colorPicker"; wrapper.appendChild(svg); wrapper.appendChild(color); @@ -735,26 +735,26 @@ export class TooltipTextMenu { public static insertHighlight(color: String, state: EditorState, dispatch: any) { if (state.selection.empty) return false; - let highlightMark = state.schema.mark(state.schema.marks.marker, { highlight: color }); + const highlightMark = state.schema.mark(state.schema.marks.marker, { highlight: color }); dispatch(state.tr.addMark(state.selection.from, state.selection.to, highlightMark)); } createHighlightDropdown() { // menu item for color picker - let self = this; - let colors = new MenuItem({ + const self = this; + const colors = new MenuItem({ title: "", execEvent: "", class: "button-setting-disabled", css: "", render() { - let p = document.createElement("p"); + const p = document.createElement("p"); p.textContent = "Change highlight:"; - let colorsWrapper = 
document.createElement("div"); + const colorsWrapper = document.createElement("div"); colorsWrapper.className = "colorPicker-wrapper"; - let colors = [ + const colors = [ PastelSchemaPalette.get("pink2"), PastelSchemaPalette.get("purple4"), PastelSchemaPalette.get("bluegreen1"), @@ -768,7 +768,7 @@ export class TooltipTextMenu { ]; colors.forEach(color => { - let button = document.createElement("button"); + const button = document.createElement("button"); button.className = color === TooltipTextMenuManager.Instance.highlight ? "colorPicker active" : "colorPicker"; if (color) { button.style.backgroundColor = color; @@ -779,8 +779,8 @@ export class TooltipTextMenu { TooltipTextMenu.insertHighlight(TooltipTextMenuManager.Instance.highlight, self.view.state, self.view.dispatch); // update color menu - let highlightDom = self.createHighlightTool().render(self.view).dom; - let highlightDropdownDom = self.createHighlightDropdown().render(self.view).dom; + const highlightDom = self.createHighlightTool().render(self.view).dom; + const highlightDropdownDom = self.createHighlightDropdown().render(self.view).dom; self.highlightDom && self.tooltip.replaceChild(highlightDom, self.highlightDom); self.highlightDropdownDom && self.tooltip.replaceChild(highlightDropdownDom, self.highlightDropdownDom); self.highlightDom = highlightDom; @@ -790,7 +790,7 @@ export class TooltipTextMenu { colorsWrapper.appendChild(button); }); - let div = document.createElement("div"); + const div = document.createElement("div"); div.appendChild(p); div.appendChild(colorsWrapper); return div; @@ -801,7 +801,7 @@ export class TooltipTextMenu { } }); - let colorDropdown = new Dropdown([colors], { class: "buttonSettings-dropdown" }) as MenuItem; + const colorDropdown = new Dropdown([colors], { class: "buttonSettings-dropdown" }) as MenuItem; return colorDropdown; } @@ -812,17 +812,17 @@ export class TooltipTextMenu { class: "menuicon", execEvent: "", render() { - let svg = document.createElementNS("http://www.w3.org/2000/svg", "svg"); + const svg = document.createElementNS("http://www.w3.org/2000/svg", "svg"); svg.setAttribute("viewBox", "-100 -100 650 650"); - let path = document.createElementNS('http://www.w3.org/2000/svg', "path"); + const path = document.createElementNS('http://www.w3.org/2000/svg', "path"); path.setAttributeNS(null, "d", "M204.3 5C104.9 24.4 24.8 104.3 5.2 203.4c-37 187 131.7 326.4 258.8 306.7 41.2-6.4 61.4-54.6 42.5-91.7-23.1-45.4 9.9-98.4 60.9-98.4h79.7c35.8 0 64.8-29.6 64.9-65.3C511.5 97.1 368.1-26.9 204.3 5zM96 320c-17.7 0-32-14.3-32-32s14.3-32 32-32 32 14.3 32 32-14.3 32-32 32zm32-128c-17.7 0-32-14.3-32-32s14.3-32 32-32 32 14.3 32 32-14.3 32-32 32zm128-64c-17.7 0-32-14.3-32-32s14.3-32 32-32 32 14.3 32 32-14.3 32-32 32zm128 64c-17.7 0-32-14.3-32-32s14.3-32 32-32 32 14.3 32 32-14.3 32-32 32z"); svg.appendChild(path); - let color = document.createElement("div"); + const color = document.createElement("div"); color.className = "buttonColor"; color.style.backgroundColor = TooltipTextMenuManager.Instance.color.toString(); - let wrapper = document.createElement("div"); + const wrapper = document.createElement("div"); wrapper.id = "colorPicker"; wrapper.appendChild(svg); wrapper.appendChild(color); @@ -837,26 +837,26 @@ export class TooltipTextMenu { public static insertColor(color: String, state: EditorState, dispatch: any) { if (state.selection.empty) return false; - let colorMark = state.schema.mark(state.schema.marks.color, { color: color }); + const colorMark = state.schema.mark(state.schema.marks.color, { 
color: color }); dispatch(state.tr.addMark(state.selection.from, state.selection.to, colorMark)); } createColorDropdown() { // menu item for color picker - let self = this; - let colors = new MenuItem({ + const self = this; + const colors = new MenuItem({ title: "", execEvent: "", class: "button-setting-disabled", css: "", render() { - let p = document.createElement("p"); + const p = document.createElement("p"); p.textContent = "Change color:"; - let colorsWrapper = document.createElement("div"); + const colorsWrapper = document.createElement("div"); colorsWrapper.className = "colorPicker-wrapper"; - let colors = [ + const colors = [ DarkPastelSchemaPalette.get("pink2"), DarkPastelSchemaPalette.get("purple4"), DarkPastelSchemaPalette.get("bluegreen1"), @@ -870,7 +870,7 @@ export class TooltipTextMenu { ]; colors.forEach(color => { - let button = document.createElement("button"); + const button = document.createElement("button"); button.className = color === TooltipTextMenuManager.Instance.color ? "colorPicker active" : "colorPicker"; if (color) { button.style.backgroundColor = color; @@ -880,8 +880,8 @@ export class TooltipTextMenu { TooltipTextMenu.insertColor(TooltipTextMenuManager.Instance.color, self.view.state, self.view.dispatch); // update color menu - let colorDom = self.createColorTool().render(self.view).dom; - let colorDropdownDom = self.createColorDropdown().render(self.view).dom; + const colorDom = self.createColorTool().render(self.view).dom; + const colorDropdownDom = self.createColorDropdown().render(self.view).dom; self.colorDom && self.tooltip.replaceChild(colorDom, self.colorDom); self.colorDropdownDom && self.tooltip.replaceChild(colorDropdownDom, self.colorDropdownDom); self.colorDom = colorDom; @@ -891,7 +891,7 @@ export class TooltipTextMenu { colorsWrapper.appendChild(button); }); - let div = document.createElement("div"); + const div = document.createElement("div"); div.appendChild(p); div.appendChild(colorsWrapper); return div; @@ -902,7 +902,7 @@ export class TooltipTextMenu { } }); - let colorDropdown = new Dropdown([colors], { class: "buttonSettings-dropdown" }) as MenuItem; + const colorDropdown = new Dropdown([colors], { class: "buttonSettings-dropdown" }) as MenuItem; return colorDropdown; } @@ -911,7 +911,7 @@ export class TooltipTextMenu { height: 32, width: 32, path: "M30.828 1.172c-1.562-1.562-4.095-1.562-5.657 0l-5.379 5.379-3.793-3.793-4.243 4.243 3.326 3.326-14.754 14.754c-0.252 0.252-0.358 0.592-0.322 0.921h-0.008v5c0 0.552 0.448 1 1 1h5c0 0 0.083 0 0.125 0 0.288 0 0.576-0.11 0.795-0.329l14.754-14.754 3.326 3.326 4.243-4.243-3.793-3.793 5.379-5.379c1.562-1.562 1.562-4.095 0-5.657zM5.409 30h-3.409v-3.409l14.674-14.674 3.409 3.409-14.674 14.674z" }; - let self = this; + const self = this; return new MenuItem({ title: "Brush tool", label: "Brush tool", @@ -923,7 +923,7 @@ export class TooltipTextMenu { this.brush_function(state, dispatch); // update dropdown with marks - let newBrushDropdowndom = self.createBrushDropdown().render(self.view).dom; + const newBrushDropdowndom = self.createBrushDropdown().render(self.view).dom; self._brushDropdownDom && self.tooltip.replaceChild(newBrushDropdowndom, self._brushDropdownDom); self._brushDropdownDom = newBrushDropdowndom; }, @@ -947,7 +947,7 @@ export class TooltipTextMenu { } } else { - let { from, to, $from } = this.view.state.selection; + const { from, to, $from } = this.view.state.selection; if (this._brushdom) { if (!this.view.state.selection.empty && $from && $from.nodeAfter) { if 
(TooltipTextMenuManager.Instance._brushMarks && to - from > 0) { @@ -982,7 +982,7 @@ export class TooltipTextMenu { } - let brushInfo = new MenuItem({ + const brushInfo = new MenuItem({ title: "", label: label, execEvent: "", @@ -994,17 +994,17 @@ export class TooltipTextMenu { } }); - let self = this; - let clearBrush = new MenuItem({ + const self = this; + const clearBrush = new MenuItem({ title: "Clear brush", execEvent: "", class: "separated-button", css: "", render() { - let button = document.createElement("button"); + const button = document.createElement("button"); button.textContent = "Clear brush"; - let wrapper = document.createElement("div"); + const wrapper = document.createElement("div"); wrapper.appendChild(button); return wrapper; }, @@ -1015,24 +1015,24 @@ export class TooltipTextMenu { // update brush tool // TODO: this probably isn't very clean - let newBrushdom = self.createBrush().render(self.view).dom; + const newBrushdom = self.createBrush().render(self.view).dom; self._brushdom && self.tooltip.replaceChild(newBrushdom, self._brushdom); self._brushdom = newBrushdom; - let newBrushDropdowndom = self.createBrushDropdown().render(self.view).dom; + const newBrushDropdowndom = self.createBrushDropdown().render(self.view).dom; self._brushDropdownDom && self.tooltip.replaceChild(newBrushDropdowndom, self._brushDropdownDom); self._brushDropdownDom = newBrushDropdowndom; } }); - let hasMarks = TooltipTextMenuManager.Instance._brushMarks && TooltipTextMenuManager.Instance._brushMarks.size > 0; - let brushDom = new Dropdown(hasMarks ? [brushInfo, clearBrush] : [brushInfo], { class: "buttonSettings-dropdown" }) as MenuItem; + const hasMarks = TooltipTextMenuManager.Instance._brushMarks && TooltipTextMenuManager.Instance._brushMarks.size > 0; + const brushDom = new Dropdown(hasMarks ? 
[brushInfo, clearBrush] : [brushInfo], { class: "buttonSettings-dropdown" }) as MenuItem; return brushDom; } //for a specific grouping of marks (passed in), remove all and apply the passed-in one to the selected textchangeToMarkInGroup = (markType: MarkType | undefined, view: EditorView, fontMarks: MarkType[]) => { changeToMarkInGroup = (markType: MarkType | undefined, view: EditorView, fontMarks: MarkType[]) => { - let { $cursor, ranges } = view.state.selection as TextSelection; - let state = view.state; - let dispatch = view.dispatch; + const { $cursor, ranges } = view.state.selection as TextSelection; + const state = view.state; + const dispatch = view.dispatch; //remove all other active font marks fontMarks.forEach((type) => { @@ -1044,10 +1044,10 @@ export class TooltipTextMenu { } else { let has = false; for (let i = 0; !has && i < ranges.length; i++) { - let { $from, $to } = ranges[i]; + const { $from, $to } = ranges[i]; has = state.doc.rangeHasMark($from.pos, $to.pos, type); } - for (let i of ranges) { + for (const i of ranges) { if (has) { toggleMark(type)(view.state, view.dispatch, view); } @@ -1059,7 +1059,7 @@ export class TooltipTextMenu { if (markType) { //actually apply font if ((view.state.selection as any).node && (view.state.selection as any).node.type === view.state.schema.nodes.ordered_list) { - let status = updateBullets(view.state.tr.setNodeMarkup(view.state.selection.from, (view.state.selection as any).node.type, + const status = updateBullets(view.state.tr.setNodeMarkup(view.state.selection.from, (view.state.selection as any).node.type, { ...(view.state.selection as NodeSelection).node.attrs, setFontFamily: markType.name, setFontSize: Number(markType.name.replace(/p/, "")) }), view.state.schema); view.dispatch(status.setSelection(new NodeSelection(status.doc.resolve(view.state.selection.from)))); } @@ -1068,9 +1068,9 @@ export class TooltipTextMenu { } changeToFontFamily = (mark: Mark, view: EditorView) => { - let { $cursor, ranges } = view.state.selection as TextSelection; - let state = view.state; - let dispatch = view.dispatch; + const { $cursor, ranges } = view.state.selection as TextSelection; + const state = view.state; + const dispatch = view.dispatch; //remove all other active font marks if ($cursor) { @@ -1080,28 +1080,28 @@ export class TooltipTextMenu { } else { let has = false; for (let i = 0; !has && i < ranges.length; i++) { - let { $from, $to } = ranges[i]; + const { $from, $to } = ranges[i]; has = state.doc.rangeHasMark($from.pos, $to.pos, view.state.schema.marks.pFontFamily); } - for (let i of ranges) { + for (const i of ranges) { if (has) { toggleMark(view.state.schema.marks.pFontFamily)(view.state, view.dispatch, view); } } } - let fontName = mark.attrs.family; + const fontName = mark.attrs.family; if (fontName) { this.updateFontStyleDropdown(fontName); } if (this.editorProps) { - let ruleProvider = this.editorProps.ruleProvider; - let heading = NumCast(this.editorProps.Document.heading); + const ruleProvider = this.editorProps.ruleProvider; + const heading = NumCast(this.editorProps.Document.heading); if (ruleProvider && heading) { ruleProvider["ruleFont_" + heading] = fontName; } } //actually apply font if ((view.state.selection as any).node && (view.state.selection as any).node.type === view.state.schema.nodes.ordered_list) { - let status = updateBullets(view.state.tr.setNodeMarkup(view.state.selection.from, (view.state.selection as any).node.type, + const status = updateBullets(view.state.tr.setNodeMarkup(view.state.selection.from, 
(view.state.selection as any).node.type, { ...(view.state.selection as NodeSelection).node.attrs, setFontFamily: fontName }), view.state.schema); view.dispatch(status.setSelection(new NodeSelection(status.doc.resolve(view.state.selection.from)))); } @@ -1110,9 +1110,9 @@ export class TooltipTextMenu { } changeToFontSize = (mark: Mark, view: EditorView) => { - let { $cursor, ranges } = view.state.selection as TextSelection; - let state = view.state; - let dispatch = view.dispatch; + const { $cursor, ranges } = view.state.selection as TextSelection; + const state = view.state; + const dispatch = view.dispatch; //remove all other active font marks if ($cursor) { @@ -1122,28 +1122,28 @@ export class TooltipTextMenu { } else { let has = false; for (let i = 0; !has && i < ranges.length; i++) { - let { $from, $to } = ranges[i]; + const { $from, $to } = ranges[i]; has = state.doc.rangeHasMark($from.pos, $to.pos, view.state.schema.marks.pFontSize); } - for (let i of ranges) { + for (const i of ranges) { if (has) { toggleMark(view.state.schema.marks.pFontSize)(view.state, view.dispatch, view); } } } - let size = mark.attrs.fontSize; + const size = mark.attrs.fontSize; if (size) { this.updateFontSizeDropdown(String(size) + " pt"); } if (this.editorProps) { - let ruleProvider = this.editorProps.ruleProvider; - let heading = NumCast(this.editorProps.Document.heading); + const ruleProvider = this.editorProps.ruleProvider; + const heading = NumCast(this.editorProps.Document.heading); if (ruleProvider && heading) { ruleProvider["ruleSize_" + heading] = size; } } //actually apply font if ((view.state.selection as any).node && (view.state.selection as any).node.type === view.state.schema.nodes.ordered_list) { - let status = updateBullets(view.state.tr.setNodeMarkup(view.state.selection.from, (view.state.selection as any).node.type, + const status = updateBullets(view.state.tr.setNodeMarkup(view.state.selection.from, (view.state.selection as any).node.type, { ...(view.state.selection as NodeSelection).node.attrs, setFontSize: size }), view.state.schema); view.dispatch(status.setSelection(new NodeSelection(status.doc.resolve(view.state.selection.from)))); } @@ -1154,20 +1154,20 @@ export class TooltipTextMenu { //remove all node typeand apply the passed-in one to the selected text changeToNodeType = (nodeType: NodeType | undefined) => { //remove oldif (nodeType) { //add new - let view = this.view; + const view = this.view; if (nodeType === schema.nodes.bullet_list) { wrapInList(nodeType)(view.state, view.dispatch); } else { - var marks = view.state.storedMarks || (view.state.selection.$to.parentOffset && view.state.selection.$from.marks()); + const marks = view.state.storedMarks || (view.state.selection.$to.parentOffset && view.state.selection.$from.marks()); if (!wrapInList(schema.nodes.ordered_list)(view.state, (tx2: any) => { - let tx3 = updateBullets(tx2, schema, nodeType && (nodeType as any).attrs.mapStyle); + const tx3 = updateBullets(tx2, schema, nodeType && (nodeType as any).attrs.mapStyle); marks && tx3.ensureMarks([...marks]); marks && tx3.setStoredMarks([...marks]); view.dispatch(tx2); })) { - let tx2 = view.state.tr; - let tx3 = updateBullets(tx2, schema, nodeType && (nodeType as any).attrs.mapStyle); + const tx2 = view.state.tr; + const tx3 = updateBullets(tx2, schema, nodeType && (nodeType as any).attrs.mapStyle); marks && tx3.ensureMarks([...marks]); marks && tx3.setStoredMarks([...marks]); @@ -1223,15 +1223,15 @@ export class TooltipTextMenu { }); } - markActive = function(state: EditorState, 
type: MarkType>) { - let { from, $from, to, empty } = state.selection; + markActive = function (state: EditorState, type: MarkType>) { + const { from, $from, to, empty } = state.selection; if (empty) return type.isInSet(state.storedMarks || $from.marks()); else return state.doc.rangeHasMark(from, to, type); }; // Helper function to create menu icons icon(text: string, name: string, title: string = name) { - let span = document.createElement("span"); + const span = document.createElement("span"); span.className = name + " menuicon"; span.title = title; span.textContent = text; @@ -1240,13 +1240,13 @@ export class TooltipTextMenu { } svgIcon(name: string, title: string = name, dpath: string) { - let svg = document.createElementNS("http://www.w3.org/2000/svg", "svg"); + const svg = document.createElementNS("http://www.w3.org/2000/svg", "svg"); svg.setAttribute("viewBox", "-100 -100 650 650"); - let path = document.createElementNS('http://www.w3.org/2000/svg', "path"); + const path = document.createElementNS('http://www.w3.org/2000/svg', "path"); path.setAttributeNS(null, "d", dpath); svg.appendChild(path); - let span = document.createElement("span"); + const span = document.createElement("span"); span.className = name + " menuicon"; span.title = title; span.appendChild(svg); @@ -1256,9 +1256,9 @@ export class TooltipTextMenu { //method for checking whether node can be inserted canInsert(state: EditorState, nodeType: NodeType>) { - let $from = state.selection.$from; + const $from = state.selection.$from; for (let d = $from.depth; d >= 0; d--) { - let index = $from.index(d); + const index = $from.index(d); if ($from.node(d).canReplaceWith(index, index, nodeType)) return true; } return false; @@ -1267,13 +1267,13 @@ export class TooltipTextMenu { //adapted this method - use it to check if block has a tag (ie bulleting) blockActive(type: NodeType>, state: EditorState) { - let attrs = {}; + const attrs = {}; if (state.selection instanceof NodeSelection) { const sel: NodeSelection = state.selection; - let $from = sel.$from; - let to = sel.to; - let node = sel.node; + const $from = sel.$from; + const to = sel.to; + const node = sel.node; if (node) { return node.hasMarkup(type, attrs); @@ -1292,10 +1292,10 @@ export class TooltipTextMenu { } getMarksInSelection(state: EditorState) { - let found = new Set(); - let { from, to } = state.selection as TextSelection; + const found = new Set(); + const { from, to } = state.selection as TextSelection; state.doc.nodesBetween(from, to, (node) => { - let marks = node.marks; + const marks = node.marks; if (marks) { marks.forEach(m => { found.add(m); @@ -1306,7 +1306,7 @@ export class TooltipTextMenu { } reset_mark_doms() { - let iterator = this._marksToDoms.values(); + const iterator = this._marksToDoms.values(); let next = iterator.next(); while (!next.done) { next.value.style.color = "white"; @@ -1322,7 +1322,7 @@ export class TooltipTextMenu { return; } this.view = view; - let state = view.state; + const state = view.state; DocumentDecorations.Instance.showTextBar(); props && (this.editorProps = props); // Don't do anything if the document/selection didn't change @@ -1338,13 +1338,13 @@ export class TooltipTextMenu { } // update link dropdown - let linkDropdown = await this.createLinkDropdown(); - let newLinkDropdowndom = linkDropdown.render(this.view).dom; + const linkDropdown = await this.createLinkDropdown(); + const newLinkDropdowndom = linkDropdown.render(this.view).dom; this._linkDropdownDom && this.tooltip.replaceChild(newLinkDropdowndom, 
this._linkDropdownDom); this._linkDropdownDom = newLinkDropdowndom; //UPDATE FONT STYLE DROPDOWN - let activeStyles = this.activeFontFamilyOnSelection(); + const activeStyles = this.activeFontFamilyOnSelection(); if (activeStyles !== undefined) { if (activeStyles.length === 1) { console.log("updating font style dropdown", activeStyles[0]); @@ -1353,7 +1353,7 @@ export class TooltipTextMenu { } //UPDATE FONT SIZE DROPDOWN - let activeSizes = this.activeFontSizeOnSelection(); + const activeSizes = this.activeFontSizeOnSelection(); if (activeSizes !== undefined) { if (activeSizes.length === 1) { //if there's only one active font size activeSizes[0] && this.updateFontSizeDropdown(String(activeSizes[0]) + " pt"); @@ -1366,7 +1366,7 @@ export class TooltipTextMenu { this.reset_mark_doms(); this._activeMarks.forEach((mark) => { if (this._marksToDoms.has(mark)) { - let dom = this._marksToDoms.get(mark); + const dom = this._marksToDoms.get(mark); if (dom) dom.style.color = "greenyellow"; } }); @@ -1385,8 +1385,8 @@ export class TooltipTextMenu { //finds fontSize at start of selection activeFontSizeOnSelection() { //current selection - let state = this.view.state; - let activeSizes: number[] = []; + const state = this.view.state; + const activeSizes: number[] = []; const pos = this.view.state.selection.$from; const ref_node: ProsNode = this.reference_node(pos); if (ref_node && ref_node !== this.view.state.doc && ref_node.isText) { @@ -1397,8 +1397,8 @@ export class TooltipTextMenu { //finds fontSize at start of selection activeFontFamilyOnSelection() { //current selection - let state = this.view.state; - let activeFamilies: string[] = []; + const state = this.view.state; + const activeFamilies: string[] = []; const pos = this.view.state.selection.$from; const ref_node: ProsNode = this.reference_node(pos); if (ref_node && ref_node !== this.view.state.doc && ref_node.isText) { @@ -1409,15 +1409,15 @@ export class TooltipTextMenu { //finds all active marks on selection in given group activeMarksOnSelection(markGroup: MarkType[]) { //current selection - let { empty, ranges, $to } = this.view.state.selection as TextSelection; - let state = this.view.state; - let dispatch = this.view.dispatch; + const { empty, ranges, $to } = this.view.state.selection as TextSelection; + const state = this.view.state; + const dispatch = this.view.dispatch; let activeMarks: MarkType[]; if (!empty) { activeMarks = markGroup.filter(mark => { - let has = false; + const has = false; for (let i = 0; !has && i < ranges.length; i++) { - let { $from, $to } = ranges[i]; + const { $from, $to } = ranges[i]; return state.doc.rangeHasMark($from.pos, $to.pos, mark); } return false; @@ -1440,7 +1440,7 @@ export class TooltipTextMenu { if (mark_type === state.schema.marks.pFontSize) { return ref_node.marks.some(m => m.type.name === state.schema.marks.pFontSize.name); } - let mark = state.schema.mark(mark_type); + const mark = state.schema.mark(mark_type); return ref_node.marks.includes(mark); return false; }); diff --git a/src/client/util/TypedEvent.ts b/src/client/util/TypedEvent.ts index 532ba78eb..90fd299c1 100644 --- a/src/client/util/TypedEvent.ts +++ b/src/client/util/TypedEvent.ts @@ -1,40 +1,40 @@ export interface Listener { - (event: T): any; + (event: T): any; } export interface Disposable { - dispose(): void; + dispose(): void; } /** passes through events as they happen. 
You will not get events from before you start listening */ export class TypedEvent { - private listeners: Listener[] = []; - private listenersOncer: Listener[] = []; - - on = (listener: Listener): Disposable => { - this.listeners.push(listener); - return { - dispose: () => this.off(listener) - }; - } - - once = (listener: Listener): void => { - this.listenersOncer.push(listener); - } - - off = (listener: Listener) => { - var callbackIndex = this.listeners.indexOf(listener); - if (callbackIndex > -1) this.listeners.splice(callbackIndex, 1); - } - - emit = (event: T) => { - /** Update any general listeners */ - this.listeners.forEach((listener) => listener(event)); - - /** Clear the `once` queue */ - this.listenersOncer.forEach((listener) => listener(event)); - this.listenersOncer = []; - } - - pipe = (te: TypedEvent): Disposable => this.on((e) => te.emit(e)); + private listeners: Listener[] = []; + private listenersOncer: Listener[] = []; + + on = (listener: Listener): Disposable => { + this.listeners.push(listener); + return { + dispose: () => this.off(listener) + }; + } + + once = (listener: Listener): void => { + this.listenersOncer.push(listener); + } + + off = (listener: Listener) => { + const callbackIndex = this.listeners.indexOf(listener); + if (callbackIndex > -1) this.listeners.splice(callbackIndex, 1); + } + + emit = (event: T) => { + /** Update any general listeners */ + this.listeners.forEach((listener) => listener(event)); + + /** Clear the `once` queue */ + this.listenersOncer.forEach((listener) => listener(event)); + this.listenersOncer = []; + } + + pipe = (te: TypedEvent): Disposable => this.on((e) => te.emit(e)); } \ No newline at end of file diff --git a/src/client/util/UndoManager.ts b/src/client/util/UndoManager.ts index 472afac1d..314b52bf3 100644 --- a/src/client/util/UndoManager.ts +++ b/src/client/util/UndoManager.ts @@ -3,7 +3,7 @@ import 'source-map-support/register'; import { Without } from "../../Utils"; function getBatchName(target: any, key: string | symbol): string { - let keyName = key.toString(); + const keyName = key.toString(); if (target && target.constructor && target.constructor.name) { return `${target.constructor.name}.${keyName}`; } @@ -23,7 +23,7 @@ function propertyDecorator(target: any, key: string | symbol) { writable: true, configurable: true, value: function (...args: any[]) { - let batch = UndoManager.StartBatch(getBatchName(target, key)); + const batch = UndoManager.StartBatch(getBatchName(target, key)); try { return value.apply(this, args); } finally { @@ -40,7 +40,7 @@ export function undoBatch(fn: (...args: any[]) => any): (...args: any[]) => any; export function undoBatch(target: any, key?: string | symbol, descriptor?: TypedPropertyDescriptor): any { if (!key) { return function () { - let batch = UndoManager.StartBatch(""); + const batch = UndoManager.StartBatch(""); try { return target.apply(undefined, arguments); } finally { @@ -55,7 +55,7 @@ export function undoBatch(target: any, key?: string | symbol, descriptor?: Typed const oldFunction = descriptor.value; descriptor.value = function (...args: any[]) { - let batch = UndoManager.StartBatch(getBatchName(target, key)); + const batch = UndoManager.StartBatch(getBatchName(target, key)); try { return oldFunction.apply(this, args); } finally { @@ -98,7 +98,7 @@ export namespace UndoManager { GetOpenBatches().forEach(batch => console.log(batch.batchName)); } - let openBatches: Batch[] = []; + const openBatches: Batch[] = []; export function GetOpenBatches(): Without[] { return 
openBatches; } @@ -146,7 +146,7 @@ export namespace UndoManager { //TODO Make this return the return value export function RunInBatch(fn: () => T, batchName: string) { - let batch = StartBatch(batchName); + const batch = StartBatch(batchName); try { return runInAction(fn); } finally { @@ -159,7 +159,7 @@ export namespace UndoManager { return; } - let commands = undoStack.pop(); + const commands = undoStack.pop(); if (!commands) { return; } @@ -178,7 +178,7 @@ export namespace UndoManager { return; } - let commands = redoStack.pop(); + const commands = redoStack.pop(); if (!commands) { return; } diff --git a/src/client/views/CollectionLinearView.tsx b/src/client/views/CollectionLinearView.tsx index f718735a8..09e4ef99c 100644 --- a/src/client/views/CollectionLinearView.tsx +++ b/src/client/views/CollectionLinearView.tsx @@ -48,12 +48,12 @@ export class CollectionLinearView extends CollectionSubView(LinearDocument) { dimension = () => NumCast(this.props.Document.height); // 2 * the padding getTransform = (ele: React.RefObject) => () => { if (!ele.current) return Transform.Identity(); - let { scale, translateX, translateY } = Utils.GetScreenTransform(ele.current); + const { scale, translateX, translateY } = Utils.GetScreenTransform(ele.current); return new Transform(-translateX, -translateY, 1 / scale); } render() { - let guid = Utils.GenerateGuid(); + const guid = Utils.GenerateGuid(); return
      {this.childLayoutPairs.filter(pair => this.isCurrent(pair.layout)).map(pair => {
-        let nested = pair.layout.viewType === CollectionViewType.Linear;
-        let dref = React.createRef();
-        let nativeWidth = NumCast(pair.layout.nativeWidth, this.dimension());
-        let deltaSize = nativeWidth * .15 / 2;
+        const nested = pair.layout.viewType === CollectionViewType.Linear;
+        const dref = React.createRef();
+        const nativeWidth = NumCast(pair.layout.nativeWidth, this.dimension());
+        const deltaSize = nativeWidth * .15 / 2;
         return
      { this._mouseDown = false; - let curX = e.clientX; - let curY = e.clientY; + const curX = e.clientX; + const curY = e.clientY; if (this._mouseX !== curX || this._mouseY !== curY) { this._shouldDisplay = false; } @@ -208,7 +208,7 @@ export class ContextMenu extends React.Component { if (!this._display) { return null; } - let style = this._yRelativeToTop ? { left: this.pageX, top: this.pageY } : + const style = this._yRelativeToTop ? { left: this.pageX, top: this.pageY } : { left: this.pageX, bottom: this.pageY }; const contents = ( diff --git a/src/client/views/ContextMenuItem.tsx b/src/client/views/ContextMenuItem.tsx index 330b94afa..fef9e5f60 100644 --- a/src/client/views/ContextMenuItem.tsx +++ b/src/client/views/ContextMenuItem.tsx @@ -88,7 +88,7 @@ export class ContextMenuItem extends React.Component ); } else if ("subitems" in this.props) { - let submenu = !this.overItem ? (null) : + const submenu = !this.overItem ? (null) :
      {this._items.map(prop => )}
      ; diff --git a/src/client/views/DictationOverlay.tsx b/src/client/views/DictationOverlay.tsx index 2accf9bfd..65770c0bb 100644 --- a/src/client/views/DictationOverlay.tsx +++ b/src/client/views/DictationOverlay.tsx @@ -24,7 +24,7 @@ export class DictationOverlay extends React.Component { } public initiateDictationFade = () => { - let duration = DictationManager.Commands.dictationFadeDuration; + const duration = DictationManager.Commands.dictationFadeDuration; this.overlayTimeout = setTimeout(() => { this.dictationOverlayVisible = false; this.dictationSuccess = undefined; @@ -50,14 +50,14 @@ export class DictationOverlay extends React.Component { public set isListening(value: DictationManager.Controls.ListeningUIStatus) { runInAction(() => this._dictationListeningState = value); } render() { - let success = this.dictationSuccess; - let result = this.isListening && !this.isListening.interim ? DictationManager.placeholder : `"${this.dictatedPhrase}"`; - let dialogueBoxStyle = { + const success = this.dictationSuccess; + const result = this.isListening && !this.isListening.interim ? DictationManager.placeholder : `"${this.dictatedPhrase}"`; + const dialogueBoxStyle = { background: success === undefined ? "gainsboro" : success ? "lawngreen" : "red", borderColor: this.isListening ? "red" : "black", fontStyle: "italic" }; - let overlayStyle = { + const overlayStyle = { backgroundColor: this.isListening ? "red" : "darkslategrey" }; return ((schema @action.bound removeDocument(doc: Doc): boolean { Doc.GetProto(doc).annotationOn = undefined; - let value = this.extensionDoc && Cast(this.extensionDoc[this.annotationsKey], listSpec(Doc), []); - let index = value ? Doc.IndexOf(doc, value.map(d => d as Doc), true) : -1; + const value = this.extensionDoc && Cast(this.extensionDoc[this.annotationsKey], listSpec(Doc), []); + const index = value ? Doc.IndexOf(doc, value.map(d => d as Doc), true) : -1; return index !== -1 && value && value.splice(index, 1) ? true : false; } // if the moved document is already in this overlay collection nothing needs to be done. 
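Editor's note (not part of the patch): most of the client-side hunks in this commit replace single-assignment "let"/"var" declarations with "const", the convention enforced by TSLint's prefer-const and no-var-keyword rules (the code above already uses tslint directives). The repository's actual tslint.json is not included in this patch, so the fragment below is only an illustrative sketch of how such a refactor is typically kept in place:

    {
        "rules": {
            "prefer-const": true,
            "no-var-keyword": true
        }
    }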
diff --git a/src/client/views/DocumentButtonBar.scss b/src/client/views/DocumentButtonBar.scss index db6bf2ba0..c2ca93900 100644 --- a/src/client/views/DocumentButtonBar.scss +++ b/src/client/views/DocumentButtonBar.scss @@ -17,6 +17,7 @@ $linkGap : 3px; transform: scale(1.05); cursor: pointer; } + .documentButtonBar-linkButton-empty, .documentButtonBar-linkButton-nonempty { height: 20px; @@ -74,6 +75,31 @@ $linkGap : 3px; } -@-moz-keyframes spin { 100% { -moz-transform: rotate(360deg); } } -@-webkit-keyframes spin { 100% { -webkit-transform: rotate(360deg); } } -@keyframes spin { 100% { -webkit-transform: rotate(360deg); transform: rotate(360deg); } } \ No newline at end of file +@-moz-keyframes spin { + 100% { + -moz-transform: rotate(360deg); + } +} + +@-webkit-keyframes spin { + 100% { + -webkit-transform: rotate(360deg); + } +} + +@keyframes spin { + 100% { + -webkit-transform: rotate(360deg); + transform: rotate(360deg); + } +} + +@keyframes shadow-pulse { + 0% { + box-shadow: 0 0 0 0px rgba(0, 0, 0, 0.8); + } + + 100% { + box-shadow: 0 0 0 10px rgba(0, 255, 0, 0); + } +} \ No newline at end of file diff --git a/src/client/views/DocumentButtonBar.tsx b/src/client/views/DocumentButtonBar.tsx index 1fefc70f1..15cbd2788 100644 --- a/src/client/views/DocumentButtonBar.tsx +++ b/src/client/views/DocumentButtonBar.tsx @@ -21,6 +21,7 @@ import React = require("react"); import { DocumentView } from './nodes/DocumentView'; import { ParentDocSelector } from './collections/ParentDocumentSelector'; import { CollectionDockingView } from './collections/CollectionDockingView'; +import { DocumentDecorations } from './DocumentDecorations'; const higflyout = require("@hig/flyout"); export const { anchorPoints } = higflyout; export const Flyout = higflyout.default; @@ -51,7 +52,9 @@ export class DocumentButtonBar extends React.Component<{ views: DocumentView[], @observable private pushIcon: IconProp = "arrow-alt-circle-up"; @observable private pullIcon: IconProp = "arrow-alt-circle-down"; @observable private pullColor: string = "white"; - @observable private isAnimatingFetch = false; + @observable public isAnimatingFetch = false; + @observable public isAnimatingPulse = false; + @observable private openHover = false; public static Instance: DocumentButtonBar; @@ -75,6 +78,7 @@ export class DocumentButtonBar extends React.Component<{ views: DocumentView[], }); public startPushOutcome = action((success: boolean) => { + this.isAnimatingPulse = false; if (!this._pushAnimating) { this._pushAnimating = true; this.pushIcon = success ? "check-circle" : "stop-circle"; @@ -99,27 +103,26 @@ export class DocumentButtonBar extends React.Component<{ views: DocumentView[], this._pullColorAnimating = false; }); - @action onLinkButtonMoved = (e: PointerEvent): void => { if (this._linkButton.current !== null && (Math.abs(e.clientX - this._downX) > 3 || Math.abs(e.clientY - this._downY) > 3)) { document.removeEventListener("pointermove", this.onLinkButtonMoved); document.removeEventListener("pointerup", this.onLinkButtonUp); - let docView = this.props.views[0]; - let container = docView.props.ContainingCollectionDoc?.proto; - let dragData = new DragManager.LinkDragData(docView.props.Document, container ? [container] : []); - let linkDrag = UndoManager.StartBatch("Drag Link"); + const docView = this.props.views[0]; + const container = docView.props.ContainingCollectionDoc?.proto; + const dragData = new DragManager.LinkDragData(docView.props.Document, container ? 
[container] : []); + const linkDrag = UndoManager.StartBatch("Drag Link"); DragManager.StartLinkDrag(this._linkButton.current, dragData, e.pageX, e.pageY, { handlers: { dragComplete: () => { - let tooltipmenu = FormattedTextBox.ToolTipTextMenu; - let linkDoc = dragData.linkDocument; + const tooltipmenu = FormattedTextBox.ToolTipTextMenu; + const linkDoc = dragData.linkDocument; if (linkDoc && tooltipmenu) { - let proto = Doc.GetProto(linkDoc); + const proto = Doc.GetProto(linkDoc); if (proto && docView) { proto.sourceContext = docView.props.ContainingCollectionDoc; } - let text = tooltipmenu.makeLink(linkDoc, StrCast(linkDoc.anchor2.title), e.ctrlKey ? "onRight" : "inTab"); + const text = tooltipmenu.makeLink(linkDoc, StrCast(linkDoc.anchor2.title), e.ctrlKey ? "onRight" : "inTab"); if (linkDoc instanceof Doc && linkDoc.anchor2 instanceof Doc) { proto.title = text === "" ? proto.title : text + " to " + linkDoc.anchor2.title; // TODODO open to more descriptive descriptions of following in text link } @@ -152,21 +155,27 @@ export class DocumentButtonBar extends React.Component<{ views: DocumentView[], @computed get considerGoogleDocsPush() { - let targetDoc = this.props.views[0].props.Document; - let published = Doc.GetProto(targetDoc)[GoogleRef] !== undefined; - return
      { - DocumentButtonBar.hasPushedHack = false; - targetDoc[Pushes] = NumCast(targetDoc[Pushes]) + 1; - }}> + const targetDoc = this.props.views[0].props.Document; + const published = Doc.GetProto(targetDoc)[GoogleRef] !== undefined; + const animation = this.isAnimatingPulse ? "shadow-pulse 1s linear infinite" : "none"; + return
      { + !published && runInAction(() => this.isAnimatingPulse = true); + DocumentButtonBar.hasPushedHack = false; + targetDoc[Pushes] = NumCast(targetDoc[Pushes]) + 1; + }}>
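// Editor's note (illustrative sketch, not part of the patch): the isAnimatingPulse flag introduced
// above follows a small MobX pattern -- an observable boolean is rendered into an inline CSS
// "animation" value so the push button pulses (via the shadow-pulse keyframe added to
// DocumentButtonBar.scss) until startPushOutcome clears it. A minimal standalone version, with
// hypothetical names (PulsingPushButton, onPush), might look like this:
import * as React from "react";
import { observable, action } from "mobx";
import { observer } from "mobx-react";

@observer
class PulsingPushButton extends React.Component<{ onPush: () => void }> {
    @observable private isAnimatingPulse = false;

    // start pulsing and kick off the (asynchronous) push
    @action private start = () => { this.isAnimatingPulse = true; this.props.onPush(); };
    // call this when the push outcome arrives, mirroring startPushOutcome in the diff
    @action public finish = () => { this.isAnimatingPulse = false; };

    render() {
        const animation = this.isAnimatingPulse ? "shadow-pulse 1s linear infinite" : "none";
        return <button style={{ animation }} onClick={this.start}>Push</button>;
    }
}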
      ; } @computed get considerGoogleDocsPull() { - let targetDoc = this.props.views[0].props.Document; - let dataDoc = Doc.GetProto(targetDoc); - let animation = this.isAnimatingFetch ? "spin 0.5s linear infinite" : "none"; + const targetDoc = this.props.views[0].props.Document; + const dataDoc = Doc.GetProto(targetDoc); + const animation = this.isAnimatingFetch ? "spin 0.5s linear infinite" : "none"; return !dataDoc[GoogleRef] ? (null) :
      }> @@ -214,13 +223,13 @@ export class DocumentButtonBar extends React.Component<{ views: DocumentView[], } render() { - let templates: Map = new Map(); + const templates: Map = new Map(); Array.from(Object.values(Templates.TemplateList)).map(template => templates.set(template, this.props.views.reduce((checked, doc) => checked || doc.getLayoutPropStr("show" + template.Name) ? true : false, false as boolean))); - let isText = this.props.views[0].props.Document.data instanceof RichTextField; // bcz: Todo - can't assume layout is using the 'data' field. need to add fieldKey to DocumentView - let considerPull = isText && this.considerGoogleDocsPull; - let considerPush = isText && this.considerGoogleDocsPush; + const isText = this.props.views[0].props.Document.data instanceof RichTextField; // bcz: Todo - can't assume layout is using the 'data' field. need to add fieldKey to DocumentView + const considerPull = isText && this.considerGoogleDocsPull; + const considerPush = isText && this.considerGoogleDocsPush; return
      {this.linkButton} diff --git a/src/client/views/DocumentDecorations.tsx b/src/client/views/DocumentDecorations.tsx index 66f47147f..7b0c31e37 100644 --- a/src/client/views/DocumentDecorations.tsx +++ b/src/client/views/DocumentDecorations.tsx @@ -24,7 +24,6 @@ import { DocumentView } from "./nodes/DocumentView"; import { FieldView } from "./nodes/FieldView"; import { IconBox } from "./nodes/IconBox"; import React = require("react"); -import { PointData } from '../../new_fields/InkField'; import { DocumentType } from '../documents/DocumentTypes'; const higflyout = require("@hig/flyout"); export const { anchorPoints } = higflyout; @@ -68,8 +67,6 @@ export class DocumentDecorations extends React.Component<{}, { value: string }> @observable public pushIcon: IconProp = "arrow-alt-circle-up"; @observable public pullIcon: IconProp = "arrow-alt-circle-down"; @observable public pullColor: string = "white"; - @observable public isAnimatingFetch = false; - @observable public isAnimatingPulse = false; @observable public openHover = false; constructor(props: Readonly<{}>) { @@ -82,25 +79,25 @@ export class DocumentDecorations extends React.Component<{}, { value: string }> @action titleChanged = (event: any) => { this._title = event.target.value; }; @action titleBlur = () => { this._edtingTitle = false; }; @action titleEntered = (e: any) => { - var key = e.keyCode || e.which; + const key = e.keyCode || e.which; // enter pressed if (key === 13) { - var text = e.target.value; + const text = e.target.value; if (text[0] === '#') { this._fieldKey = text.slice(1, text.length); this._title = this.selectionTitle; } else if (text.startsWith("::")) { - let targetID = text.slice(2, text.length); - let promoteDoc = SelectionManager.SelectedDocuments()[0]; + const targetID = text.slice(2, text.length); + const promoteDoc = SelectionManager.SelectedDocuments()[0]; DocUtils.Publish(promoteDoc.props.Document, targetID, promoteDoc.props.addDocument, promoteDoc.props.removeDocument); } else if (text.startsWith(">")) { - let fieldTemplateView = SelectionManager.SelectedDocuments()[0]; + const fieldTemplateView = SelectionManager.SelectedDocuments()[0]; SelectionManager.DeselectAll(); - let fieldTemplate = fieldTemplateView.props.Document; - let containerView = fieldTemplateView.props.ContainingCollectionView; - let docTemplate = fieldTemplateView.props.ContainingCollectionDoc; + const fieldTemplate = fieldTemplateView.props.Document; + const containerView = fieldTemplateView.props.ContainingCollectionView; + const docTemplate = fieldTemplateView.props.ContainingCollectionDoc; if (containerView && docTemplate) { - let metaKey = text.startsWith(">>") ? text.slice(2, text.length) : text.slice(1, text.length); + const metaKey = text.startsWith(">>") ? 
text.slice(2, text.length) : text.slice(1, text.length); if (metaKey !== containerView.props.fieldKey && containerView.props.DataDoc) { const fd = fieldTemplate.data; fd instanceof ObjectField && (Doc.GetProto(containerView.props.DataDoc)[metaKey] = ObjectField.MakeCopy(fd)); @@ -115,15 +112,15 @@ export class DocumentDecorations extends React.Component<{}, { value: string }> else { if (SelectionManager.SelectedDocuments().length > 0) { SelectionManager.SelectedDocuments()[0].props.Document.customTitle = !this._title.startsWith("-"); - let field = SelectionManager.SelectedDocuments()[0].props.Document[this._fieldKey]; + const field = SelectionManager.SelectedDocuments()[0].props.Document[this._fieldKey]; if (typeof field === "number") { SelectionManager.SelectedDocuments().forEach(d => { - let doc = d.props.Document.proto ? d.props.Document.proto : d.props.Document; + const doc = d.props.Document.proto ? d.props.Document.proto : d.props.Document; doc[this._fieldKey] = +this._title; }); } else { SelectionManager.SelectedDocuments().forEach(d => { - let doc = d.props.Document.proto ? d.props.Document.proto : d.props.Document; + const doc = d.props.Document.proto ? d.props.Document.proto : d.props.Document; doc[this._fieldKey] = this._title; }); } @@ -165,11 +162,11 @@ export class DocumentDecorations extends React.Component<{}, { value: string }> Doc.AreProtosEqual(documentView.props.Document, CurrentUserUtils.UserDocument)) { return bounds; } - let transform = (documentView.props.ScreenToLocalTransform().scale(documentView.props.ContentScaling())).inverse(); + const transform = (documentView.props.ScreenToLocalTransform().scale(documentView.props.ContentScaling())).inverse(); var [sptX, sptY] = transform.transformPoint(0, 0); let [bptX, bptY] = transform.transformPoint(documentView.props.PanelWidth(), documentView.props.PanelHeight()); if (documentView.props.Document.type === DocumentType.LINK) { - let rect = documentView.ContentDiv!.getElementsByClassName("docuLinkBox-cont")[0].getBoundingClientRect(); + const rect = documentView.ContentDiv!.getElementsByClassName("docuLinkBox-cont")[0].getBoundingClientRect(); sptX = rect.left; sptY = rect.top; bptX = rect.right; @@ -192,8 +189,8 @@ export class DocumentDecorations extends React.Component<{}, { value: string }> @action onBackgroundMove = (e: PointerEvent): void => { - let dragDocView = SelectionManager.SelectedDocuments()[0]; - let dragData = new DragManager.DocumentDragData(SelectionManager.SelectedDocuments().map(dv => dv.props.Document)); + const dragDocView = SelectionManager.SelectedDocuments()[0]; + const dragData = new DragManager.DocumentDragData(SelectionManager.SelectedDocuments().map(dv => dv.props.Document)); const [left, top] = dragDocView.props.ScreenToLocalTransform().scale(dragDocView.props.ContentScaling()).inverse().transformPoint(0, 0); dragData.offset = dragDocView.props.ScreenToLocalTransform().scale(dragDocView.props.ContentScaling()).transformDirection(e.x - left, e.y - top); dragData.moveDocument = SelectionManager.SelectedDocuments()[0].props.moveDocument; @@ -256,8 +253,8 @@ export class DocumentDecorations extends React.Component<{}, { value: string }> this._downX = e.pageX; this._downY = e.pageY; this._removeIcon = false; - let selDoc = SelectionManager.SelectedDocuments()[0]; - let selDocPos = selDoc.props.ScreenToLocalTransform().scale(selDoc.props.ContentScaling()).inverse().transformPoint(0, 0); + const selDoc = SelectionManager.SelectedDocuments()[0]; + const selDocPos = 
selDoc.props.ScreenToLocalTransform().scale(selDoc.props.ContentScaling()).inverse().transformPoint(0, 0); this._minimizedX = selDocPos[0] + 12; this._minimizedY = selDocPos[1] + 12; document.removeEventListener("pointermove", this.onMinimizeMove); @@ -272,12 +269,12 @@ export class DocumentDecorations extends React.Component<{}, { value: string }> e.stopPropagation(); if (Math.abs(e.pageX - this._downX) > Utils.DRAG_THRESHOLD || Math.abs(e.pageY - this._downY) > Utils.DRAG_THRESHOLD) { - let selDoc = SelectionManager.SelectedDocuments()[0]; - let selDocPos = selDoc.props.ScreenToLocalTransform().scale(selDoc.props.ContentScaling()).inverse().transformPoint(0, 0); - let snapped = Math.abs(e.pageX - selDocPos[0]) < 20 && Math.abs(e.pageY - selDocPos[1]) < 20; + const selDoc = SelectionManager.SelectedDocuments()[0]; + const selDocPos = selDoc.props.ScreenToLocalTransform().scale(selDoc.props.ContentScaling()).inverse().transformPoint(0, 0); + const snapped = Math.abs(e.pageX - selDocPos[0]) < 20 && Math.abs(e.pageY - selDocPos[1]) < 20; this._minimizedX = snapped ? selDocPos[0] + 4 : e.clientX; this._minimizedY = snapped ? selDocPos[1] - 18 : e.clientY; - let selectedDocs = SelectionManager.SelectedDocuments().map(sd => sd); + const selectedDocs = SelectionManager.SelectedDocuments().map(sd => sd); if (selectedDocs.length > 1) { this._iconDoc = this._iconDoc ? this._iconDoc : this.createIcon(SelectionManager.SelectedDocuments(), CollectionView.LayoutString("")); @@ -295,15 +292,15 @@ export class DocumentDecorations extends React.Component<{}, { value: string }> if (e.button === 0) { document.removeEventListener("pointermove", this.onMinimizeMove); document.removeEventListener("pointerup", this.onMinimizeUp); - let selectedDocs = SelectionManager.SelectedDocuments().map(sd => sd); + const selectedDocs = SelectionManager.SelectedDocuments().map(sd => sd); if (this._iconDoc && selectedDocs.length === 1 && this._removeIcon) { selectedDocs[0].props.removeDocument && selectedDocs[0].props.removeDocument(this._iconDoc); } if (!this._removeIcon && selectedDocs.length === 1) { // if you click on the top-left button when just 1 doc is selected, then collapse it. 
not sure why we don't do it for multiple selections this.getIconDoc(selectedDocs[0]).then(async icon => { - let minimizedDoc = await Cast(selectedDocs[0].props.Document.minimizedDoc, Doc); + const minimizedDoc = await Cast(selectedDocs[0].props.Document.minimizedDoc, Doc); if (minimizedDoc) { - let scrpt = selectedDocs[0].props.ScreenToLocalTransform().scale(selectedDocs[0].props.ContentScaling()).inverse().transformPoint( + const scrpt = selectedDocs[0].props.ScreenToLocalTransform().scale(selectedDocs[0].props.ContentScaling()).inverse().transformPoint( NumCast(minimizedDoc.x) - NumCast(selectedDocs[0].Document.x), NumCast(minimizedDoc.y) - NumCast(selectedDocs[0].Document.y)); SelectionManager.DeselectAll(); DocumentManager.Instance.animateBetweenPoint(scrpt, await DocListCastAsync(minimizedDoc.maximizedDocs)); @@ -317,8 +314,8 @@ export class DocumentDecorations extends React.Component<{}, { value: string }> @undoBatch @action createIcon = (selected: DocumentView[], layoutString: string): Doc => { - let doc = selected[0].props.Document; - let iconDoc = Docs.Create.IconDocument(layoutString); + const doc = selected[0].props.Document; + const iconDoc = Docs.Create.IconDocument(layoutString); iconDoc.isButton = true; IconBox.AutomaticTitle(iconDoc); @@ -334,7 +331,7 @@ export class DocumentDecorations extends React.Component<{}, { value: string }> } @action public getIconDoc = async (docView: DocumentView): Promise => { - let doc = docView.props.Document; + const doc = docView.props.Document; let iconDoc: Doc | undefined = await Cast(doc.minimizedDoc, Doc); if (!iconDoc || !DocumentManager.Instance.getDocumentView(iconDoc)) { @@ -344,8 +341,8 @@ export class DocumentDecorations extends React.Component<{}, { value: string }> return iconDoc; } moveIconDoc(iconDoc: Doc) { - let selView = SelectionManager.SelectedDocuments()[0]; - let where = (selView.props.ScreenToLocalTransform()).scale(selView.props.ContentScaling()). + const selView = SelectionManager.SelectedDocuments()[0]; + const where = (selView.props.ScreenToLocalTransform()).scale(selView.props.ContentScaling()). transformPoint(this._minimizedX - 12, this._minimizedY - 12); iconDoc.x = where[0] + NumCast(selView.props.Document.x); iconDoc.y = where[1] + NumCast(selView.props.Document.y); @@ -370,8 +367,8 @@ export class DocumentDecorations extends React.Component<{}, { value: string }> dist = dist < 3 ? 0 : dist; let usingRule = false; SelectionManager.SelectedDocuments().map(dv => { - let ruleProvider = dv.props.ruleProvider; - let heading = NumCast(dv.props.Document.heading); + const ruleProvider = dv.props.ruleProvider; + const heading = NumCast(dv.props.Document.heading); ruleProvider && heading && (Doc.GetProto(ruleProvider)["ruleRounding_" + heading] = `${Math.min(100, dist)}%`); usingRule = usingRule || (ruleProvider && heading ? 
true : false); }); @@ -419,8 +416,8 @@ export class DocumentDecorations extends React.Component<{}, { value: string }> let dX = 0, dY = 0, dW = 0, dH = 0; - let moveX = e.clientX - this._lastX; // e.movementX; - let moveY = e.clientY - this._lastY; // e.movementY; + const moveX = e.clientX - this._lastX; // e.movementX; + const moveY = e.clientY - this._lastY; // e.movementY; this._lastX = e.clientX; this._lastY = e.clientY; @@ -465,18 +462,18 @@ export class DocumentDecorations extends React.Component<{}, { value: string }> SelectionManager.SelectedDocuments().forEach(action((element: DocumentView) => { if (dX !== 0 || dY !== 0 || dW !== 0 || dH !== 0) { - let doc = PositionDocument(element.props.Document); - let layoutDoc = PositionDocument(Doc.Layout(element.props.Document)); + const doc = PositionDocument(element.props.Document); + const layoutDoc = PositionDocument(Doc.Layout(element.props.Document)); let nwidth = layoutDoc.nativeWidth || 0; let nheight = layoutDoc.nativeHeight || 0; - let width = (layoutDoc.width || 0); - let height = (layoutDoc.height || (nheight / nwidth * width)); - let scale = element.props.ScreenToLocalTransform().Scale * element.props.ContentScaling(); - let actualdW = Math.max(width + (dW * scale), 20); - let actualdH = Math.max(height + (dH * scale), 20); + const width = (layoutDoc.width || 0); + const height = (layoutDoc.height || (nheight / nwidth * width)); + const scale = element.props.ScreenToLocalTransform().Scale * element.props.ContentScaling(); + const actualdW = Math.max(width + (dW * scale), 20); + const actualdH = Math.max(height + (dH * scale), 20); doc.x = (doc.x || 0) + dX * (actualdW - width); doc.y = (doc.y || 0) + dY * (actualdH - height); - let fixedAspect = e.ctrlKey || (!layoutDoc.ignoreAspect && nwidth && nheight); + const fixedAspect = e.ctrlKey || (!layoutDoc.ignoreAspect && nwidth && nheight); if (fixedAspect && e.ctrlKey && layoutDoc.ignoreAspect) { layoutDoc.ignoreAspect = false; layoutDoc.nativeWidth = nwidth = layoutDoc.width || 0; @@ -529,8 +526,8 @@ export class DocumentDecorations extends React.Component<{}, { value: string }> @computed get selectionTitle(): string { if (SelectionManager.SelectedDocuments().length === 1) { - let selected = SelectionManager.SelectedDocuments()[0]; - let field = selected.props.Document[this._fieldKey]; + const selected = SelectionManager.SelectedDocuments()[0]; + const field = selected.props.Document[this._fieldKey]; if (typeof field === "string") { return field; } @@ -555,12 +552,12 @@ export class DocumentDecorations extends React.Component<{}, { value: string }> } } render() { - var bounds = this.Bounds; - let seldoc = SelectionManager.SelectedDocuments().length ? SelectionManager.SelectedDocuments()[0] : undefined; + const bounds = this.Bounds; + const seldoc = SelectionManager.SelectedDocuments().length ? SelectionManager.SelectedDocuments()[0] : undefined; if (SelectionManager.GetIsDragging() || bounds.x === Number.MAX_VALUE || !seldoc || this._hidden || isNaN(bounds.r) || isNaN(bounds.b) || isNaN(bounds.x) || isNaN(bounds.y)) { return (null); } - let minimizeIcon = ( + const minimizeIcon = (
      {/* Currently, this is set to be enabled if there is no ink selected. It might be interesting to think about minimizing ink if it's useful? -syip2*/} {SelectionManager.SelectedDocuments().length === 1 ? IconBox.DocumentIcon(StrCast(SelectionManager.SelectedDocuments()[0].props.Document.layout, "...")) : "..."} diff --git a/src/client/views/GlobalKeyHandler.ts b/src/client/views/GlobalKeyHandler.ts index 8f397e331..edc7df12a 100644 --- a/src/client/views/GlobalKeyHandler.ts +++ b/src/client/views/GlobalKeyHandler.ts @@ -25,7 +25,7 @@ export default class KeyManager { private router = new Map(); constructor() { - let isMac = navigator.platform.toLowerCase().indexOf("mac") >= 0; + const isMac = navigator.platform.toLowerCase().indexOf("mac") >= 0; // SHIFT CONTROL ALT META this.router.set("0000", this.unmodified); @@ -36,22 +36,22 @@ export default class KeyManager { } public handle = async (e: KeyboardEvent) => { - let keyname = e.key && e.key.toLowerCase(); + const keyname = e.key && e.key.toLowerCase(); this.handleGreedy(keyname); if (modifiers.includes(keyname)) { return; } - let bit = (value: boolean) => value ? "1" : "0"; - let modifierIndex = bit(e.shiftKey) + bit(e.ctrlKey) + bit(e.altKey) + bit(e.metaKey); + const bit = (value: boolean) => value ? "1" : "0"; + const modifierIndex = bit(e.shiftKey) + bit(e.ctrlKey) + bit(e.altKey) + bit(e.metaKey); - let handleConstrained = this.router.get(modifierIndex); + const handleConstrained = this.router.get(modifierIndex); if (!handleConstrained) { return; } - let control = await handleConstrained(keyname, e); + const control = await handleConstrained(keyname, e); control.stopPropagation && e.stopPropagation(); control.preventDefault && e.preventDefault(); @@ -65,7 +65,7 @@ export default class KeyManager { private unmodified = action((keyname: string, e: KeyboardEvent) => { switch (keyname) { case "escape": - let main = MainView.Instance; + const main = MainView.Instance; InkingControl.Instance.switchTool(InkTool.None); if (main.isPointerDown) { DragManager.AbortDrag(); @@ -89,8 +89,8 @@ export default class KeyManager { } UndoManager.RunInBatch(() => { SelectionManager.SelectedDocuments().map(docView => { - let doc = docView.props.Document; - let remove = docView.props.removeDocument; + const doc = docView.props.Document; + const remove = docView.props.removeDocument; remove && remove(doc); }); }, "delete"); @@ -121,8 +121,8 @@ export default class KeyManager { } private alt = action((keyname: string) => { - let stopPropagation = true; - let preventDefault = true; + const stopPropagation = true; + const preventDefault = true; switch (keyname) { // case "n": @@ -190,7 +190,7 @@ export default class KeyManager { } break; case "o": - let target = SelectionManager.SelectedDocuments()[0]; + const target = SelectionManager.SelectedDocuments()[0]; target && CollectionDockingView.Instance && CollectionDockingView.Instance.OpenFullScreen(target); break; case "r": @@ -220,12 +220,12 @@ export default class KeyManager { }); async printClipboard() { - let text: string = await navigator.clipboard.readText(); + const text: string = await navigator.clipboard.readText(); } private ctrl_shift = action((keyname: string) => { - let stopPropagation = true; - let preventDefault = true; + const stopPropagation = true; + const preventDefault = true; switch (keyname) { case "z": diff --git a/src/client/views/InkSelectDecorations.tsx b/src/client/views/InkSelectDecorations.tsx index d40df9b75..3ad50762d 100644 --- a/src/client/views/InkSelectDecorations.tsx 
+++ b/src/client/views/InkSelectDecorations.tsx @@ -29,10 +29,10 @@ export default class InkSelectDecorations extends Touchable { @computed get Bounds(): { x: number, y: number, b: number, r: number } { - let left = Number.MAX_VALUE; - let top = Number.MAX_VALUE; - let right = -Number.MAX_VALUE; - let bottom = -Number.MAX_VALUE; + const left = Number.MAX_VALUE; + const top = Number.MAX_VALUE; + const right = -Number.MAX_VALUE; + const bottom = -Number.MAX_VALUE; this._selectedInkNodes.forEach((value: PointData, key: string) => { // value.pathData.map(val => { // left = Math.min(val.x, left); @@ -45,7 +45,7 @@ export default class InkSelectDecorations extends Touchable { } render() { - let bounds = this.Bounds; + const bounds = this.Bounds; return
      { + switchColor = action((color: ColorState): void => { this._selectedColor = color.hex + (color.rgb.a !== undefined ? this.decimalToHexString(Math.round(color.rgb.a * 255)) : "ff"); if (InkingControl.Instance.selectedTool === InkTool.None) { - let selected = SelectionManager.SelectedDocuments(); - let oldColors = selected.map(view => { - let targetDoc = view.props.Document.dragFactory instanceof Doc ? view.props.Document.dragFactory : + const selected = SelectionManager.SelectedDocuments(); + const oldColors = selected.map(view => { + const targetDoc = view.props.Document.dragFactory instanceof Doc ? view.props.Document.dragFactory : view.props.Document.layout instanceof Doc ? view.props.Document.layout : view.props.Document.isTemplateField ? view.props.Document : Doc.GetProto(view.props.Document); - let sel = window.getSelection(); + const sel = window.getSelection(); if (StrCast(targetDoc.layout).indexOf("FormattedTextBox") !== -1 && (!sel || sel.toString() !== "")) { targetDoc.color = this._selectedColor; return { @@ -52,24 +52,24 @@ export class InkingControl { previous: StrCast(targetDoc.color) }; } - let oldColor = StrCast(targetDoc.backgroundColor); + const oldColor = StrCast(targetDoc.backgroundColor); let matchedColor = this._selectedColor; const cvd = view.props.ContainingCollectionDoc; let ruleProvider = view.props.ruleProvider; if (cvd) { if (!cvd.colorPalette) { - let defaultPalette = ["rg(114,229,239)", "rgb(255,246,209)", "rgb(255,188,156)", "rgb(247,220,96)", "rgb(122,176,238)", + const defaultPalette = ["rg(114,229,239)", "rgb(255,246,209)", "rgb(255,188,156)", "rgb(247,220,96)", "rgb(122,176,238)", "rgb(209,150,226)", "rgb(127,235,144)", "rgb(252,188,189)", "rgb(247,175,81)",]; - let colorPalette = Cast(cvd.colorPalette, listSpec("string")); + const colorPalette = Cast(cvd.colorPalette, listSpec("string")); if (!colorPalette) cvd.colorPalette = new List(defaultPalette); } - let cp = Cast(cvd.colorPalette, listSpec("string")) as string[]; + const cp = Cast(cvd.colorPalette, listSpec("string")) as string[]; let closest = 0; let dist = 10000000; - let ccol = Utils.fromRGBAstr(StrCast(targetDoc.backgroundColor)); + const ccol = Utils.fromRGBAstr(StrCast(targetDoc.backgroundColor)); for (let i = 0; i < cp.length; i++) { - let cpcol = Utils.fromRGBAstr(cp[i]); - let d = Math.sqrt((ccol.r - cpcol.r) * (ccol.r - cpcol.r) + (ccol.b - cpcol.b) * (ccol.b - cpcol.b) + (ccol.g - cpcol.g) * (ccol.g - cpcol.g)); + const cpcol = Utils.fromRGBAstr(cp[i]); + const d = Math.sqrt((ccol.r - cpcol.r) * (ccol.r - cpcol.r) + (ccol.b - cpcol.b) * (ccol.b - cpcol.b) + (ccol.g - cpcol.g) * (ccol.g - cpcol.g)); if (d < dist) { dist = d; closest = i; diff --git a/src/client/views/InkingStroke.tsx b/src/client/views/InkingStroke.tsx index a27f106e3..7cee84fc5 100644 --- a/src/client/views/InkingStroke.tsx +++ b/src/client/views/InkingStroke.tsx @@ -14,7 +14,7 @@ type InkDocument = makeInterface<[typeof documentSchema]>; const InkDocument = makeInterface(documentSchema); export function CreatePolyline(points: { x: number, y: number }[], left: number, top: number, color?: string, width?: number) { - let pts = points.reduce((acc: string, pt: { x: number, y: number }) => acc + `${pt.x - left},${pt.y - top} `, ""); + const pts = points.reduce((acc: string, pt: { x: number, y: number }) => acc + `${pt.x - left},${pt.y - top} `, ""); return ( p.x); - let ys = data.map(p => p.y); - let left = Math.min(...xs); - let top = Math.min(...ys); - let right = Math.max(...xs); - let bottom = Math.max(...ys); - 
let points = CreatePolyline(data, 0, 0, this.Document.color, this.Document.strokeWidth); - let width = right - left; - let height = bottom - top; - let scaleX = this.PanelWidth / width; - let scaleY = this.PanelHeight / height; + const data: InkData = Cast(this.Document.data, InkField)?.inkData ?? []; + const xs = data.map(p => p.x); + const ys = data.map(p => p.y); + const left = Math.min(...xs); + const top = Math.min(...ys); + const right = Math.max(...xs); + const bottom = Math.max(...ys); + const points = CreatePolyline(data, 0, 0, this.Document.color, this.Document.strokeWidth); + const width = right - left; + const height = bottom - top; + const scaleX = this.PanelWidth / width; + const scaleY = this.PanelHeight / height; return ( 1) { - let type = pathname[0]; + const type = pathname[0]; if (type === "doc") { CurrentUserUtils.MainDocId = pathname[1]; if (!this.userDoc) { @@ -158,7 +158,7 @@ export class MainView extends React.Component { initAuthenticationRouters = async () => { // Load the user's active workspace, or create a new one if initial session after signup - let received = CurrentUserUtils.MainDocId; + const received = CurrentUserUtils.MainDocId; if (received && !this.userDoc) { reaction( () => CurrentUserUtils.GuestTarget, @@ -175,7 +175,7 @@ export class MainView extends React.Component { }), ); } - let doc = this.userDoc && await Cast(this.userDoc.activeWorkspace, Doc); + const doc = this.userDoc && await Cast(this.userDoc.activeWorkspace, Doc); if (doc) { this.openWorkspace(doc); } else { @@ -186,9 +186,9 @@ export class MainView extends React.Component { @action createNewWorkspace = async (id?: string) => { - let workspaces = Cast(this.userDoc.workspaces, Doc) as Doc; - let workspaceCount = DocListCast(workspaces.data).length + 1; - let freeformOptions: DocumentOptions = { + const workspaces = Cast(this.userDoc.workspaces, Doc) as Doc; + const workspaceCount = DocListCast(workspaces.data).length + 1; + const freeformOptions: DocumentOptions = { x: 0, y: 400, width: this._panelWidth * .7, @@ -196,10 +196,10 @@ export class MainView extends React.Component { title: "Collection " + workspaceCount, backgroundColor: "white" }; - let freeformDoc = CurrentUserUtils.GuestTarget || Docs.Create.FreeformDocument([], freeformOptions); + const freeformDoc = CurrentUserUtils.GuestTarget || Docs.Create.FreeformDocument([], freeformOptions); Doc.AddDocToList(Doc.GetProto(CurrentUserUtils.UserDocument.documents as Doc), "data", freeformDoc); - var dockingLayout = { content: [{ type: 'row', content: [CollectionDockingView.makeDocumentConfig(freeformDoc, freeformDoc, 600)] }] }; - let mainDoc = Docs.Create.DockDocument([freeformDoc], JSON.stringify(dockingLayout), { title: `Workspace ${workspaceCount}` }, id); + const dockingLayout = { content: [{ type: 'row', content: [CollectionDockingView.makeDocumentConfig(freeformDoc, freeformDoc, 600)] }] }; + const mainDoc = Docs.Create.DockDocument([freeformDoc], JSON.stringify(dockingLayout), { title: `Workspace ${workspaceCount}` }, id); Doc.AddDocToList(workspaces, "data", mainDoc); // bcz: strangely, we need a timeout to prevent exceptions/issues initializing GoldenLayout (the rendering engine for Main Container) setTimeout(() => this.openWorkspace(mainDoc), 0); @@ -213,7 +213,7 @@ export class MainView extends React.Component { !("presentationView" in doc) && (doc.presentationView = new List([Docs.Create.TreeDocument([], { title: "Presentation" })])); this.userDoc ? 
(this.userDoc.activeWorkspace = doc) : (CurrentUserUtils.GuestWorkspace = doc); } - let state = this._urlState; + const state = this._urlState; if (state.sharing === true && !this.userDoc) { DocServer.Control.makeReadOnly(); } else { @@ -263,8 +263,8 @@ export class MainView extends React.Component { @computed get dockingContent() { const mainContainer = this.mainContainer; - let flyoutWidth = this.flyoutWidth; // bcz: need to be here because Measure messes with observables. - let flyoutTranslate = this._flyoutTranslate; + const flyoutWidth = this.flyoutWidth; // bcz: need to be here because Measure messes with observables. + const flyoutTranslate = this._flyoutTranslate; return {({ measureRef }) =>
      @@ -352,11 +352,11 @@ export class MainView extends React.Component { mainContainerXf = () => new Transform(0, -this._buttonBarHeight, 1); @computed get flyout() { - let sidebarContent = this.userDoc && this.userDoc.sidebarContainer; + const sidebarContent = this.userDoc && this.userDoc.sidebarContainer; if (!(sidebarContent instanceof Doc)) { return (null); } - let sidebarButtonsDoc = Cast(CurrentUserUtils.UserDocument.sidebarButtons, Doc) as Doc; + const sidebarButtonsDoc = Cast(CurrentUserUtils.UserDocument.sidebarButtons, Doc) as Doc; sidebarButtonsDoc.columnWidth = this.flyoutWidth / 3 - 30; return
      @@ -463,7 +463,7 @@ export class MainView extends React.Component { buttonBarXf = () => { if (!this._docBtnRef.current) return Transform.Identity(); - let { scale, translateX, translateY } = Utils.GetScreenTransform(this._docBtnRef.current); + const { scale, translateX, translateY } = Utils.GetScreenTransform(this._docBtnRef.current); return new Transform(-translateX, -translateY, 1 / scale); } @computed get docButtons() { diff --git a/src/client/views/MainViewModal.tsx b/src/client/views/MainViewModal.tsx index 221a0260a..9198fe3e3 100644 --- a/src/client/views/MainViewModal.tsx +++ b/src/client/views/MainViewModal.tsx @@ -14,9 +14,9 @@ export interface MainViewOverlayProps { export default class MainViewModal extends React.Component { render() { - let p = this.props; - let dialogueOpacity = p.dialogueBoxDisplayedOpacity || 1; - let overlayOpacity = p.overlayDisplayedOpacity || 0.4; + const p = this.props; + const dialogueOpacity = p.dialogueBoxDisplayedOpacity || 1; + const overlayOpacity = p.overlayDisplayedOpacity || 0.4; return !p.isDisplayed ? (null) : (
      { } else { let childSuccess = true; if (this._addChildren) { - for (let document of doc) { - let collectionChildren = await DocListCastAsync(document.data); + for (const document of doc) { + const collectionChildren = await DocListCastAsync(document.data); if (collectionChildren) { childSuccess = collectionChildren.every(c => KeyValueBox.ApplyKVPScript(c, this._currentKey, script)); } diff --git a/src/client/views/OverlayView.tsx b/src/client/views/OverlayView.tsx index 9869e24d1..32ada293c 100644 --- a/src/client/views/OverlayView.tsx +++ b/src/client/views/OverlayView.tsx @@ -148,7 +148,7 @@ export class OverlayView extends React.Component { return CurrentUserUtils.UserDocument.overlays instanceof Doc && DocListCast(CurrentUserUtils.UserDocument.overlays.data).map(d => { d.inOverlay = true; let offsetx = 0, offsety = 0; - let onPointerMove = action((e: PointerEvent) => { + const onPointerMove = action((e: PointerEvent) => { if (e.buttons === 1) { d.x = e.clientX + offsetx; d.y = e.clientY + offsety; @@ -156,14 +156,14 @@ export class OverlayView extends React.Component { e.preventDefault(); } }); - let onPointerUp = action((e: PointerEvent) => { + const onPointerUp = action((e: PointerEvent) => { document.removeEventListener("pointermove", onPointerMove); document.removeEventListener("pointerup", onPointerUp); e.stopPropagation(); e.preventDefault(); }); - let onPointerDown = (e: React.PointerEvent) => { + const onPointerDown = (e: React.PointerEvent) => { offsetx = NumCast(d.x) - e.clientX; offsety = NumCast(d.y) - e.clientY; e.stopPropagation(); diff --git a/src/client/views/PreviewCursor.tsx b/src/client/views/PreviewCursor.tsx index 136a272ab..fd0287b6c 100644 --- a/src/client/views/PreviewCursor.tsx +++ b/src/client/views/PreviewCursor.tsx @@ -26,7 +26,7 @@ export class PreviewCursor extends React.Component<{}> { paste = (e: ClipboardEvent) => { if (PreviewCursor.Visible) { if (e.clipboardData) { - let newPoint = PreviewCursor._getTransform().transformPoint(PreviewCursor._clickPoint[0], PreviewCursor._clickPoint[1]); + const newPoint = PreviewCursor._getTransform().transformPoint(PreviewCursor._clickPoint[0], PreviewCursor._clickPoint[1]); runInAction(() => { PreviewCursor.Visible = false; }); @@ -44,7 +44,7 @@ export class PreviewCursor extends React.Component<{}> { } // tests for URL and makes web document - let re: any = /^https?:\/\//g; + const re: any = /^https?:\/\//g; if (re.test(e.clipboardData.getData("text/plain"))) { const url = e.clipboardData.getData("text/plain"); PreviewCursor._addDocument(Docs.Create.WebDocument(url, { @@ -56,7 +56,7 @@ export class PreviewCursor extends React.Component<{}> { } // creates text document - let newBox = Docs.Create.TextDocument({ + const newBox = Docs.Create.TextDocument({ width: 200, height: 100, x: newPoint[0], y: newPoint[1], @@ -69,10 +69,10 @@ export class PreviewCursor extends React.Component<{}> { } //pasting in images if (e.clipboardData.getData("text/html") !== "" && e.clipboardData.getData("text/html").includes(" { onFocus = this.onFocus; onBlur = this.onBlur; } - let params = { } } // tslint:disable-next-line: no-unnecessary-callback-wrapper - let params: string[] = []; - let setParams = (p: string[]) => params.splice(0, params.length, ...p); - let scriptingBox = { + const params: string[] = []; + const setParams = (p: string[]) => params.splice(0, params.length, ...p); + const scriptingBox = { if (prewrapper) { text = prewrapper + text + (postwrapper ? 
postwrapper : ""); } diff --git a/src/client/views/TemplateMenu.tsx b/src/client/views/TemplateMenu.tsx index c65b338b4..e6116ca09 100644 --- a/src/client/views/TemplateMenu.tsx +++ b/src/client/views/TemplateMenu.tsx @@ -61,19 +61,19 @@ export class TemplateMenu extends React.Component { toggleFloat = (e: React.ChangeEvent): void => { SelectionManager.DeselectAll(); - let topDocView = this.props.docs[0]; - let topDoc = topDocView.props.Document; - let xf = topDocView.props.ScreenToLocalTransform(); - let ex = e.target.clientLeft; - let ey = e.target.clientTop; + const topDocView = this.props.docs[0]; + const topDoc = topDocView.props.Document; + const xf = topDocView.props.ScreenToLocalTransform(); + const ex = e.target.clientLeft; + const ey = e.target.clientTop; undoBatch(action(() => topDoc.z = topDoc.z ? 0 : 1))(); if (e.target.checked) { setTimeout(() => { - let newDocView = DocumentManager.Instance.getDocumentView(topDoc); + const newDocView = DocumentManager.Instance.getDocumentView(topDoc); if (newDocView) { - let de = new DragManager.DocumentDragData([topDoc]); + const de = new DragManager.DocumentDragData([topDoc]); de.moveDocument = topDocView.props.moveDocument; - let xf = newDocView.ContentDiv!.getBoundingClientRect(); + const xf = newDocView.ContentDiv!.getBoundingClientRect(); DragManager.StartDocumentDrag([newDocView.ContentDiv!], de, ex, ey, { offsetX: (ex - xf.left), offsetY: (ey - xf.top), handlers: { dragComplete: () => { }, }, @@ -82,9 +82,9 @@ export class TemplateMenu extends React.Component { } }, 10); } else if (topDocView.props.ContainingCollectionView) { - let collView = topDocView.props.ContainingCollectionView; - let [sx, sy] = xf.inverse().transformPoint(0, 0); - let [x, y] = collView.props.ScreenToLocalTransform().transformPoint(sx, sy); + const collView = topDocView.props.ContainingCollectionView; + const [sx, sy] = xf.inverse().transformPoint(0, 0); + const [x, y] = collView.props.ScreenToLocalTransform().transformPoint(sx, sy); topDoc.x = x; topDoc.y = y; } @@ -122,7 +122,7 @@ export class TemplateMenu extends React.Component { @action toggleChrome = (): void => { this.props.docs.map(dv => { - let layout = Doc.Layout(dv.Document); + const layout = Doc.Layout(dv.Document); layout.chromeStatus = (layout.chromeStatus !== "disabled" ? 
"disabled" : "enabled"); }); } @@ -147,8 +147,8 @@ export class TemplateMenu extends React.Component { document.removeEventListener("pointermove", this.onAliasButtonMoved); document.removeEventListener("pointerup", this.onAliasButtonUp); - let dragDocView = this.props.docs[0]; - let dragData = new DragManager.DocumentDragData([dragDocView.props.Document]); + const dragDocView = this.props.docs[0]; + const dragData = new DragManager.DocumentDragData([dragDocView.props.Document]); const [left, top] = dragDocView.props.ScreenToLocalTransform().inverse().transformPoint(0, 0); dragData.embedDoc = true; dragData.dropAction = "alias"; @@ -165,8 +165,8 @@ export class TemplateMenu extends React.Component { } render() { - let layout = Doc.Layout(this.props.docs[0].Document); - let templateMenu: Array = []; + const layout = Doc.Layout(this.props.docs[0].Document); + const templateMenu: Array = []; this.props.templates.forEach((checked, template) => templateMenu.push()); templateMenu.push(); diff --git a/src/client/views/Touchable.tsx b/src/client/views/Touchable.tsx index 0056a1d96..183d3e4e8 100644 --- a/src/client/views/Touchable.tsx +++ b/src/client/views/Touchable.tsx @@ -17,7 +17,7 @@ export abstract class Touchable extends React.Component { @action protected onTouchStart = (e: React.TouchEvent): void => { for (let i = 0; i < e.targetTouches.length; i++) { - let pt: any = e.targetTouches.item(i); + const pt: any = e.targetTouches.item(i); // pen is also a touch, but with a radius of 0.5 (at least with the surface pens). i doubt anyone's fingers are 2 pixels wide, // and this seems to be the only way of differentiating pen and touch on touch events if (pt.radiusX > 2 && pt.radiusY > 2) { @@ -59,7 +59,7 @@ export abstract class Touchable extends React.Component { } for (let i = 0; i < e.targetTouches.length; i++) { - let pt = e.targetTouches.item(i); + const pt = e.targetTouches.item(i); if (pt) { if (this.prevPoints.has(pt.identifier)) { this.prevPoints.set(pt.identifier, pt); @@ -78,7 +78,7 @@ export abstract class Touchable extends React.Component { // remove all the touches associated with the event for (let i = 0; i < e.targetTouches.length; i++) { - let pt = e.targetTouches.item(i); + const pt = e.targetTouches.item(i); if (pt) { if (this.prevPoints.has(pt.identifier)) { this.prevPoints.delete(pt.identifier); diff --git a/src/client/views/collections/CollectionDockingView.tsx b/src/client/views/collections/CollectionDockingView.tsx index 3040e74b0..77e6e1c93 100644 --- a/src/client/views/collections/CollectionDockingView.tsx +++ b/src/client/views/collections/CollectionDockingView.tsx @@ -96,13 +96,13 @@ export class CollectionDockingView extends React.Component { - let target = this._goldenLayout._maximisedItem; + const target = this._goldenLayout._maximisedItem; if (target !== null && this._maximizedSrc) { this._goldenLayout._maximisedItem.remove(); SelectionManager.SelectDoc(this._maximizedSrc, false); @@ -130,7 +130,7 @@ export class CollectionDockingView extends React.Component { @@ -176,21 +176,21 @@ export class CollectionDockingView extends React.Component { Doc.GetProto(document).lastOpened = new DateField; - let docContentConfig = CollectionDockingView.makeDocumentConfig(document, dataDocument); + const docContentConfig = CollectionDockingView.makeDocumentConfig(document, dataDocument); if (stack === undefined) { let stack: any = this._goldenLayout.root; while (!stack.isStack) { @@ -236,7 +236,7 @@ export class CollectionDockingView extends React.Component this._goldenLayout = new 
GoldenLayout(JSON.parse(config))); @@ -280,7 +280,7 @@ export class CollectionDockingView extends React.Component this.setupGoldenLayout(), 1); - let userDoc = CurrentUserUtils.UserDocument; + const userDoc = CurrentUserUtils.UserDocument; userDoc && DocListCast((userDoc.workspaces as Doc).data).map(d => d.workspaceBrush = false); this.props.Document.workspaceBrush = true; } @@ -311,7 +311,7 @@ export class CollectionDockingView extends React.Component { - var cur = this._containerRef.current; + const cur = this._containerRef.current; // bcz: since GoldenLayout isn't a React component itself, we need to notify it to resize when its document container's size has changed this._goldenLayout && this._goldenLayout.updateSize(cur!.getBoundingClientRect().width, cur!.getBoundingClientRect().height); @@ -330,19 +330,19 @@ export class CollectionDockingView extends React.Component { this._isPointerDown = true; - let onPointerUp = action(() => { + const onPointerUp = action(() => { window.removeEventListener("pointerup", onPointerUp); this._isPointerDown = false; }); window.addEventListener("pointerup", onPointerUp); - var className = (e.target as any).className; + const className = (e.target as any).className; if (className === "messageCounter") { e.stopPropagation(); e.preventDefault(); - let x = e.clientX; - let y = e.clientY; - let docid = (e.target as any).DashDocId; - let tab = (e.target as any).parentElement as HTMLElement; + const x = e.clientX; + const y = e.clientY; + const docid = (e.target as any).DashDocId; + const tab = (e.target as any).parentElement as HTMLElement; DocServer.GetRefField(docid).then(action(async (sourceDoc: Opt) => (sourceDoc instanceof Doc) && DragLinksAsDocuments(tab, x, y, sourceDoc))); } @@ -352,18 +352,18 @@ export class CollectionDockingView extends React.Component { - let matches = json.match(/\"documentId\":\"[a-z0-9-]+\"/g); - let docids = matches?.map(m => m.replace("\"documentId\":\"", "").replace("\"", "")); + const matches = json.match(/\"documentId\":\"[a-z0-9-]+\"/g); + const docids = matches?.map(m => m.replace("\"documentId\":\"", "").replace("\"", "")); if (docids) { - let docs = (await Promise.all(docids.map(id => DocServer.GetRefField(id)))).filter(f => f).map(f => f as Doc); + const docs = (await Promise.all(docids.map(id => DocServer.GetRefField(id)))).filter(f => f).map(f => f as Doc); Doc.GetProto(this.props.Document)[this.props.fieldKey] = new List(docs); } } @undoBatch stateChanged = () => { - var json = JSON.stringify(this._goldenLayout.toConfig()); + const json = JSON.stringify(this._goldenLayout.toConfig()); this.props.Document.dockingConfig = json; this.updateDataField(json); @@ -380,7 +380,7 @@ export class CollectionDockingView extends React.Component { if (!this._isPointerDown || !SelectionManager.GetIsDragging()) return; - var activeContentItem = tab.header.parent.getActiveContentItem(); + const activeContentItem = tab.header.parent.getActiveContentItem(); if (tab.contentItem !== activeContentItem) { tab.header.parent.setActiveContentItem(tab.contentItem); } @@ -447,12 +447,12 @@ export class CollectionDockingView extends React.Component { - let doc = await DocServer.GetRefField(contentItem.config.props.documentId); + const doc = await DocServer.GetRefField(contentItem.config.props.documentId); if (doc instanceof Doc) { let recent: Doc | undefined; if (CurrentUserUtils.UserDocument && (recent = await Cast(CurrentUserUtils.UserDocument.recentlyClosed, Doc))) { Doc.AddDocToList(recent, "data", doc, undefined, true, true); } - let theDoc 
= doc; + const theDoc = doc; CollectionDockingView.Instance._removedDocs.push(theDoc); } }); @@ -505,7 +505,7 @@ export class CollectionDockingView extends React.Component { @action public PinDoc(doc: Doc) { //add this new doc to props.Document - let curPres = Cast(CurrentUserUtils.UserDocument.curPresentation, Doc) as Doc; + const curPres = Cast(CurrentUserUtils.UserDocument.curPresentation, Doc) as Doc; if (curPres) { - let pinDoc = Docs.Create.PresElementBoxDocument({ backgroundColor: "transparent" }); + const pinDoc = Docs.Create.PresElementBoxDocument({ backgroundColor: "transparent" }); Doc.GetProto(pinDoc).presentationTargetDoc = doc; Doc.GetProto(pinDoc).title = ComputedField.MakeFunction('(this.presentationTargetDoc instanceof Doc) && this.presentationTargetDoc.title.toString()'); const data = Cast(curPres.data, listSpec(Doc)); @@ -581,8 +581,8 @@ export class DockedFrameRenderer extends React.Component { } componentDidMount() { - let observer = new _global.ResizeObserver(action((entries: any) => { - for (let entry of entries) { + const observer = new _global.ResizeObserver(action((entries: any) => { + for (const entry of entries) { this._panelWidth = entry.contentRect.width; this._panelHeight = entry.contentRect.height; } @@ -625,14 +625,14 @@ export class DockedFrameRenderer extends React.Component { const nativeH = this.nativeHeight(); const nativeW = this.nativeWidth(); if (!nativeW || !nativeH) return 1; - let wscale = this.panelWidth() / nativeW; + const wscale = this.panelWidth() / nativeW; return wscale * nativeH > this._panelHeight ? this._panelHeight / nativeH : wscale; } ScreenToLocalTransform = () => { if (this._mainCont && this._mainCont.children) { - let { scale, translateX, translateY } = Utils.GetScreenTransform(this._mainCont.children[0].firstChild as HTMLElement); - scale = Utils.GetScreenTransform(this._mainCont).scale; + const { translateX, translateY } = Utils.GetScreenTransform(this._mainCont.children[0].firstChild as HTMLElement); + const scale = Utils.GetScreenTransform(this._mainCont).scale; return CollectionDockingView.Instance.props.ScreenToLocalTransform().translate(-translateX, -translateY).scale(1 / this.contentScaling() / scale); } return Transform.Identity(); @@ -657,7 +657,7 @@ export class DockedFrameRenderer extends React.Component { @computed get docView() { if (!this._document) return (null); const document = this._document; - let resolvedDataDoc = document.layout instanceof Doc ? document : this._dataDoc; + const resolvedDataDoc = document.layout instanceof Doc ? 
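The contentScaling hunk above scales a docked document to fill the panel's width unless that would overflow the panel vertically, in which case it falls back to a height-based scale. A small sketch of the same computation with plain parameters (the parameter names are illustrative):

// Sketch of the fit-to-panel scale: width-fit first, height-fit if that overflows.
function contentScaling(nativeW: number, nativeH: number, panelW: number, panelH: number): number {
    if (!nativeW || !nativeH) return 1;   // no native size: render at 1:1
    const wscale = panelW / nativeW;      // scale needed to fill the panel width
    return wscale * nativeH > panelH      // would the scaled height overflow?
        ? panelH / nativeH                // then fit to height instead
        : wscale;
}

console.log(contentScaling(800, 600, 400, 400)); // width-fit is 300px tall -> 0.5
console.log(contentScaling(600, 800, 400, 400)); // width-fit would be ~533px tall -> height-fit -> 0.5
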
document : this._dataDoc; return d[key] = castedValue); this.props.parent.drop(e, de); e.stopPropagation(); @@ -86,7 +86,7 @@ export class CollectionMasonryViewFieldRow extends React.Component { - let parsed = parseInt(value); + const parsed = parseInt(value); if (!isNaN(parsed)) return parsed; if (value.toLowerCase().indexOf("true") > -1) return true; if (value.toLowerCase().indexOf("false") > -1) return false; @@ -96,8 +96,8 @@ export class CollectionMasonryViewFieldRow extends React.Component { this._createAliasSelected = false; - let key = StrCast(this.props.parent.props.Document.sectionFilter); - let castedValue = this.getValue(value); + const key = StrCast(this.props.parent.props.Document.sectionFilter); + const castedValue = this.getValue(value); if (castedValue) { if (this.props.parent.sectionHeaders) { if (this.props.parent.sectionHeaders.map(i => i.heading).indexOf(castedValue.toString()) > -1) { @@ -135,18 +135,18 @@ export class CollectionMasonryViewFieldRow extends React.Component { this._createAliasSelected = false; - let key = StrCast(this.props.parent.props.Document.sectionFilter); - let newDoc = Docs.Create.TextDocument({ height: 18, width: 200, title: value }); + const key = StrCast(this.props.parent.props.Document.sectionFilter); + const newDoc = Docs.Create.TextDocument({ height: 18, width: 200, title: value }); newDoc[key] = this.getValue(this.props.heading); return this.props.parent.props.addDocument(newDoc); } deleteRow = undoBatch(action(() => { this._createAliasSelected = false; - let key = StrCast(this.props.parent.props.Document.sectionFilter); + const key = StrCast(this.props.parent.props.Document.sectionFilter); this.props.docList.forEach(d => d[key] = undefined); if (this.props.parent.sectionHeaders && this.props.headingObject) { - let index = this.props.parent.sectionHeaders.indexOf(this.props.headingObject); + const index = this.props.parent.sectionHeaders.indexOf(this.props.headingObject); this.props.parent.sectionHeaders.splice(index, 1); } })); @@ -162,18 +162,18 @@ export class CollectionMasonryViewFieldRow extends React.Component { - let [dx, dy] = this.props.screenToLocalTransform().transformDirection(e.clientX - this._startDragPosition.x, e.clientY - this._startDragPosition.y); + const [dx, dy] = this.props.screenToLocalTransform().transformDirection(e.clientX - this._startDragPosition.x, e.clientY - this._startDragPosition.y); if (Math.abs(dx) + Math.abs(dy) > this._sensitivity) { - let alias = Doc.MakeAlias(this.props.parent.props.Document); - let key = StrCast(this.props.parent.props.Document.sectionFilter); + const alias = Doc.MakeAlias(this.props.parent.props.Document); + const key = StrCast(this.props.parent.props.Document.sectionFilter); let value = this.getValue(this._heading); value = typeof value === "string" ? 
`"${value}"` : value; - let script = `return doc.${key} === ${value}`; - let compiled = CompileScript(script, { params: { doc: Doc.name } }); + const script = `return doc.${key} === ${value}`; + const compiled = CompileScript(script, { params: { doc: Doc.name } }); if (compiled.compiled) { - let scriptField = new ScriptField(compiled); + const scriptField = new ScriptField(compiled); alias.viewSpecScript = scriptField; - let dragData = new DragManager.DocumentDragData([alias]); + const dragData = new DragManager.DocumentDragData([alias]); DragManager.StartDocumentDrag([this._headerRef.current!], dragData, e.clientX, e.clientY); } @@ -196,7 +196,7 @@ export class CollectionMasonryViewFieldRow extends React.Component { - let selected = this.props.headingObject ? this.props.headingObject.color : "#f1efeb"; + const selected = this.props.headingObject ? this.props.headingObject.color : "#f1efeb"; - let pink = PastelSchemaPalette.get("pink2"); - let purple = PastelSchemaPalette.get("purple4"); - let blue = PastelSchemaPalette.get("bluegreen1"); - let yellow = PastelSchemaPalette.get("yellow4"); - let red = PastelSchemaPalette.get("red2"); - let green = PastelSchemaPalette.get("bluegreen7"); - let cyan = PastelSchemaPalette.get("bluegreen5"); - let orange = PastelSchemaPalette.get("orange1"); - let gray = "#f1efeb"; + const pink = PastelSchemaPalette.get("pink2"); + const purple = PastelSchemaPalette.get("purple4"); + const blue = PastelSchemaPalette.get("bluegreen1"); + const yellow = PastelSchemaPalette.get("yellow4"); + const red = PastelSchemaPalette.get("red2"); + const green = PastelSchemaPalette.get("bluegreen7"); + const cyan = PastelSchemaPalette.get("bluegreen5"); + const orange = PastelSchemaPalette.get("orange1"); + const gray = "#f1efeb"; return (
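The pointerMove hunk above builds the view-spec filter source by quoting string values and interpolating numbers and booleans raw before handing the result to CompileScript. A sketch of just that string-building step; buildFilterScript is an illustrative name, not a helper added by this patch:

// Sketch only: string values are quoted so they interpolate as literals,
// numbers and booleans are interpolated as-is.
function buildFilterScript(key: string, value: string | number | boolean): string {
    const literal = typeof value === "string" ? `"${value}"` : value;
    return `return doc.${key} === ${literal}`;
}

console.log(buildFilterScript("status", "draft")); // return doc.status === "draft"
console.log(buildFilterScript("priority", 3));     // return doc.priority === 3
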
      @@ -242,7 +242,7 @@ export class CollectionMasonryViewFieldRow extends React.Component this._collapsed = !this._collapsed); renderMenu = () => { - let selected = this._createAliasSelected; + const selected = this._createAliasSelected; return (
      Create Alias
      @@ -258,10 +258,10 @@ export class CollectionMasonryViewFieldRow extends React.Component "", SetValue: this.addDocument, contents: "+ NEW", @@ -293,10 +293,10 @@ export class CollectionMasonryViewFieldRow extends React.Component evContents, SetValue: this.headingChanged, contents: evContents, @@ -343,8 +343,8 @@ export class CollectionMasonryViewFieldRow extends React.Component {({ measureRef }) => { return
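The drop, headingChanged, and addDocument hunks above all funnel typed heading values through getValue, which coerces numeric strings to numbers and true/false to booleans before writing them onto documents. A standalone sketch of that coercion; the helper name is hypothetical:

// Sketch of the heading-value coercion: number, then boolean, then plain string.
function coerceHeadingValue(value: string): number | boolean | string {
    const parsed = parseInt(value, 10);
    if (!isNaN(parsed)) return parsed;
    if (value.toLowerCase().indexOf("true") > -1) return true;
    if (value.toLowerCase().indexOf("false") > -1) return false;
    return value;
}

console.log(coerceHeadingValue("42"));      // 42
console.log(coerceHeadingValue("False"));   // false
console.log(coerceHeadingValue("chapter")); // "chapter"
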
      diff --git a/src/client/views/collections/CollectionSchemaCells.tsx b/src/client/views/collections/CollectionSchemaCells.tsx index 54a36f691..1700c14cf 100644 --- a/src/client/views/collections/CollectionSchemaCells.tsx +++ b/src/client/views/collections/CollectionSchemaCells.tsx @@ -1,7 +1,7 @@ import React = require("react"); -import { action, computed, observable, trace, untracked, toJS } from "mobx"; +import { action, observable } from "mobx"; import { observer } from "mobx-react"; -import ReactTable, { CellInfo, ComponentPropsGetterR, ReactTableDefaults, Column } from "react-table"; +import { CellInfo } from "react-table"; import "react-table/react-table.css"; import { emptyFunction, returnFalse, returnZero, returnOne } from "../../../Utils"; import { Doc, DocListCast, DocListCastAsync, Field, Opt } from "../../../new_fields/Doc"; @@ -9,7 +9,7 @@ import { Id } from "../../../new_fields/FieldSymbols"; import { SetupDrag, DragManager } from "../../util/DragManager"; import { CompileScript } from "../../util/Scripting"; import { Transform } from "../../util/Transform"; -import { COLLECTION_BORDER_WIDTH, MAX_ROW_HEIGHT } from '../globalCssVariables.scss'; +import { MAX_ROW_HEIGHT } from '../globalCssVariables.scss'; import '../DocumentDecorations.scss'; import { EditableView } from "../EditableView"; import { FieldView, FieldViewProps } from "../nodes/FieldView"; @@ -89,8 +89,8 @@ export class CollectionSchemaCell extends React.Component { // this._isEditing = true; // this.props.setIsEditing(true); - let field = this.props.rowProps.original[this.props.rowProps.column.id!]; - let doc = FieldValue(Cast(field, Doc)); + const field = this.props.rowProps.original[this.props.rowProps.column.id!]; + const doc = FieldValue(Cast(field, Doc)); if (typeof field === "object" && doc) this.props.setPreviewDoc(doc); } @@ -106,12 +106,12 @@ export class CollectionSchemaCell extends React.Component { private drop = (e: Event, de: DragManager.DropEvent) => { if (de.data instanceof DragManager.DocumentDragData) { - let fieldKey = this.props.rowProps.column.id as string; + const fieldKey = this.props.rowProps.column.id as string; if (de.data.draggedDocuments.length === 1) { this._document[fieldKey] = de.data.draggedDocuments[0]; } else { - let coll = Docs.Create.SchemaDocument([new SchemaHeaderField("title", "#f1efeb")], de.data.draggedDocuments, {}); + const coll = Docs.Create.SchemaDocument([new SchemaHeaderField("title", "#f1efeb")], de.data.draggedDocuments, {}); this._document[fieldKey] = coll; } e.stopPropagation(); @@ -138,9 +138,9 @@ export class CollectionSchemaCell extends React.Component { // } renderCellWithType(type: string | undefined) { - let dragRef: React.RefObject = React.createRef(); + const dragRef: React.RefObject = React.createRef(); - let props: FieldViewProps = { + const props: FieldViewProps = { Document: this.props.rowProps.original, DataDoc: this.props.rowProps.original, fieldKey: this.props.rowProps.column.id as string, @@ -161,23 +161,23 @@ export class CollectionSchemaCell extends React.Component { ContentScaling: returnOne }; - let field = props.Document[props.fieldKey]; - let doc = FieldValue(Cast(field, Doc)); - let fieldIsDoc = (type === "document" && typeof field === "object") || (typeof field === "object" && doc); + const field = props.Document[props.fieldKey]; + const doc = FieldValue(Cast(field, Doc)); + const fieldIsDoc = (type === "document" && typeof field === "object") || (typeof field === "object" && doc); - let onItemDown = (e: React.PointerEvent) => { + 
const onItemDown = (e: React.PointerEvent) => { if (fieldIsDoc) { SetupDrag(this._focusRef, () => this._document[props.fieldKey] instanceof Doc ? this._document[props.fieldKey] : this._document, this._document[props.fieldKey] instanceof Doc ? (doc: Doc, target: Doc, addDoc: (newDoc: Doc) => any) => addDoc(doc) : this.props.moveDocument, this._document[props.fieldKey] instanceof Doc ? "alias" : this.props.Document.schemaDoc ? "copy" : undefined)(e); } }; - let onPointerEnter = (e: React.PointerEvent): void => { + const onPointerEnter = (e: React.PointerEvent): void => { if (e.buttons === 1 && SelectionManager.GetIsDragging() && (type === "document" || type === undefined)) { dragRef.current!.className = "collectionSchemaView-cellContainer doc-drag-over"; } }; - let onPointerLeave = (e: React.PointerEvent): void => { + const onPointerLeave = (e: React.PointerEvent): void => { dragRef.current!.className = "collectionSchemaView-cellContainer"; }; @@ -187,7 +187,7 @@ export class CollectionSchemaCell extends React.Component { if (type === "string") contents = typeof field === "string" ? (StrCast(field) === "" ? "--" : StrCast(field)) : "--" + typeof field + "--"; if (type === "boolean") contents = typeof field === "boolean" ? (BoolCast(field) ? "true" : "false") : "--" + typeof field + "--"; if (type === "document") { - let doc = FieldValue(Cast(field, Doc)); + const doc = FieldValue(Cast(field, Doc)); contents = typeof field === "object" ? doc ? StrCast(doc.title) === "" ? "--" : StrCast(doc.title) : `--${typeof field}--` : `--${typeof field}--`; } @@ -215,7 +215,7 @@ export class CollectionSchemaCell extends React.Component { height={"auto"} maxHeight={Number(MAX_ROW_HEIGHT)} GetValue={() => { - let field = props.Document[props.fieldKey]; + const field = props.Document[props.fieldKey]; if (Field.IsField(field)) { return Field.toScriptString(field); } @@ -226,7 +226,7 @@ export class CollectionSchemaCell extends React.Component { if (value.startsWith(":=")) { return this.props.setComputed(value.substring(2), props.Document, this.props.rowProps.column.id!, this.props.row, this.props.col); } - let script = CompileScript(value, { requiredType: type, addReturn: true, params: { this: Doc.name, $r: "number", $c: "number", $: "any" } }); + const script = CompileScript(value, { requiredType: type, addReturn: true, params: { this: Doc.name, $r: "number", $c: "number", $: "any" } }); if (!script.compiled) { return false; } @@ -287,15 +287,15 @@ export class CollectionSchemaCheckboxCell extends CollectionSchemaCell { @action toggleChecked = (e: React.ChangeEvent) => { this._isChecked = e.target.checked; - let script = CompileScript(e.target.checked.toString(), { requiredType: "boolean", addReturn: true, params: { this: Doc.name } }); + const script = CompileScript(e.target.checked.toString(), { requiredType: "boolean", addReturn: true, params: { this: Doc.name } }); if (script.compiled) { this.applyToDoc(this._document, this.props.row, this.props.col, script.run); } } render() { - let reference = React.createRef(); - let onItemDown = (e: React.PointerEvent) => { + const reference = React.createRef(); + const onItemDown = (e: React.PointerEvent) => { (!this.props.CollectionView || !this.props.CollectionView.props.isSelected() ? undefined : SetupDrag(reference, () => this._document, this.props.moveDocument, this.props.Document.schemaDoc ? 
"copy" : undefined)(e)); }; diff --git a/src/client/views/collections/CollectionSchemaHeaders.tsx b/src/client/views/collections/CollectionSchemaHeaders.tsx index d24f63fbb..0114342b9 100644 --- a/src/client/views/collections/CollectionSchemaHeaders.tsx +++ b/src/client/views/collections/CollectionSchemaHeaders.tsx @@ -1,5 +1,5 @@ import React = require("react"); -import { action, computed, observable, trace, untracked } from "mobx"; +import { action, observable } from "mobx"; import { observer } from "mobx-react"; import "./CollectionSchemaView.scss"; import { faPlus, faFont, faHashtag, faAlignJustify, faCheckSquare, faToggleOn, faSortAmountDown, faSortAmountUp, faTimes } from '@fortawesome/free-solid-svg-icons'; @@ -7,10 +7,8 @@ import { library, IconProp } from "@fortawesome/fontawesome-svg-core"; import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; import { Flyout, anchorPoints } from "../DocumentDecorations"; import { ColumnType } from "./CollectionSchemaView"; -import { emptyFunction } from "../../../Utils"; -import { contains } from "typescript-collections/dist/lib/arrays"; import { faFile } from "@fortawesome/free-regular-svg-icons"; -import { SchemaHeaderField, RandomPastel, PastelSchemaPalette } from "../../../new_fields/SchemaHeaderField"; +import { SchemaHeaderField, PastelSchemaPalette } from "../../../new_fields/SchemaHeaderField"; import { undoBatch } from "../../util/UndoManager"; library.add(faPlus, faFont, faHashtag, faAlignJustify, faCheckSquare, faToggleOn, faFile as any, faSortAmountDown, faSortAmountUp, faTimes); @@ -32,7 +30,7 @@ export interface HeaderProps { export class CollectionSchemaHeader extends React.Component { render() { - let icon: IconProp = this.props.keyType === ColumnType.Number ? "hashtag" : this.props.keyType === ColumnType.String ? "font" : + const icon: IconProp = this.props.keyType === ColumnType.Number ? "hashtag" : this.props.keyType === ColumnType.String ? "font" : this.props.keyType === ColumnType.Boolean ? "check-square" : this.props.keyType === ColumnType.Doc ? "file" : "align-justify"; return (
      @@ -139,7 +137,7 @@ export class CollectionSchemaColumnMenu extends React.Component renderTypes = () => { if (this.props.typeConst) return <>; - let type = this.props.columnField.type; + const type = this.props.columnField.type; return (
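Stepping back to the CollectionSchemaCells hunks above: the cell's SetValue keeps the convention that input starting with ":=" becomes a computed-field script (the text after the prefix), while anything else is compiled as an ordinary value script. A sketch of that classification step; the tagged-union return type is illustrative and not Dash's API:

// Sketch only: split cell input into "computed" (:= prefix) vs plain literal/script input.
type CellInput =
    | { kind: "computed"; body: string }
    | { kind: "literal"; source: string };

function classifyCellInput(value: string): CellInput {
    return value.startsWith(":=")
        ? { kind: "computed", body: value.substring(2) } // e.g. ":=this.width * 2"
        : { kind: "literal", source: value };            // compiled with addReturn: true
}

console.log(classifyCellInput(":=this.width * 2")); // { kind: "computed", body: "this.width * 2" }
console.log(classifyCellInput("42"));               // { kind: "literal", source: "42" }
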
      @@ -170,7 +168,7 @@ export class CollectionSchemaColumnMenu extends React.Component } renderSorting = () => { - let sort = this.props.columnField.desc; + const sort = this.props.columnField.desc; return (
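The CollectionSchemaHeader hunk above picks a FontAwesome icon from the column's key type via a chained ternary. The same mapping written as a switch, purely for illustration; the ColumnType members mirror the ones referenced in the hunk, but their ordering here is an assumption:

// Sketch of the column-type -> header-icon mapping.
enum ColumnType { Any, Number, String, Boolean, Doc }

function headerIcon(keyType: ColumnType): string {
    switch (keyType) {
        case ColumnType.Number:  return "hashtag";
        case ColumnType.String:  return "font";
        case ColumnType.Boolean: return "check-square";
        case ColumnType.Doc:     return "file";
        default:                 return "align-justify"; // untyped / any other column
    }
}

console.log(headerIcon(ColumnType.Doc)); // "file"
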
      @@ -193,14 +191,14 @@ export class CollectionSchemaColumnMenu extends React.Component } renderColors = () => { - let selected = this.props.columnField.color; + const selected = this.props.columnField.color; - let pink = PastelSchemaPalette.get("pink2"); - let purple = PastelSchemaPalette.get("purple2"); - let blue = PastelSchemaPalette.get("bluegreen1"); - let yellow = PastelSchemaPalette.get("yellow4"); - let red = PastelSchemaPalette.get("red2"); - let gray = "#f1efeb"; + const pink = PastelSchemaPalette.get("pink2"); + const purple = PastelSchemaPalette.get("purple2"); + const blue = PastelSchemaPalette.get("bluegreen1"); + const yellow = PastelSchemaPalette.get("yellow4"); + const red = PastelSchemaPalette.get("red2"); + const gray = "#f1efeb"; return (
      @@ -291,8 +289,8 @@ class KeysDropdown extends React.Component { @action onKeyDown = (e: React.KeyboardEvent): void => { if (e.key === "Enter") { - let keyOptions = this._searchTerm === "" ? this.props.possibleKeys : this.props.possibleKeys.filter(key => key.toUpperCase().indexOf(this._searchTerm.toUpperCase()) > -1); - let exactFound = keyOptions.findIndex(key => key.toUpperCase() === this._searchTerm.toUpperCase()) > -1 || + const keyOptions = this._searchTerm === "" ? this.props.possibleKeys : this.props.possibleKeys.filter(key => key.toUpperCase().indexOf(this._searchTerm.toUpperCase()) > -1); + const exactFound = keyOptions.findIndex(key => key.toUpperCase() === this._searchTerm.toUpperCase()) > -1 || this.props.existingKeys.findIndex(key => key.toUpperCase() === this._searchTerm.toUpperCase()) > -1; if (!exactFound && this._searchTerm !== "" && this.props.canAddNew) { @@ -334,11 +332,11 @@ class KeysDropdown extends React.Component { renderOptions = (): JSX.Element[] | JSX.Element => { if (!this._isOpen) return <>; - let keyOptions = this._searchTerm === "" ? this.props.possibleKeys : this.props.possibleKeys.filter(key => key.toUpperCase().indexOf(this._searchTerm.toUpperCase()) > -1); - let exactFound = keyOptions.findIndex(key => key.toUpperCase() === this._searchTerm.toUpperCase()) > -1 || + const keyOptions = this._searchTerm === "" ? this.props.possibleKeys : this.props.possibleKeys.filter(key => key.toUpperCase().indexOf(this._searchTerm.toUpperCase()) > -1); + const exactFound = keyOptions.findIndex(key => key.toUpperCase() === this._searchTerm.toUpperCase()) > -1 || this.props.existingKeys.findIndex(key => key.toUpperCase() === this._searchTerm.toUpperCase()) > -1; - let options = keyOptions.map(key => { + const options = keyOptions.map(key => { return
      { this.onSelect(key); this.setSearchTerm(""); }}>{key}
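The KeysDropdown hunks above filter candidate keys case-insensitively by the search term and only offer to create a new key when no exact match already exists among the candidates or the existing keys. A sketch of that logic with illustrative names:

// Sketch only: case-insensitive key filtering plus the "can add new key" check.
function filterKeys(possibleKeys: string[], existingKeys: string[], searchTerm: string) {
    const term = searchTerm.toUpperCase();
    const keyOptions = term === ""
        ? possibleKeys
        : possibleKeys.filter(key => key.toUpperCase().includes(term));
    const exactFound =
        keyOptions.some(key => key.toUpperCase() === term) ||
        existingKeys.some(key => key.toUpperCase() === term);
    return { keyOptions, canCreateNew: !exactFound && searchTerm !== "" };
}

console.log(filterKeys(["title", "author"], ["title"], "auth"));
// { keyOptions: ["author"], canCreateNew: true }
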
      ; }); diff --git a/src/client/views/collections/CollectionSchemaMovableTableHOC.tsx b/src/client/views/collections/CollectionSchemaMovableTableHOC.tsx index 274c8b6d1..90320df82 100644 --- a/src/client/views/collections/CollectionSchemaMovableTableHOC.tsx +++ b/src/client/views/collections/CollectionSchemaMovableTableHOC.tsx @@ -1,18 +1,18 @@ import React = require("react"); -import { ReactTableDefaults, TableCellRenderer, ComponentPropsGetterR, ComponentPropsGetter0, RowInfo } from "react-table"; +import { ReactTableDefaults, TableCellRenderer, RowInfo } from "react-table"; import "./CollectionSchemaView.scss"; import { Transform } from "../../util/Transform"; import { Doc } from "../../../new_fields/Doc"; import { DragManager, SetupDrag } from "../../util/DragManager"; import { SelectionManager } from "../../util/SelectionManager"; -import { Cast, FieldValue, StrCast } from "../../../new_fields/Types"; +import { Cast, FieldValue } from "../../../new_fields/Types"; import { ContextMenu } from "../ContextMenu"; import { action } from "mobx"; import { library } from '@fortawesome/fontawesome-svg-core'; import { faGripVertical, faTrash } from '@fortawesome/free-solid-svg-icons'; import { FontAwesomeIcon } from '@fortawesome/react-fontawesome'; import { DocumentManager } from "../../util/DocumentManager"; -import { PastelSchemaPalette, SchemaHeaderField } from "../../../new_fields/SchemaHeaderField"; +import { SchemaHeaderField } from "../../../new_fields/SchemaHeaderField"; import { undoBatch } from "../../util/UndoManager"; library.add(faGripVertical, faTrash); @@ -43,10 +43,10 @@ export class MovableColumn extends React.Component { document.removeEventListener("pointermove", this.onPointerMove); } onDragMove = (e: PointerEvent): void => { - let x = this.props.ScreenToLocalTransform().transformPoint(e.clientX, e.clientY); - let rect = this._header!.current!.getBoundingClientRect(); - let bounds = this.props.ScreenToLocalTransform().transformPoint(rect.left + ((rect.right - rect.left) / 2), rect.top); - let before = x[0] < bounds[0]; + const x = this.props.ScreenToLocalTransform().transformPoint(e.clientX, e.clientY); + const rect = this._header!.current!.getBoundingClientRect(); + const bounds = this.props.ScreenToLocalTransform().transformPoint(rect.left + ((rect.right - rect.left) / 2), rect.top); + const before = x[0] < bounds[0]; this._header!.current!.className = "collectionSchema-col-wrapper"; if (before) this._header!.current!.className += " col-before"; if (!before) this._header!.current!.className += " col-after"; @@ -62,10 +62,10 @@ export class MovableColumn extends React.Component { colDrop = (e: Event, de: DragManager.DropEvent) => { document.removeEventListener("pointermove", this.onDragMove, true); - let x = this.props.ScreenToLocalTransform().transformPoint(de.x, de.y); - let rect = this._header!.current!.getBoundingClientRect(); - let bounds = this.props.ScreenToLocalTransform().transformPoint(rect.left + ((rect.right - rect.left) / 2), rect.top); - let before = x[0] < bounds[0]; + const x = this.props.ScreenToLocalTransform().transformPoint(de.x, de.y); + const rect = this._header!.current!.getBoundingClientRect(); + const bounds = this.props.ScreenToLocalTransform().transformPoint(rect.left + ((rect.right - rect.left) / 2), rect.top); + const before = x[0] < bounds[0]; if (de.data instanceof DragManager.ColumnDragData) { this.props.reorderColumns(de.data.colKey, this.props.columnValue, before, this.props.allColumns); return true; @@ -74,21 +74,21 @@ export class 
MovableColumn extends React.Component { } onPointerMove = (e: PointerEvent) => { - let onRowMove = (e: PointerEvent) => { + const onRowMove = (e: PointerEvent) => { e.stopPropagation(); e.preventDefault(); document.removeEventListener("pointermove", onRowMove); document.removeEventListener('pointerup', onRowUp); - let dragData = new DragManager.ColumnDragData(this.props.columnValue); + const dragData = new DragManager.ColumnDragData(this.props.columnValue); DragManager.StartColumnDrag(this._dragRef.current!, dragData, e.x, e.y); }; - let onRowUp = (): void => { + const onRowUp = (): void => { document.removeEventListener("pointermove", onRowMove); document.removeEventListener('pointerup', onRowUp); }; if (e.buttons === 1) { - let [dx, dy] = this.props.ScreenToLocalTransform().transformDirection(e.clientX - this._startDragPosition.x, e.clientY - this._startDragPosition.y); + const [dx, dy] = this.props.ScreenToLocalTransform().transformDirection(e.clientX - this._startDragPosition.x, e.clientY - this._startDragPosition.y); if (Math.abs(dx) + Math.abs(dy) > this._sensitivity) { document.removeEventListener("pointermove", this.onPointerMove); e.stopPropagation(); @@ -106,14 +106,14 @@ export class MovableColumn extends React.Component { @action onPointerDown = (e: React.PointerEvent, ref: React.RefObject) => { this._dragRef = ref; - let [dx, dy] = this.props.ScreenToLocalTransform().transformDirection(e.clientX, e.clientY); + const [dx, dy] = this.props.ScreenToLocalTransform().transformDirection(e.clientX, e.clientY); this._startDragPosition = { x: dx, y: dy }; document.addEventListener("pointermove", this.onPointerMove); } render() { - let reference = React.createRef(); + const reference = React.createRef(); return (
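The MovableColumn drag and drop hunks above decide whether a dragged column lands before or after the target by comparing the pointer against the header's horizontal midpoint (MovableRow, below, does the same with the vertical midpoint). In the patch both points are first mapped through ScreenToLocalTransform; this sketch assumes they are already in the same coordinate space:

// Sketch only: drop lands "before" the target when the pointer is left of the midpoint.
interface Rect { left: number; right: number; }

function dropsBefore(pointerX: number, headerRect: Rect): boolean {
    const midpoint = headerRect.left + (headerRect.right - headerRect.left) / 2;
    return pointerX < midpoint;
}

console.log(dropsBefore(110, { left: 100, right: 180 })); // true  -> insert before
console.log(dropsBefore(170, { left: 100, right: 180 })); // false -> insert after
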
      @@ -152,10 +152,10 @@ export class MovableRow extends React.Component { document.removeEventListener("pointermove", this.onDragMove, true); } onDragMove = (e: PointerEvent): void => { - let x = this.props.ScreenToLocalTransform().transformPoint(e.clientX, e.clientY); - let rect = this._header!.current!.getBoundingClientRect(); - let bounds = this.props.ScreenToLocalTransform().transformPoint(rect.left, rect.top + rect.height / 2); - let before = x[1] < bounds[1]; + const x = this.props.ScreenToLocalTransform().transformPoint(e.clientX, e.clientY); + const rect = this._header!.current!.getBoundingClientRect(); + const bounds = this.props.ScreenToLocalTransform().transformPoint(rect.left, rect.top + rect.height / 2); + const before = x[1] < bounds[1]; this._header!.current!.className = "collectionSchema-row-wrapper"; if (before) this._header!.current!.className += " row-above"; if (!before) this._header!.current!.className += " row-below"; @@ -173,16 +173,16 @@ export class MovableRow extends React.Component { const rowDoc = FieldValue(Cast(this.props.rowInfo.original, Doc)); if (!rowDoc) return false; - let x = this.props.ScreenToLocalTransform().transformPoint(de.x, de.y); - let rect = this._header!.current!.getBoundingClientRect(); - let bounds = this.props.ScreenToLocalTransform().transformPoint(rect.left, rect.top + rect.height / 2); - let before = x[1] < bounds[1]; + const x = this.props.ScreenToLocalTransform().transformPoint(de.x, de.y); + const rect = this._header!.current!.getBoundingClientRect(); + const bounds = this.props.ScreenToLocalTransform().transformPoint(rect.left, rect.top + rect.height / 2); + const before = x[1] < bounds[1]; if (de.data instanceof DragManager.DocumentDragData) { e.stopPropagation(); if (de.data.draggedDocuments[0] === rowDoc) return true; - let addDocument = (doc: Doc) => this.props.addDoc(doc, rowDoc, before); - let movedDocs = de.data.draggedDocuments; + const addDocument = (doc: Doc) => this.props.addDoc(doc, rowDoc, before); + const movedDocs = de.data.draggedDocuments; return (de.data.dropAction || de.data.userDropAction) ? de.data.droppedDocuments.reduce((added: boolean, d) => this.props.addDoc(d, rowDoc, before) || added, false) : (de.data.moveDocument) ? @@ -193,14 +193,14 @@ export class MovableRow extends React.Component { } onRowContextMenu = (e: React.MouseEvent): void => { - let description = this.props.rowWrapped ? "Unwrap text on row" : "Text wrap row"; + const description = this.props.rowWrapped ? 
"Unwrap text on row" : "Text wrap row"; ContextMenu.Instance.addItem({ description: description, event: () => this.props.textWrapRow(this.props.rowInfo.original), icon: "file-pdf" }); } @undoBatch @action move: DragManager.MoveFunction = (doc: Doc, target: Doc, addDoc) => { - let targetView = DocumentManager.Instance.getDocumentView(target); + const targetView = DocumentManager.Instance.getDocumentView(target); if (targetView && targetView.props.ContainingCollectionDoc) { return doc !== target && doc !== targetView.props.ContainingCollectionDoc && this.props.removeDoc(doc) && addDoc(doc); } @@ -217,8 +217,8 @@ export class MovableRow extends React.Component { const doc = FieldValue(Cast(original, Doc)); if (!doc) return <>; - let reference = React.createRef(); - let onItemDown = SetupDrag(reference, () => doc, this.move); + const reference = React.createRef(); + const onItemDown = SetupDrag(reference, () => doc, this.move); let className = "collectionSchema-row"; if (this.props.rowFocused) className += " row-focused"; diff --git a/src/client/views/collections/CollectionSchemaView.tsx b/src/client/views/collections/CollectionSchemaView.tsx index 65856cad3..f336eaf75 100644 --- a/src/client/views/collections/CollectionSchemaView.tsx +++ b/src/client/views/collections/CollectionSchemaView.tsx @@ -94,11 +94,11 @@ export class CollectionSchemaView extends CollectionSubView(doc => doc) { } @action onDividerMove = (e: PointerEvent): void => { - let nativeWidth = this._mainCont!.getBoundingClientRect(); - let minWidth = 40; - let maxWidth = 1000; - let movedWidth = this.props.ScreenToLocalTransform().transformDirection(nativeWidth.right - e.clientX, 0)[0]; - let width = movedWidth < minWidth ? minWidth : movedWidth > maxWidth ? maxWidth : movedWidth; + const nativeWidth = this._mainCont!.getBoundingClientRect(); + const minWidth = 40; + const maxWidth = 1000; + const movedWidth = this.props.ScreenToLocalTransform().transformDirection(nativeWidth.right - e.clientX, 0)[0]; + const width = movedWidth < minWidth ? minWidth : movedWidth > maxWidth ? maxWidth : movedWidth; this.props.Document.schemaPreviewWidth = width; } @action @@ -136,7 +136,7 @@ export class CollectionSchemaView extends CollectionSubView(doc => doc) { @computed get previewPanel() { - let layoutDoc = this.previewDocument ? Doc.expandTemplateLayout(this.previewDocument, this.props.DataDoc) : undefined; + const layoutDoc = this.previewDocument ? Doc.expandTemplateLayout(this.previewDocument, this.props.DataDoc) : undefined; return
      { @computed get childDocs() { if (this.props.childDocs) return this.props.childDocs; - let doc = this.props.dataDoc ? this.props.dataDoc : this.props.Document; + const doc = this.props.dataDoc ? this.props.dataDoc : this.props.Document; return DocListCast(doc[this.props.fieldKey]); } set childDocs(docs: Doc[]) { - let doc = this.props.dataDoc ? this.props.dataDoc : this.props.Document; + const doc = this.props.dataDoc ? this.props.dataDoc : this.props.Document; doc[this.props.fieldKey] = new List(docs); } @@ -288,12 +288,12 @@ export class SchemaTable extends React.Component { @computed get borderWidth() { return Number(COLLECTION_BORDER_WIDTH); } @computed get tableColumns(): Column[] { - let possibleKeys = this.documentKeys.filter(key => this.columns.findIndex(existingKey => existingKey.heading.toUpperCase() === key.toUpperCase()) === -1); - let columns: Column[] = []; - let tableIsFocused = this.props.isFocused(this.props.Document); - let focusedRow = this._focusedCell.row; - let focusedCol = this._focusedCell.col; - let isEditable = !this._headerIsEditing; + const possibleKeys = this.documentKeys.filter(key => this.columns.findIndex(existingKey => existingKey.heading.toUpperCase() === key.toUpperCase()) === -1); + const columns: Column[] = []; + const tableIsFocused = this.props.isFocused(this.props.Document); + const focusedRow = this._focusedCell.row; + const focusedCol = this._focusedCell.col; + const isEditable = !this._headerIsEditing; if (this.childDocs.reduce((found, doc) => found || doc.type === "collection", false)) { columns.push( @@ -313,8 +313,8 @@ export class SchemaTable extends React.Component { ); } - let cols = this.columns.map(col => { - let header = { + const header = c.heading)} @@ -333,11 +333,11 @@ export class SchemaTable extends React.Component { accessor: (doc: Doc) => doc ? doc[col.heading] : 0, id: col.heading, Cell: (rowProps: CellInfo) => { - let rowIndex = rowProps.index; - let columnIndex = this.columns.map(c => c.heading).indexOf(rowProps.column.id!); - let isFocused = focusedRow === rowIndex && focusedCol === columnIndex && tableIsFocused; + const rowIndex = rowProps.index; + const columnIndex = this.columns.map(c => c.heading).indexOf(rowProps.column.id!); + const isFocused = focusedRow === rowIndex && focusedCol === columnIndex && tableIsFocused; - let props: CellProps = { + const props: CellProps = { row: rowIndex, col: columnIndex, rowProps: rowProps, @@ -358,7 +358,7 @@ export class SchemaTable extends React.Component { getField: this.getField, }; - let colType = this.getColumnType(col); + const colType = this.getColumnType(col); if (colType === ColumnType.Number) return ; if (colType === ColumnType.String) return ; if (colType === ColumnType.Boolean) return ; @@ -384,9 +384,9 @@ export class SchemaTable extends React.Component { constructor(props: SchemaTableProps) { super(props); // convert old schema columns (list of strings) into new schema columns (list of schema header fields) - let oldSchemaColumns = Cast(this.props.Document.schemaColumns, listSpec("string"), []); + const oldSchemaColumns = Cast(this.props.Document.schemaColumns, listSpec("string"), []); if (oldSchemaColumns && oldSchemaColumns.length && typeof oldSchemaColumns[0] !== "object") { - let newSchemaColumns = oldSchemaColumns.map(i => typeof i === "string" ? new SchemaHeaderField(i, "#f1efeb") : i); + const newSchemaColumns = oldSchemaColumns.map(i => typeof i === "string" ? 
new SchemaHeaderField(i, "#f1efeb") : i); this.props.Document.schemaColumns = new List(newSchemaColumns); } } @@ -418,10 +418,10 @@ export class SchemaTable extends React.Component { private getTdProps: ComponentPropsGetterR = (state, rowInfo, column, instance) => { if (!rowInfo || column) return {}; - let row = rowInfo.index; + const row = rowInfo.index; //@ts-ignore - let col = this.columns.map(c => c.heading).indexOf(column!.id); - let isFocused = this._focusedCell.row === row && this._focusedCell.col === col && this.props.isFocused(this.props.Document); + const col = this.columns.map(c => c.heading).indexOf(column!.id); + const isFocused = this._focusedCell.row === row && this._focusedCell.col === col && this.props.isFocused(this.props.Document); // TODO: editing border doesn't work :( return { style: { @@ -432,7 +432,7 @@ export class SchemaTable extends React.Component { @action onCloseCollection = (collection: Doc): void => { - let index = this._openCollections.findIndex(col => col === collection[Id]); + const index = this._openCollections.findIndex(col => col === collection[Id]); if (index > -1) this._openCollections.splice(index, 1); } @@ -450,7 +450,7 @@ export class SchemaTable extends React.Component { @action onKeyDown = (e: KeyboardEvent): void => { if (!this._cellIsEditing && !this._headerIsEditing && this.props.isFocused(this.props.Document)) {// && this.props.isSelected(true)) { - let direction = e.key === "Tab" ? "tab" : e.which === 39 ? "right" : e.which === 37 ? "left" : e.which === 38 ? "up" : e.which === 40 ? "down" : ""; + const direction = e.key === "Tab" ? "tab" : e.which === 39 ? "right" : e.which === 37 ? "left" : e.which === 38 ? "up" : e.which === 40 ? "down" : ""; this._focusedCell = this.changeFocusedCellByDirection(direction, this._focusedCell.row, this._focusedCell.col); const pdoc = FieldValue(this.childDocs[this._focusedCell.row]); @@ -479,7 +479,7 @@ export class SchemaTable extends React.Component { @undoBatch createRow = () => { - let newDoc = Docs.Create.TextDocument({ title: "", width: 100, height: 30 }); + const newDoc = Docs.Create.TextDocument({ title: "", width: 100, height: 30 }); this.props.addDocument(newDoc); } @@ -498,7 +498,7 @@ export class SchemaTable extends React.Component { @undoBatch @action deleteColumn = (key: string) => { - let columns = this.columns; + const columns = this.columns; if (columns === undefined) { this.columns = new List([]); } else { @@ -513,7 +513,7 @@ export class SchemaTable extends React.Component { @undoBatch @action changeColumns = (oldKey: string, newKey: string, addNew: boolean) => { - let columns = this.columns; + const columns = this.columns; if (columns === undefined) { this.columns = new List([new SchemaHeaderField(newKey, "f1efeb")]); } else { @@ -523,7 +523,7 @@ export class SchemaTable extends React.Component { } else { const index = columns.map(c => c.heading).indexOf(oldKey); if (index > -1) { - let column = columns[index]; + const column = columns[index]; column.setHeading(newKey); columns[index] = column; this.columns = columns; @@ -554,8 +554,8 @@ export class SchemaTable extends React.Component { setColumnType = (columnField: SchemaHeaderField, type: ColumnType): void => { if (columnTypes.get(columnField.heading)) return; - let columns = this.columns; - let index = columns.indexOf(columnField); + const columns = this.columns; + const index = columns.indexOf(columnField); if (index > -1) { columnField.setType(NumCast(type)); columns[index] = columnField; @@ -575,8 +575,8 @@ export class 
SchemaTable extends React.Component { @undoBatch setColumnColor = (columnField: SchemaHeaderField, color: string): void => { - let columns = this.columns; - let index = columns.indexOf(columnField); + const columns = this.columns; + const index = columns.indexOf(columnField); if (index > -1) { columnField.setColor(color); columns[index] = columnField; @@ -589,10 +589,10 @@ export class SchemaTable extends React.Component { @undoBatch reorderColumns = (toMove: SchemaHeaderField, relativeTo: SchemaHeaderField, before: boolean, columnsValues: SchemaHeaderField[]) => { - let columns = [...columnsValues]; - let oldIndex = columns.indexOf(toMove); - let relIndex = columns.indexOf(relativeTo); - let newIndex = (oldIndex > relIndex && !before) ? relIndex + 1 : (oldIndex < relIndex && before) ? relIndex - 1 : relIndex; + const columns = [...columnsValues]; + const oldIndex = columns.indexOf(toMove); + const relIndex = columns.indexOf(relativeTo); + const newIndex = (oldIndex > relIndex && !before) ? relIndex + 1 : (oldIndex < relIndex && before) ? relIndex - 1 : relIndex; if (oldIndex === newIndex) return; @@ -603,17 +603,17 @@ export class SchemaTable extends React.Component { @undoBatch @action setColumnSort = (columnField: SchemaHeaderField, descending: boolean | undefined) => { - let columns = this.columns; - let index = columns.findIndex(c => c.heading === columnField.heading); - let column = columns[index]; + const columns = this.columns; + const index = columns.findIndex(c => c.heading === columnField.heading); + const column = columns[index]; column.setDesc(descending); columns[index] = column; this.columns = columns; } get documentKeys() { - let docs = this.childDocs; - let keys: { [key: string]: boolean } = {}; + const docs = this.childDocs; + const keys: { [key: string]: boolean } = {}; // bcz: ugh. this is untracked since otherwise a large collection of documents will blast the server for all their fields. // then as each document's fields come back, we update the documents _proxies. Each time we do this, the whole schema will be // invalidated and re-rendered. This workaround will inquire all of the document fields before the options button is clicked. @@ -628,8 +628,8 @@ export class SchemaTable extends React.Component { @action toggleTextWrapRow = (doc: Doc): void => { - let textWrapped = this.textWrappedRows; - let index = textWrapped.findIndex(id => doc[Id] === id); + const textWrapped = this.textWrappedRows; + const index = textWrapped.findIndex(id => doc[Id] === id); index > -1 ? 
textWrapped.splice(index, 1) : textWrapped.push(doc[Id]); @@ -638,10 +638,10 @@ export class SchemaTable extends React.Component { @computed get reactTable() { - let children = this.childDocs; - let hasCollectionChild = children.reduce((found, doc) => found || doc.type === "collection", false); - let expandedRowsList = this._openCollections.map(col => children.findIndex(doc => doc[Id] === col).toString()); - let expanded = {}; + const children = this.childDocs; + const hasCollectionChild = children.reduce((found, doc) => found || doc.type === "collection", false); + const expandedRowsList = this._openCollections.map(col => children.findIndex(doc => doc[Id] === col).toString()); + const expanded = {}; //@ts-ignore expandedRowsList.forEach(row => expanded[row] = true); console.log("text wrapped rows", ...[...this.textWrappedRows]); // TODO: get component to rerender on text wrap change without needign to console.log :(((( @@ -668,10 +668,10 @@ export class SchemaTable extends React.Component { } onResizedChange = (newResized: Resize[], event: any) => { - let columns = this.columns; + const columns = this.columns; newResized.forEach(resized => { - let index = columns.findIndex(c => c.heading === resized.id); - let column = columns[index]; + const index = columns.findIndex(c => c.heading === resized.id); + const column = columns[index]; column.setWidth(resized.value); columns[index] = column; }); @@ -688,16 +688,16 @@ export class SchemaTable extends React.Component { makeDB = async () => { let csv: string = this.columns.reduce((val, col) => val + col + ",", ""); csv = csv.substr(0, csv.length - 1) + "\n"; - let self = this; + const self = this; this.childDocs.map(doc => { csv += self.columns.reduce((val, col) => val + (doc[col.heading] ? doc[col.heading]!.toString() : "0") + ",", ""); csv = csv.substr(0, csv.length - 1) + "\n"; }); csv.substring(0, csv.length - 1); - let dbName = StrCast(this.props.Document.title); - let res = await Gateway.Instance.PostSchema(csv, dbName); + const dbName = StrCast(this.props.Document.title); + const res = await Gateway.Instance.PostSchema(csv, dbName); if (self.props.CollectionView && self.props.CollectionView.props.addDocument) { - let schemaDoc = await Docs.Create.DBDocument("https://www.cs.brown.edu/" + dbName, { title: dbName }, { dbDoc: self.props.Document }); + const schemaDoc = await Docs.Create.DBDocument("https://www.cs.brown.edu/" + dbName, { title: dbName }, { dbDoc: self.props.Document }); if (schemaDoc) { //self.props.CollectionView.props.addDocument(schemaDoc, false); self.props.Document.schemaDoc = schemaDoc; @@ -706,7 +706,7 @@ export class SchemaTable extends React.Component { } getField = (row: number, col?: number) => { - let docs = this.childDocs; + const docs = this.childDocs; row = row % docs.length; while (row < 0) row += docs.length; diff --git a/src/client/views/collections/CollectionStackingView.tsx b/src/client/views/collections/CollectionStackingView.tsx index e564f1193..955fcda80 100644 --- a/src/client/views/collections/CollectionStackingView.tsx +++ b/src/client/views/collections/CollectionStackingView.tsx @@ -1,7 +1,7 @@ import React = require("react"); import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; import { CursorProperty } from "csstype"; -import { action, computed, IReactionDisposer, observable, reaction, runInAction, trace } from "mobx"; +import { action, computed, IReactionDisposer, observable, reaction, runInAction } from "mobx"; import { observer } from "mobx-react"; import Switch from 'rc-switch'; 
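The reorderColumns hunk above computes the target index for a moved column from its old index, the reference column's index, and the before flag. The newIndex formula below is taken from that hunk; the remove-then-insert splice that completes the move is not shown there, so the last two statements are only one consistent way to finish it, not necessarily the patch's exact code:

// Sketch: compute the adjusted insertion index, then move the item (assumed completion).
function reorder<T>(items: T[], toMove: T, relativeTo: T, before: boolean): T[] {
    const columns = [...items];
    const oldIndex = columns.indexOf(toMove);
    const relIndex = columns.indexOf(relativeTo);
    const newIndex =
        (oldIndex > relIndex && !before) ? relIndex + 1 :
        (oldIndex < relIndex && before)  ? relIndex - 1 : relIndex;
    if (oldIndex === newIndex) return columns;
    columns.splice(oldIndex, 1);          // remove from the old slot
    columns.splice(newIndex, 0, toMove);  // insert at the adjusted slot
    return columns;
}

console.log(reorder(["A", "B", "C", "D"], "D", "B", true));  // ["A", "D", "B", "C"]
console.log(reorder(["A", "B", "C", "D"], "A", "C", false)); // ["B", "C", "A", "D"]
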
import { Doc, HeightSym, WidthSym } from "../../../new_fields/Doc"; @@ -10,7 +10,7 @@ import { List } from "../../../new_fields/List"; import { listSpec } from "../../../new_fields/Schema"; import { SchemaHeaderField } from "../../../new_fields/SchemaHeaderField"; import { BoolCast, Cast, NumCast, StrCast, ScriptCast } from "../../../new_fields/Types"; -import { emptyFunction, Utils, numberRange } from "../../../Utils"; +import { emptyFunction, Utils } from "../../../Utils"; import { DocumentType } from "../../documents/DocumentTypes"; import { DragManager } from "../../util/DragManager"; import { Transform } from "../../util/Transform"; @@ -56,15 +56,15 @@ export class CollectionStackingView extends CollectionSubView(doc => doc) { children(docs: Doc[]) { this._docXfs.length = 0; return docs.map((d, i) => { - let pair = Doc.GetLayoutDataDocPair(this.props.Document, this.props.DataDoc, this.props.fieldKey, d); - let layoutDoc = pair.layout ? Doc.Layout(pair.layout) : d; - let width = () => Math.min(layoutDoc.nativeWidth && !layoutDoc.ignoreAspect && !this.props.Document.fillColumn ? layoutDoc[WidthSym]() : Number.MAX_VALUE, this.columnWidth / this.numGroupColumns); - let height = () => this.getDocHeight(layoutDoc); - let dref = React.createRef(); - let dxf = () => this.getDocTransform(layoutDoc, dref.current!); + const pair = Doc.GetLayoutDataDocPair(this.props.Document, this.props.DataDoc, this.props.fieldKey, d); + const layoutDoc = pair.layout ? Doc.Layout(pair.layout) : d; + const width = () => Math.min(layoutDoc.nativeWidth && !layoutDoc.ignoreAspect && !this.props.Document.fillColumn ? layoutDoc[WidthSym]() : Number.MAX_VALUE, this.columnWidth / this.numGroupColumns); + const height = () => this.getDocHeight(layoutDoc); + const dref = React.createRef(); + const dxf = () => this.getDocTransform(layoutDoc, dref.current!); this._docXfs.push({ dxf: dxf, width: width, height: height }); - let rowSpan = Math.ceil((height() + this.gridGap) / this.gridGap); - let style = this.isStackingView ? { width: width(), marginTop: i === 0 ? 0 : this.gridGap, height: height() } : { gridRowEnd: `span ${rowSpan}` }; + const rowSpan = Math.ceil((height() + this.gridGap) / this.gridGap); + const style = this.isStackingView ? { width: width(), marginTop: i === 0 ? 0 : this.gridGap, height: height() } : { gridRowEnd: `span ${rowSpan}` }; return
      {this.getDisplayDoc(pair.layout || d, pair.data, dxf, width)}
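The children() hunk above styles each child differently per view: the stacking view gives it an explicit width, height, and top margin, while the masonry view converts its height into a CSS grid row span derived from the grid gap. A sketch of that per-child style choice (parameter names are illustrative):

// Sketch only: explicit sizing for stacking, grid-row-span sizing for masonry.
function childStyle(isStacking: boolean, index: number, width: number, height: number, gridGap: number) {
    if (isStacking) {
        return { width, marginTop: index === 0 ? 0 : gridGap, height };
    }
    const rowSpan = Math.ceil((height + gridGap) / gridGap); // rows of `gridGap` px the child spans
    return { gridRowEnd: `span ${rowSpan}` };
}

console.log(childStyle(false, 2, 250, 180, 10)); // { gridRowEnd: "span 19" }
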
      ; @@ -83,20 +83,20 @@ export class CollectionStackingView extends CollectionSubView(doc => doc) { return new Map(); } const sectionHeaders = this.sectionHeaders; - let fields = new Map(sectionHeaders.map(sh => [sh, []] as [SchemaHeaderField, []])); + const fields = new Map(sectionHeaders.map(sh => [sh, []] as [SchemaHeaderField, []])); this.filteredChildren.map(d => { - let sectionValue = (d[this.sectionFilter] ? d[this.sectionFilter] : `NO ${this.sectionFilter.toUpperCase()} VALUE`) as object; + const sectionValue = (d[this.sectionFilter] ? d[this.sectionFilter] : `NO ${this.sectionFilter.toUpperCase()} VALUE`) as object; // the next five lines ensures that floating point rounding errors don't create more than one section -syip - let parsed = parseInt(sectionValue.toString()); - let castedSectionValue = !isNaN(parsed) ? parsed : sectionValue; + const parsed = parseInt(sectionValue.toString()); + const castedSectionValue = !isNaN(parsed) ? parsed : sectionValue; // look for if header exists already - let existingHeader = sectionHeaders.find(sh => sh.heading === (castedSectionValue ? castedSectionValue.toString() : `NO ${this.sectionFilter.toUpperCase()} VALUE`)); + const existingHeader = sectionHeaders.find(sh => sh.heading === (castedSectionValue ? castedSectionValue.toString() : `NO ${this.sectionFilter.toUpperCase()} VALUE`)); if (existingHeader) { fields.get(existingHeader)!.push(d); } else { - let newSchemaHeader = new SchemaHeaderField(castedSectionValue ? castedSectionValue.toString() : `NO ${this.sectionFilter.toUpperCase()} VALUE`); + const newSchemaHeader = new SchemaHeaderField(castedSectionValue ? castedSectionValue.toString() : `NO ${this.sectionFilter.toUpperCase()} VALUE`); fields.set(newSchemaHeader, [d]); sectionHeaders.push(newSchemaHeader); } @@ -108,26 +108,26 @@ export class CollectionStackingView extends CollectionSubView(doc => doc) { super.componentDidMount(); this._heightDisposer = reaction(() => { if (this.props.Document.autoHeight) { - let sectionsList = Array.from(this.Sections.size ? this.Sections.values() : [this.filteredChildren]); + const sectionsList = Array.from(this.Sections.size ? this.Sections.values() : [this.filteredChildren]); if (this.isStackingView) { - let res = this.props.ContentScaling() * sectionsList.reduce((maxHght, s) => { - let r1 = Math.max(maxHght, + const res = this.props.ContentScaling() * sectionsList.reduce((maxHght, s) => { + const r1 = Math.max(maxHght, (this.Sections.size ? 50 : 0) + s.reduce((height, d, i) => { - let val = height + this.childDocHeight(d) + (i === s.length - 1 ? this.yMargin : this.gridGap); + const val = height + this.childDocHeight(d) + (i === s.length - 1 ? this.yMargin : this.gridGap); return val; }, this.yMargin)); return r1; }, 0); return res; } else { - let sum = Array.from(this._heightMap.values()).reduce((acc: number, curr: number) => acc += curr, 0); + const sum = Array.from(this._heightMap.values()).reduce((acc: number, curr: number) => acc += curr, 0); return this.props.ContentScaling() * (sum + (this.Sections.size ? (this.props.Document.miniHeaders ? 20 : 85) : -15)); } } return -1; }, (hgt: number) => { - let doc = hgt === -1 ? undefined : this.props.DataDoc && this.props.DataDoc.layout === this.layoutDoc ? this.props.DataDoc : this.layoutDoc; + const doc = hgt === -1 ? undefined : this.props.DataDoc && this.props.DataDoc.layout === this.layoutDoc ? 
this.props.DataDoc : this.layoutDoc; doc && hgt > 0 && (Doc.Layout(doc).height = hgt); }, { fireImmediately: true } @@ -162,9 +162,9 @@ export class CollectionStackingView extends CollectionSubView(doc => doc) { @computed get onClickHandler() { return ScriptCast(this.Document.onChildClick); } getDisplayDoc(doc: Doc, dataDoc: Doc | undefined, dxf: () => Transform, width: () => number) { - let layoutDoc = Doc.Layout(doc); - let height = () => this.getDocHeight(doc); - let finalDxf = () => dxf().scale(this.columnWidth / layoutDoc[WidthSym]()); + const layoutDoc = Doc.Layout(doc); + const height = () => this.getDocHeight(doc); + const finalDxf = () => dxf().scale(this.columnWidth / layoutDoc[WidthSym]()); return doc) { } getDocHeight(d?: Doc) { if (!d) return 0; - let layoutDoc = Doc.Layout(d); - let nw = NumCast(layoutDoc.nativeWidth); - let nh = NumCast(layoutDoc.nativeHeight); + const layoutDoc = Doc.Layout(d); + const nw = NumCast(layoutDoc.nativeWidth); + const nh = NumCast(layoutDoc.nativeHeight); let wid = this.columnWidth / (this.isStackingView ? this.numGroupColumns : 1); if (!layoutDoc.ignoreAspect && !layoutDoc.fitWidth && nw && nh) { - let aspect = nw && nh ? nh / nw : 1; + const aspect = nw && nh ? nh / nw : 1; if (!(d.nativeWidth && !layoutDoc.ignoreAspect && this.props.Document.fillColumn)) wid = Math.min(layoutDoc[WidthSym](), wid); return wid * aspect; } @@ -215,8 +215,8 @@ export class CollectionStackingView extends CollectionSubView(doc => doc) { } @action onDividerMove = (e: PointerEvent): void => { - let dragPos = this.props.ScreenToLocalTransform().transformPoint(e.clientX, e.clientY)[0]; - let delta = dragPos - this._columnStart; + const dragPos = this.props.ScreenToLocalTransform().transformPoint(e.clientX, e.clientY)[0]; + const delta = dragPos - this._columnStart; this._columnStart = dragPos; this.layoutDoc.columnWidth = Math.max(10, this.columnWidth + delta); } @@ -238,13 +238,13 @@ export class CollectionStackingView extends CollectionSubView(doc => doc) { @undoBatch @action drop = (e: Event, de: DragManager.DropEvent) => { - let where = [de.x, de.y]; + const where = [de.x, de.y]; let targInd = -1; let plusOne = false; if (de.data instanceof DragManager.DocumentDragData) { this._docXfs.map((cd, i) => { - let pos = cd.dxf().inverse().transformPoint(-2 * this.gridGap, -2 * this.gridGap); - let pos1 = cd.dxf().inverse().transformPoint(cd.width(), cd.height()); + const pos = cd.dxf().inverse().transformPoint(-2 * this.gridGap, -2 * this.gridGap); + const pos1 = cd.dxf().inverse().transformPoint(cd.width(), cd.height()); if (where[0] > pos[0] && where[0] < pos1[0] && where[1] > pos[1] && where[1] < pos1[1]) { targInd = i; plusOne = (where[1] > (pos[1] + pos1[1]) / 2 ? 1 : 0) ? true : false; @@ -252,12 +252,12 @@ export class CollectionStackingView extends CollectionSubView(doc => doc) { }); } if (super.drop(e, de)) { - let newDoc = de.data.droppedDocuments[0]; - let docs = this.childDocList; + const newDoc = de.data.droppedDocuments[0]; + const docs = this.childDocList; if (docs) { if (targInd === -1) targInd = docs.length; else targInd = docs.indexOf(this.filteredChildren[targInd]); - let srcInd = docs.indexOf(newDoc); + const srcInd = docs.indexOf(newDoc); docs.splice(srcInd, 1); docs.splice((targInd > srcInd ? targInd - 1 : targInd) + (plusOne ? 
1 : 0), 0, newDoc); } @@ -267,19 +267,19 @@ export class CollectionStackingView extends CollectionSubView(doc => doc) { @undoBatch @action onDrop = async (e: React.DragEvent): Promise => { - let where = [e.clientX, e.clientY]; + const where = [e.clientX, e.clientY]; let targInd = -1; this._docXfs.map((cd, i) => { - let pos = cd.dxf().inverse().transformPoint(-2 * this.gridGap, -2 * this.gridGap); - let pos1 = cd.dxf().inverse().transformPoint(cd.width(), cd.height()); + const pos = cd.dxf().inverse().transformPoint(-2 * this.gridGap, -2 * this.gridGap); + const pos1 = cd.dxf().inverse().transformPoint(cd.width(), cd.height()); if (where[0] > pos[0] && where[0] < pos1[0] && where[1] > pos[1] && where[1] < pos1[1]) { targInd = i; } }); super.onDrop(e, {}, () => { if (targInd !== -1) { - let newDoc = this.childDocs[this.childDocs.length - 1]; - let docs = this.childDocList; + const newDoc = this.childDocs[this.childDocs.length - 1]; + const docs = this.childDocList; if (docs) { docs.splice(docs.length - 1, 1); docs.splice(targInd, 0, newDoc); @@ -289,13 +289,13 @@ export class CollectionStackingView extends CollectionSubView(doc => doc) { } headings = () => Array.from(this.Sections.keys()); sectionStacking = (heading: SchemaHeaderField | undefined, docList: Doc[]) => { - let key = this.sectionFilter; + const key = this.sectionFilter; let type: "string" | "number" | "bigint" | "boolean" | "symbol" | "undefined" | "object" | "function" | undefined = undefined; - let types = docList.length ? docList.map(d => typeof d[key]) : this.childDocs.map(d => typeof d[key]); + const types = docList.length ? docList.map(d => typeof d[key]) : this.childDocs.map(d => typeof d[key]); if (types.map((i, idx) => types.indexOf(i) === idx).length === 1) { type = types[0]; } - let cols = () => this.isStackingView ? 1 : Math.max(1, Math.min(this.filteredChildren.length, + const cols = () => this.isStackingView ? 1 : Math.max(1, Math.min(this.filteredChildren.length, Math.floor((this.props.PanelWidth() - 2 * this.xMargin) / (this.columnWidth + this.gridGap)))); return doc) { getDocTransform(doc: Doc, dref: HTMLDivElement) { if (!dref) return Transform.Identity(); - let y = this._scroll; // required for document decorations to update when the text box container is scrolled - let { scale, translateX, translateY } = Utils.GetScreenTransform(dref); - let outerXf = Utils.GetScreenTransform(this._masonryGridRef!); - let offset = this.props.ScreenToLocalTransform().transformDirection(outerXf.translateX - translateX, outerXf.translateY - translateY); + const y = this._scroll; // required for document decorations to update when the text box container is scrolled + const { scale, translateX, translateY } = Utils.GetScreenTransform(dref); + const outerXf = Utils.GetScreenTransform(this._masonryGridRef!); + const offset = this.props.ScreenToLocalTransform().transformDirection(outerXf.translateX - translateX, outerXf.translateY - translateY); return this.props.ScreenToLocalTransform(). translate(offset[0], offset[1] + (this.props.ChromeHeight && this.props.ChromeHeight() < 0 ? this.props.ChromeHeight() : 0)). scale(NumCast(doc.width, 1) / this.columnWidth); } sectionMasonry = (heading: SchemaHeaderField | undefined, docList: Doc[]) => { - let key = this.sectionFilter; + const key = this.sectionFilter; let type: "string" | "number" | "bigint" | "boolean" | "symbol" | "undefined" | "object" | "function" | undefined = undefined; - let types = docList.length ? 
docList.map(d => typeof d[key]) : this.childDocs.map(d => typeof d[key]); + const types = docList.length ? docList.map(d => typeof d[key]) : this.childDocs.map(d => typeof d[key]); if (types.map((i, idx) => types.indexOf(i) === idx).length === 1) { type = types[0]; } - let rows = () => !this.isStackingView ? 1 : Math.max(1, Math.min(docList.length, + const rows = () => !this.isStackingView ? 1 : Math.max(1, Math.min(docList.length, Math.floor((this.props.PanelWidth() - 2 * this.xMargin) / (this.columnWidth + this.gridGap)))); return doc) { } sortFunc = (a: [SchemaHeaderField, Doc[]], b: [SchemaHeaderField, Doc[]]): 1 | -1 => { - let descending = BoolCast(this.props.Document.stackingHeadersSortDescending); - let firstEntry = descending ? b : a; - let secondEntry = descending ? a : b; + const descending = BoolCast(this.props.Document.stackingHeadersSortDescending); + const firstEntry = descending ? b : a; + const secondEntry = descending ? a : b; return firstEntry[0].heading > secondEntry[0].heading ? 1 : -1; } @@ -369,28 +369,28 @@ export class CollectionStackingView extends CollectionSubView(doc => doc) { onContextMenu = (e: React.MouseEvent): void => { // need to test if propagation has stopped because GoldenLayout forces a parallel react hierarchy to be created for its top-level layout if (!e.isPropagationStopped()) { - let subItems: ContextMenuProps[] = []; + const subItems: ContextMenuProps[] = []; subItems.push({ description: `${this.props.Document.fillColumn ? "Variable Size" : "Autosize"} Column`, event: () => this.props.Document.fillColumn = !this.props.Document.fillColumn, icon: "plus" }); subItems.push({ description: `${this.props.Document.showTitles ? "Hide Titles" : "Show Titles"}`, event: () => this.props.Document.showTitles = !this.props.Document.showTitles ? "title" : "", icon: "plus" }); subItems.push({ description: `${this.props.Document.showCaptions ? "Hide Captions" : "Show Captions"}`, event: () => this.props.Document.showCaptions = !this.props.Document.showCaptions ? "caption" : "", icon: "plus" }); ContextMenu.Instance.addItem({ description: "Stacking Options ...", subitems: subItems, icon: "eye" }); - let existingOnClick = ContextMenu.Instance.findByDescription("OnClick..."); - let onClicks: ContextMenuProps[] = existingOnClick && "subitems" in existingOnClick ? existingOnClick.subitems : []; + const existingOnClick = ContextMenu.Instance.findByDescription("OnClick..."); + const onClicks: ContextMenuProps[] = existingOnClick && "subitems" in existingOnClick ? 
existingOnClick.subitems : []; onClicks.push({ description: "Edit onChildClick script", icon: "edit", event: (obj: any) => ScriptBox.EditButtonScript("On Child Clicked...", this.props.Document, "onChildClick", obj.x, obj.y) }); !existingOnClick && ContextMenu.Instance.addItem({ description: "OnClick...", subitems: onClicks, icon: "hand-point-right" }); } } render() { - let editableViewProps = { + const editableViewProps = { GetValue: () => "", SetValue: this.addGroup, contents: "+ ADD A GROUP" }; let sections = [[undefined, this.filteredChildren] as [SchemaHeaderField | undefined, Doc[]]]; if (this.sectionFilter) { - let entries = Array.from(this.Sections.entries()); + const entries = Array.from(this.Sections.entries()); sections = entries.sort(this.sortFunc); } return ( diff --git a/src/client/views/collections/CollectionStackingViewFieldColumn.tsx b/src/client/views/collections/CollectionStackingViewFieldColumn.tsx index b9d334b10..80dc482af 100644 --- a/src/client/views/collections/CollectionStackingViewFieldColumn.tsx +++ b/src/client/views/collections/CollectionStackingViewFieldColumn.tsx @@ -2,17 +2,14 @@ import React = require("react"); import { library } from '@fortawesome/fontawesome-svg-core'; import { faPalette } from '@fortawesome/free-solid-svg-icons'; import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; -import { action, observable, trace, runInAction } from "mobx"; +import { action, observable, runInAction } from "mobx"; import { observer } from "mobx-react"; -import { Doc, WidthSym } from "../../../new_fields/Doc"; -import { Id } from "../../../new_fields/FieldSymbols"; +import { Doc } from "../../../new_fields/Doc"; import { PastelSchemaPalette, SchemaHeaderField } from "../../../new_fields/SchemaHeaderField"; import { ScriptField } from "../../../new_fields/ScriptField"; import { NumCast, StrCast } from "../../../new_fields/Types"; -import { Utils } from "../../../Utils"; import { Docs } from "../../documents/Documents"; import { DragManager } from "../../util/DragManager"; -import { CompileScript } from "../../util/Scripting"; import { SelectionManager } from "../../util/SelectionManager"; import { Transform } from "../../util/Transform"; import { undoBatch } from "../../util/UndoManager"; @@ -61,8 +58,8 @@ export class CollectionStackingViewFieldColumn extends React.Component { this._createAliasSelected = false; if (de.data instanceof DragManager.DocumentDragData) { - let key = StrCast(this.props.parent.props.Document.sectionFilter); - let castedValue = this.getValue(this._heading); + const key = StrCast(this.props.parent.props.Document.sectionFilter); + const castedValue = this.getValue(this._heading); if (castedValue) { de.data.droppedDocuments.forEach(d => d[key] = castedValue); } @@ -74,7 +71,7 @@ export class CollectionStackingViewFieldColumn extends React.Component { - let parsed = parseInt(value); + const parsed = parseInt(value); if (!isNaN(parsed)) { return parsed; } @@ -90,8 +87,8 @@ export class CollectionStackingViewFieldColumn extends React.Component { this._createAliasSelected = false; - let key = StrCast(this.props.parent.props.Document.sectionFilter); - let castedValue = this.getValue(value); + const key = StrCast(this.props.parent.props.Document.sectionFilter); + const castedValue = this.getValue(value); if (castedValue) { if (this.props.parent.sectionHeaders) { if (this.props.parent.sectionHeaders.map(i => i.heading).indexOf(castedValue.toString()) > -1) { @@ -135,11 +132,11 @@ export class CollectionStackingViewFieldColumn extends 
React.Component { this._createAliasSelected = false; - let key = StrCast(this.props.parent.props.Document.sectionFilter); - let newDoc = Docs.Create.TextDocument({ height: 18, width: 200, documentText: "@@@" + value, title: value, autoHeight: true }); + const key = StrCast(this.props.parent.props.Document.sectionFilter); + const newDoc = Docs.Create.TextDocument({ height: 18, width: 200, documentText: "@@@" + value, title: value, autoHeight: true }); newDoc[key] = this.getValue(this.props.heading); - let maxHeading = this.props.docList.reduce((maxHeading, doc) => NumCast(doc.heading) > maxHeading ? NumCast(doc.heading) : maxHeading, 0); - let heading = maxHeading === 0 || this.props.docList.length === 0 ? 1 : maxHeading === 1 ? 2 : 3; + const maxHeading = this.props.docList.reduce((maxHeading, doc) => NumCast(doc.heading) > maxHeading ? NumCast(doc.heading) : maxHeading, 0); + const heading = maxHeading === 0 || this.props.docList.length === 0 ? 1 : maxHeading === 1 ? 2 : 3; newDoc.heading = heading; return this.props.parent.props.addDocument(newDoc); } @@ -147,10 +144,10 @@ export class CollectionStackingViewFieldColumn extends React.Component { this._createAliasSelected = false; - let key = StrCast(this.props.parent.props.Document.sectionFilter); + const key = StrCast(this.props.parent.props.Document.sectionFilter); this.props.docList.forEach(d => d[key] = undefined); if (this.props.parent.sectionHeaders && this.props.headingObject) { - let index = this.props.parent.sectionHeaders.indexOf(this.props.headingObject); + const index = this.props.parent.sectionHeaders.indexOf(this.props.headingObject); this.props.parent.sectionHeaders.splice(index, 1); } } @@ -166,10 +163,10 @@ export class CollectionStackingViewFieldColumn extends React.Component { - let [dx, dy] = this.props.screenToLocalTransform().transformDirection(e.clientX - this._startDragPosition.x, e.clientY - this._startDragPosition.y); + const [dx, dy] = this.props.screenToLocalTransform().transformDirection(e.clientX - this._startDragPosition.x, e.clientY - this._startDragPosition.y); if (Math.abs(dx) + Math.abs(dy) > this._sensitivity) { - let alias = Doc.MakeAlias(this.props.parent.props.Document); - let key = StrCast(this.props.parent.props.Document.sectionFilter); + const alias = Doc.MakeAlias(this.props.parent.props.Document); + const key = StrCast(this.props.parent.props.Document.sectionFilter); let value = this.getValue(this._heading); value = typeof value === "string" ? `"${value}"` : value; alias.viewSpecScript = ScriptField.MakeFunction(`doc.${key} === ${value}`, { doc: Doc.name }); @@ -195,7 +192,7 @@ export class CollectionStackingViewFieldColumn extends React.Component { - let selected = this.props.headingObject ? this.props.headingObject.color : "#f1efeb"; + const selected = this.props.headingObject ? 
this.props.headingObject.color : "#f1efeb"; - let pink = PastelSchemaPalette.get("pink2"); - let purple = PastelSchemaPalette.get("purple4"); - let blue = PastelSchemaPalette.get("bluegreen1"); - let yellow = PastelSchemaPalette.get("yellow4"); - let red = PastelSchemaPalette.get("red2"); - let green = PastelSchemaPalette.get("bluegreen7"); - let cyan = PastelSchemaPalette.get("bluegreen5"); - let orange = PastelSchemaPalette.get("orange1"); - let gray = "#f1efeb"; + const pink = PastelSchemaPalette.get("pink2"); + const purple = PastelSchemaPalette.get("purple4"); + const blue = PastelSchemaPalette.get("bluegreen1"); + const yellow = PastelSchemaPalette.get("yellow4"); + const red = PastelSchemaPalette.get("red2"); + const green = PastelSchemaPalette.get("bluegreen7"); + const cyan = PastelSchemaPalette.get("bluegreen5"); + const orange = PastelSchemaPalette.get("orange1"); + const gray = "#f1efeb"; return (
@@ -243,7 +240,7 @@ export class CollectionStackingViewFieldColumn extends React.Component {
-        let selected = this._createAliasSelected;
+        const selected = this._createAliasSelected;
        return (
      @@ -262,16 +259,16 @@ export class CollectionStackingViewFieldColumn extends React.Component headings.indexOf(i) === idx); - let evContents = heading ? heading : this.props.type && this.props.type === "number" ? "0" : `NO ${key.toUpperCase()} VALUE`; - let headerEditableViewProps = { + const headings = this.props.headings(); + const heading = this._heading; + const style = this.props.parent; + const singleColumn = style.isStackingView; + const uniqueHeadings = headings.map((i, idx) => headings.indexOf(i) === idx); + const evContents = heading ? heading : this.props.type && this.props.type === "number" ? "0" : `NO ${key.toUpperCase()} VALUE`; + const headerEditableViewProps = { GetValue: () => evContents, SetValue: this.headingChanged, contents: evContents, @@ -281,7 +278,7 @@ export class CollectionStackingViewFieldColumn extends React.Component "", SetValue: this.addDocument, contents: "+ NEW", @@ -290,7 +287,7 @@ export class CollectionStackingViewFieldColumn extends React.Component
                : (null);
        for (let i = 0; i < cols; i++) templatecols += `${style.columnWidth / style.numGroupColumns}px `;
-        let chromeStatus = this.props.parent.props.Document.chromeStatus;
+        const chromeStatus = this.props.parent.props.Document.chromeStatus;
        return (
diff --git a/src/client/views/collections/CollectionStaffView.tsx b/src/client/views/collections/CollectionStaffView.tsx
index 40e860b12..105061f46 100644
--- a/src/client/views/collections/CollectionStaffView.tsx
+++ b/src/client/views/collections/CollectionStaffView.tsx
@@ -2,7 +2,7 @@ import { CollectionSubView } from "./CollectionSubView";
 import { Transform } from "../../util/Transform";
 import React = require("react");
 import { computed, action, IReactionDisposer, reaction, runInAction, observable } from "mobx";
-import { Doc, HeightSym } from "../../../new_fields/Doc";
+import { Doc } from "../../../new_fields/Doc";
 import { NumCast } from "../../../new_fields/Types";
 import "./CollectionStaffView.scss";
 import { observer } from "mobx-react";
@@ -32,9 +32,9 @@ export class CollectionStaffView extends CollectionSubView(doc => doc) {
    }
    @computed get staves() {
-        let staves = [];
+        const staves = [];
        for (let i = 0; i < this._staves; i++) {
-            let rows = [];
+            const rows = [];
            for (let j = 0; j < 5; j++) {
                rows.push(
      ); } diff --git a/src/client/views/collections/CollectionSubView.tsx b/src/client/views/collections/CollectionSubView.tsx index e80825825..0a2e27165 100644 --- a/src/client/views/collections/CollectionSubView.tsx +++ b/src/client/views/collections/CollectionSubView.tsx @@ -19,7 +19,7 @@ import { FieldViewProps } from "../nodes/FieldView"; import { FormattedTextBox, GoogleRef } from "../nodes/FormattedTextBox"; import { CollectionView } from "./CollectionView"; import React = require("react"); -var path = require('path'); +import { basename } from 'path'; import { GooglePhotos } from "../../apis/google_docs/GooglePhotosClientUtils"; import { ImageUtils } from "../../util/Import & Export/ImageUtils"; import { Networking } from "../../Network"; @@ -92,7 +92,7 @@ export function CollectionSubView(schemaCtor: (doc: Doc) => T) { return Cast(this.dataField, listSpec(Doc)); } get childDocs() { - let docs = DocListCast(this.dataField); + const docs = DocListCast(this.dataField); const viewSpecScript = Cast(this.props.Document.viewSpecScript, ScriptField); return viewSpecScript ? docs.filter(d => viewSpecScript.script.run({ doc: d }, console.log).result) : docs; } @@ -100,10 +100,10 @@ export function CollectionSubView(schemaCtor: (doc: Doc) => T) { @action protected async setCursorPosition(position: [number, number]) { let ind; - let doc = this.props.Document; - let id = CurrentUserUtils.id; - let email = Doc.CurrentUserEmail; - let pos = { x: position[0], y: position[1] }; + const doc = this.props.Document; + const id = CurrentUserUtils.id; + const email = Doc.CurrentUserEmail; + const pos = { x: position[0], y: position[1] }; if (id && email) { const proto = Doc.GetProto(doc); if (!proto) { @@ -123,7 +123,7 @@ export function CollectionSubView(schemaCtor: (doc: Doc) => T) { if (cursors.length > 0 && (ind = cursors.findIndex(entry => entry.data.metadata.id === id)) > -1) { cursors[ind].setPosition(pos); } else { - let entry = new CursorField({ metadata: { id: id, identifier: email, timestamp: Date.now() }, position: pos }); + const entry = new CursorField({ metadata: { id: id, identifier: email, timestamp: Date.now() }, position: pos }); cursors.push(entry); } } @@ -145,7 +145,7 @@ export function CollectionSubView(schemaCtor: (doc: Doc) => T) { if (de.data.dropAction || de.data.userDropAction) { added = de.data.droppedDocuments.reduce((added: boolean, d) => this.props.addDocument(d) || added, false); } else if (de.data.moveDocument) { - let movedDocs = de.data.draggedDocuments; + const movedDocs = de.data.draggedDocuments; added = movedDocs.reduce((added: boolean, d, i) => de.data.droppedDocuments[i] !== d ? 
this.props.addDocument(de.data.droppedDocuments[i]) : de.data.moveDocument(d, this.props.Document, this.props.addDocument) || added, false); @@ -169,8 +169,8 @@ export function CollectionSubView(schemaCtor: (doc: Doc) => T) { e.stopPropagation(); // bcz: this is a hack to stop propagation when dropping an image on a text document with shift+ctrl return; } - let html = e.dataTransfer.getData("text/html"); - let text = e.dataTransfer.getData("text/plain"); + const html = e.dataTransfer.getData("text/html"); + const text = e.dataTransfer.getData("text/plain"); if (text && text.startsWith("(schemaCtor: (doc: Doc) => T) { e.preventDefault(); if (html && FormattedTextBox.IsFragment(html)) { - let href = FormattedTextBox.GetHref(html); + const href = FormattedTextBox.GetHref(html); if (href) { - let docid = FormattedTextBox.GetDocFromUrl(href); + const docid = FormattedTextBox.GetDocFromUrl(href); if (docid) { // prosemirror text containing link to dash document DocServer.GetRefField(docid).then(f => { if (f instanceof Doc) { @@ -198,19 +198,19 @@ export function CollectionSubView(schemaCtor: (doc: Doc) => T) { return; } if (html && !html.startsWith(" 1 && tags[1].startsWith("img") ? tags[1] : ""; + const img = tags[0].startsWith("img") ? tags[0] : tags.length > 1 && tags[1].startsWith("img") ? tags[1] : ""; if (img) { - let split = img.split("src=\"")[1].split("\"")[0]; - let doc = Docs.Create.ImageDocument(split, { ...options, width: 300 }); + const split = img.split("src=\"")[1].split("\"")[0]; + const doc = Docs.Create.ImageDocument(split, { ...options, width: 300 }); ImageUtils.ExtractExif(doc); this.props.addDocument(doc); return; } else { - let path = window.location.origin + "/doc/"; + const path = window.location.origin + "/doc/"; if (text.startsWith(path)) { - let docid = text.replace(Utils.prepend("/doc/"), "").split("?")[0]; + const docid = text.replace(Utils.prepend("/doc/"), "").split("?")[0]; DocServer.GetRefField(docid).then(f => { if (f instanceof Doc) { if (options.x || options.y) { f.x = options.x; f.y = options.y; } // should be in CollectionFreeFormView @@ -218,7 +218,7 @@ export function CollectionSubView(schemaCtor: (doc: Doc) => T) { } }); } else { - let htmlDoc = Docs.Create.HtmlDocument(html, { ...options, title: "-web page-", width: 300, height: 300, documentText: text }); + const htmlDoc = Docs.Create.HtmlDocument(html, { ...options, title: "-web page-", width: 300, height: 300, documentText: text }); this.props.addDocument(htmlDoc); } return; @@ -231,8 +231,8 @@ export function CollectionSubView(schemaCtor: (doc: Doc) => T) { } let matches: RegExpExecArray | null; if ((matches = /(https:\/\/)?docs\.google\.com\/document\/d\/([^\\]+)\/edit/g.exec(text)) !== null) { - let newBox = Docs.Create.TextDocument({ ...options, width: 400, height: 200, title: "Awaiting title from Google Docs..." }); - let proto = newBox.proto!; + const newBox = Docs.Create.TextDocument({ ...options, width: 400, height: 200, title: "Awaiting title from Google Docs..." 
}); + const proto = newBox.proto!; const documentId = matches[2]; proto[GoogleRef] = documentId; proto.data = "Please select this document and then click on its pull button to load its contents from from Google Docs..."; @@ -249,17 +249,17 @@ export function CollectionSubView(schemaCtor: (doc: Doc) => T) { const mediaItems = await GooglePhotos.Query.AlbumSearch(albumId); console.log(mediaItems); } - let batch = UndoManager.StartBatch("collection view drop"); - let promises: Promise[] = []; + const batch = UndoManager.StartBatch("collection view drop"); + const promises: Promise[] = []; // tslint:disable-next-line:prefer-for-of for (let i = 0; i < e.dataTransfer.items.length; i++) { - let item = e.dataTransfer.items[i]; + const item = e.dataTransfer.items[i]; if (item.kind === "string" && item.type.indexOf("uri") !== -1) { let str: string; - let prom = new Promise(resolve => e.dataTransfer.items[i].getAsString(resolve)) + const prom = new Promise(resolve => e.dataTransfer.items[i].getAsString(resolve)) .then(action((s: string) => rp.head(Utils.CorsProxy(str = s)))) .then(result => { - let type = result["content-type"]; + const type = result["content-type"]; if (type) { Docs.Get.DocumentFromType(type, str, { ...options, width: 300, nativeWidth: type.indexOf("video") !== -1 ? 600 : 300 }) .then(doc => doc && this.props.addDocument(doc)); @@ -267,23 +267,23 @@ export function CollectionSubView(schemaCtor: (doc: Doc) => T) { }); promises.push(prom); } - let type = item.type; + const type = item.type; if (item.kind === "file") { - let file = item.getAsFile(); - let formData = new FormData(); + const file = item.getAsFile(); + const formData = new FormData(); if (!file || !file.type) { continue; } formData.append('file', file); - let dropFileName = file ? file.name : "-empty-"; + const dropFileName = file ? file.name : "-empty-"; promises.push(Networking.PostFormDataToServer("/upload", formData).then(results => { results.map(action(({ clientAccessPath }: any) => { - let full = { ...options, nativeWidth: type.indexOf("video") !== -1 ? 600 : 300, width: 300, title: dropFileName }; - let pathname = Utils.prepend(clientAccessPath); + const full = { ...options, nativeWidth: type.indexOf("video") !== -1 ? 
600 : 300, width: 300, title: dropFileName }; + const pathname = Utils.prepend(clientAccessPath); Docs.Get.DocumentFromType(type, pathname, full).then(doc => { - doc && (Doc.GetProto(doc).fileUpload = path.basename(pathname).replace("upload_", "").replace(/\.[a-z0-9]*$/, "")); + doc && (Doc.GetProto(doc).fileUpload = basename(pathname).replace("upload_", "").replace(/\.[a-z0-9]*$/, "")); doc && this.props.addDocument(doc); }); })); diff --git a/src/client/views/collections/CollectionTreeView.tsx b/src/client/views/collections/CollectionTreeView.tsx index c4b7e2d31..48ea35c6b 100644 --- a/src/client/views/collections/CollectionTreeView.tsx +++ b/src/client/views/collections/CollectionTreeView.tsx @@ -3,7 +3,7 @@ import { faAngleRight, faArrowsAltH, faBell, faCamera, faCaretDown, faCaretRight import { FontAwesomeIcon } from '@fortawesome/react-fontawesome'; import { action, computed, observable } from "mobx"; import { observer } from "mobx-react"; -import { Doc, DocListCast, Field, HeightSym, Opt, WidthSym } from '../../../new_fields/Doc'; +import { Doc, DocListCast, Field, HeightSym, WidthSym } from '../../../new_fields/Doc'; import { Id } from '../../../new_fields/FieldSymbols'; import { List } from '../../../new_fields/List'; import { Document, listSpec } from '../../../new_fields/Schema'; @@ -95,11 +95,11 @@ class TreeView extends React.Component { @computed get MAX_EMBED_HEIGHT() { return NumCast(this.props.document.maxEmbedHeight, 300); } @computed get dataDoc() { return this.templateDataDoc ? this.templateDataDoc : this.props.document; } @computed get fieldKey() { - let splits = StrCast(Doc.LayoutField(this.props.document)).split("fieldKey={\""); + const splits = StrCast(Doc.LayoutField(this.props.document)).split("fieldKey={\""); return splits.length > 1 ? splits[1].split("\"")[0] : "data"; } childDocList(field: string) { - let layout = Doc.LayoutField(this.props.document) instanceof Doc ? Doc.LayoutField(this.props.document) as Doc : undefined; + const layout = Doc.LayoutField(this.props.document) instanceof Doc ? Doc.LayoutField(this.props.document) as Doc : undefined; return ((this.props.dataDoc ? Cast(this.props.dataDoc[field], listSpec(Doc)) : undefined) || (layout ? 
Cast(layout[field], listSpec(Doc)) : undefined) || Cast(this.props.document[field], listSpec(Doc))) as Doc[]; @@ -149,10 +149,10 @@ class TreeView extends React.Component { } onDragMove = (e: PointerEvent): void => { Doc.UnBrushDoc(this.dataDoc); - let pt = [e.clientX, e.clientY] - let rect = this._header!.current!.getBoundingClientRect(); - let before = pt[1] < rect.top + rect.height / 2; - let inside = pt[0] > Math.min(rect.left + 75, rect.left + rect.width * .75) || (!before && this.treeViewOpen && DocListCast(this.dataDoc[this.fieldKey]).length); + const pt = [e.clientX, e.clientY]; + const rect = this._header!.current!.getBoundingClientRect(); + const before = pt[1] < rect.top + rect.height / 2; + const inside = pt[0] > Math.min(rect.left + 75, rect.left + rect.width * .75) || (!before && this.treeViewOpen && DocListCast(this.dataDoc[this.fieldKey]).length); this._header!.current!.className = "treeViewItem-header"; if (inside) this._header!.current!.className += " treeViewItem-header-inside"; else if (before) this._header!.current!.className += " treeViewItem-header-above"; @@ -172,8 +172,8 @@ class TreeView extends React.Component { SetValue={undoBatch((value: string) => Doc.SetInPlace(this.props.document, key, value, false) || true)} OnFillDown={undoBatch((value: string) => { Doc.SetInPlace(this.props.document, key, value, false); - let layoutDoc = this.props.document.layoutCustom instanceof Doc ? Doc.ApplyTemplate(Doc.GetProto(this.props.document.layoutCustom)) : undefined; - let doc = layoutDoc || Docs.Create.FreeformDocument([], { title: "", x: 0, y: 0, width: 100, height: 25, templates: new List([Templates.Title.Layout]) }); + const layoutDoc = this.props.document.layoutCustom instanceof Doc ? Doc.ApplyTemplate(Doc.GetProto(this.props.document.layoutCustom)) : undefined; + const doc = layoutDoc || Docs.Create.FreeformDocument([], { title: "", x: 0, y: 0, width: 100, height: 25, templates: new List([Templates.Title.Layout]) }); TreeView.loadId = doc[Id]; return this.props.addDocument(doc); })} @@ -205,7 +205,7 @@ class TreeView extends React.Component { ContextMenu.Instance.addItem({ description: "Delete Workspace", event: () => this.props.deleteDoc(this.props.document), icon: "trash-alt" }); ContextMenu.Instance.addItem({ description: "Create New Workspace", event: () => MainView.Instance.createNewWorkspace(), icon: "plus" }); } - ContextMenu.Instance.addItem({ description: "Open Fields", event: () => { let kvp = Docs.Create.KVPDocument(this.props.document, { width: 300, height: 300 }); this.props.addDocTab(kvp, this.props.dataDoc ? this.props.dataDoc : kvp, "onRight"); }, icon: "layer-group" }); + ContextMenu.Instance.addItem({ description: "Open Fields", event: () => { const kvp = Docs.Create.KVPDocument(this.props.document, { width: 300, height: 300 }); this.props.addDocTab(kvp, this.props.dataDoc ? this.props.dataDoc : kvp, "onRight"); }, icon: "layer-group" }); ContextMenu.Instance.addItem({ description: "Publish", event: () => DocUtils.Publish(this.props.document, StrCast(this.props.document.title), () => { }, () => { }), icon: "file" }); ContextMenu.Instance.displayMenu(e.pageX > 156 ? 
e.pageX - 156 : 0, e.pageY - 15); e.stopPropagation(); @@ -215,13 +215,13 @@ class TreeView extends React.Component { @undoBatch treeDrop = (e: Event, de: DragManager.DropEvent) => { - let pt = [de.x, de.y]; - let rect = this._header!.current!.getBoundingClientRect(); - let before = pt[1] < rect.top + rect.height / 2; - let inside = pt[0] > Math.min(rect.left + 75, rect.left + rect.width * .75) || (!before && this.treeViewOpen && DocListCast(this.dataDoc[this.fieldKey]).length); + const pt = [de.x, de.y]; + const rect = this._header!.current!.getBoundingClientRect(); + const before = pt[1] < rect.top + rect.height / 2; + const inside = pt[0] > Math.min(rect.left + 75, rect.left + rect.width * .75) || (!before && this.treeViewOpen && DocListCast(this.dataDoc[this.fieldKey]).length); if (de.data instanceof DragManager.LinkDragData) { - let sourceDoc = de.data.linkSourceDocument; - let destDoc = this.props.document; + const sourceDoc = de.data.linkSourceDocument; + const destDoc = this.props.document; DocUtils.MakeLink({ doc: sourceDoc }, { doc: destDoc }); e.stopPropagation(); } @@ -232,7 +232,7 @@ class TreeView extends React.Component { if (inside) { addDoc = (doc: Doc) => Doc.AddDocToList(this.dataDoc, this.fieldKey, doc) || addDoc(doc); } - let movedDocs = (de.data.options === this.props.treeViewId ? de.data.draggedDocuments : de.data.droppedDocuments); + const movedDocs = (de.data.options === this.props.treeViewId ? de.data.draggedDocuments : de.data.droppedDocuments); return ((de.data.dropAction && (de.data.options !== this.props.treeViewId)) || de.data.userDropAction) ? de.data.droppedDocuments.reduce((added, d) => addDoc(d) || added, false) : de.data.moveDocument ? @@ -243,23 +243,23 @@ class TreeView extends React.Component { } docTransform = () => { - let { scale, translateX, translateY } = Utils.GetScreenTransform(this._dref.current!); - let outerXf = this.props.outerXf(); - let offset = this.props.ScreenToLocalTransform().transformDirection(outerXf.translateX - translateX, outerXf.translateY - translateY); - let finalXf = this.props.ScreenToLocalTransform().translate(offset[0], offset[1] + (this.props.ChromeHeight && this.props.ChromeHeight() < 0 ? this.props.ChromeHeight() : 0)); + const { scale, translateX, translateY } = Utils.GetScreenTransform(this._dref.current!); + const outerXf = this.props.outerXf(); + const offset = this.props.ScreenToLocalTransform().transformDirection(outerXf.translateX - translateX, outerXf.translateY - translateY); + const finalXf = this.props.ScreenToLocalTransform().translate(offset[0], offset[1] + (this.props.ChromeHeight && this.props.ChromeHeight() < 0 ? this.props.ChromeHeight() : 0)); return finalXf; } docWidth = () => { - let layoutDoc = Doc.Layout(this.props.document); - let aspect = NumCast(layoutDoc.nativeHeight) / NumCast(layoutDoc.nativeWidth); + const layoutDoc = Doc.Layout(this.props.document); + const aspect = NumCast(layoutDoc.nativeHeight) / NumCast(layoutDoc.nativeWidth); if (aspect) return Math.min(layoutDoc[WidthSym](), Math.min(this.MAX_EMBED_HEIGHT / aspect, this.props.panelWidth() - 20)); return NumCast(layoutDoc.nativeWidth) ? 
Math.min(layoutDoc[WidthSym](), this.props.panelWidth() - 20) : this.props.panelWidth() - 20; } docHeight = () => { - let layoutDoc = Doc.Layout(this.props.document); - let bounds = this.boundsOfCollectionDocument; + const layoutDoc = Doc.Layout(this.props.document); + const bounds = this.boundsOfCollectionDocument; return Math.min(this.MAX_EMBED_HEIGHT, (() => { - let aspect = NumCast(layoutDoc.nativeHeight) / NumCast(layoutDoc.nativeWidth, 1); + const aspect = NumCast(layoutDoc.nativeHeight) / NumCast(layoutDoc.nativeWidth, 1); if (aspect) return this.docWidth() * aspect; if (bounds) return this.docWidth() * (bounds.b - bounds.y) / (bounds.r - bounds.x); return layoutDoc.fitWidth ? (!this.props.document.nativeHeight ? NumCast(this.props.containingCollection.height) : @@ -270,18 +270,18 @@ class TreeView extends React.Component { } @computed get expandedField() { - let ids: { [key: string]: string } = {}; - let doc = this.props.document; + const ids: { [key: string]: string } = {}; + const doc = this.props.document; doc && Object.keys(doc).forEach(key => !(key in ids) && doc[key] !== ComputedField.undefined && (ids[key] = key)); - let rows: JSX.Element[] = []; - for (let key of Object.keys(ids).slice().sort()) { - let contents = doc[key]; + const rows: JSX.Element[] = []; + for (const key of Object.keys(ids).slice().sort()) { + const contents = doc[key]; let contentElement: (JSX.Element | null)[] | JSX.Element = []; if (contents instanceof Doc || Cast(contents, listSpec(Doc))) { - let remDoc = (doc: Doc) => this.remove(doc, key); - let addDoc = (doc: Doc, addBefore?: Doc, before?: boolean) => Doc.AddDocToList(this.dataDoc, key, doc, addBefore, before, false, true); + const remDoc = (doc: Doc) => this.remove(doc, key); + const addDoc = (doc: Doc, addBefore?: Doc, before?: boolean) => Doc.AddDocToList(this.dataDoc, key, doc, addBefore, before, false, true); contentElement = TreeView.GetChildElements(contents instanceof Doc ? [contents] : DocListCast(contents), this.props.treeViewId, doc, undefined, key, this.props.containingCollection, this.props.prevSibling, addDoc, remDoc, this.move, this.props.dropAction, this.props.addDocTab, this.props.pinToPres, this.props.ScreenToLocalTransform, this.props.outerXf, this.props.active, @@ -310,9 +310,9 @@ class TreeView extends React.Component { @computed get renderContent() { const expandKey = this.treeViewExpandedView === this.fieldKey ? this.fieldKey : this.treeViewExpandedView === "links" ? "links" : undefined; if (expandKey !== undefined) { - let remDoc = (doc: Doc) => this.remove(doc, expandKey); - let addDoc = (doc: Doc, addBefore?: Doc, before?: boolean) => Doc.AddDocToList(this.dataDoc, expandKey, doc, addBefore, before, false, true); - let docs = expandKey === "links" ? this.childLinks : this.childDocs; + const remDoc = (doc: Doc) => this.remove(doc, expandKey); + const addDoc = (doc: Doc, addBefore?: Doc, before?: boolean) => Doc.AddDocToList(this.dataDoc, expandKey, doc, addBefore, before, false, true); + const docs = expandKey === "links" ? this.childLinks : this.childDocs; return
        {!docs ? (null) : TreeView.GetChildElements(docs, this.props.treeViewId, Doc.Layout(this.props.document),
@@ -326,7 +326,7 @@ class TreeView extends React.Component {
            {this.expandedField}
;
        } else {
-            let layoutDoc = Doc.Layout(this.props.document);
+            const layoutDoc = Doc.Layout(this.props.document);
            return 
    { */ @computed get renderTitle() { - let reference = React.createRef(); - let onItemDown = SetupDrag(reference, () => this.dataDoc, this.move, this.props.dropAction, this.props.treeViewId, true); + const reference = React.createRef(); + const onItemDown = SetupDrag(reference, () => this.dataDoc, this.move, this.props.dropAction, this.props.treeViewId, true); - let headerElements = ( + const headerElements = ( { if (this.treeViewOpen) { @@ -379,7 +379,7 @@ class TreeView extends React.Component { })}> {this.treeViewExpandedView} ); - let openRight = (
    + const openRight = (
    ); return <> @@ -440,28 +440,28 @@ class TreeView extends React.Component { childDocs = childDocs.filter(d => viewSpecScript.script.run({ doc: d }, console.log).result); } - let docs = childDocs.slice(); - let dataExtension = containingCollection[key + "_ext"] as Doc; - let ascending = dataExtension && BoolCast(dataExtension.sortAscending, null); + const docs = childDocs.slice(); + const dataExtension = containingCollection[key + "_ext"] as Doc; + const ascending = dataExtension && BoolCast(dataExtension.sortAscending, null); if (ascending !== undefined) { - let sortAlphaNum = (a: string, b: string): 0 | 1 | -1 => { - var reN = /[0-9]*$/; - var aA = a.replace(reN, ""); // get rid of trailing numbers - var bA = b.replace(reN, ""); + const sortAlphaNum = (a: string, b: string): 0 | 1 | -1 => { + const reN = /[0-9]*$/; + const aA = a.replace(reN, ""); // get rid of trailing numbers + const bA = b.replace(reN, ""); if (aA === bA) { // if header string matches, then compare numbers numerically - var aN = parseInt(a.match(reN)![0], 10); - var bN = parseInt(b.match(reN)![0], 10); + const aN = parseInt(a.match(reN)![0], 10); + const bN = parseInt(b.match(reN)![0], 10); return aN === bN ? 0 : aN > bN ? 1 : -1; } else { return aA > bA ? 1 : -1; } - } + }; docs.sort(function (a, b): 0 | 1 | -1 { - let descA = ascending ? b : a; - let descB = ascending ? a : b; - let first = descA.title; - let second = descB.title; + const descA = ascending ? b : a; + const descB = ascending ? a : b; + const first = descA.title; + const second = descB.title; // TODO find better way to sort how to sort.................. if (typeof first === 'number' && typeof second === 'number') { return (first - second) > 0 ? 1 : -1; @@ -479,17 +479,17 @@ class TreeView extends React.Component { }); } - let rowWidth = () => panelWidth() - 20; + const rowWidth = () => panelWidth() - 20; return docs.map((child, i) => { const pair = Doc.GetLayoutDataDocPair(containingCollection, dataDoc, key, child); if (!pair.layout || pair.data instanceof Promise) { return (null); } - let indent = i === 0 ? undefined : () => { + const indent = i === 0 ? undefined : () => { if (StrCast(docs[i - 1].layout).indexOf("fieldKey") !== -1) { - let fieldKeysub = StrCast(docs[i - 1].layout).split("fieldKey")[1]; - let fieldKey = fieldKeysub.split("\"")[1]; + const fieldKeysub = StrCast(docs[i - 1].layout).split("fieldKey")[1]; + const fieldKey = fieldKeysub.split("\"")[1]; if (fieldKey && Cast(docs[i - 1][fieldKey], listSpec(Doc)) !== undefined) { Doc.AddDocToList(docs[i - 1], fieldKey, child); docs[i - 1].treeViewOpen = true; @@ -497,21 +497,21 @@ class TreeView extends React.Component { } } }; - let outdent = !parentCollectionDoc ? undefined : () => { + const outdent = !parentCollectionDoc ? undefined : () => { if (StrCast(parentCollectionDoc.layout).indexOf("fieldKey") !== -1) { - let fieldKeysub = StrCast(parentCollectionDoc.layout).split("fieldKey")[1]; - let fieldKey = fieldKeysub.split("\"")[1]; + const fieldKeysub = StrCast(parentCollectionDoc.layout).split("fieldKey")[1]; + const fieldKey = fieldKeysub.split("\"")[1]; Doc.AddDocToList(parentCollectionDoc, fieldKey, child, parentPrevSibling, false); parentCollectionDoc.treeViewOpen = true; remove(child); } }; - let addDocument = (doc: Doc, relativeTo?: Doc, before?: boolean) => { + const addDocument = (doc: Doc, relativeTo?: Doc, before?: boolean) => { return add(doc, relativeTo ? relativeTo : docs[i], before !== undefined ? 
before : false); }; const childLayout = Doc.Layout(pair.layout); - let rowHeight = () => { - let aspect = NumCast(childLayout.nativeWidth, 0) / NumCast(childLayout.nativeHeight, 0); + const rowHeight = () => { + const aspect = NumCast(childLayout.nativeWidth, 0) / NumCast(childLayout.nativeHeight, 0); return aspect ? Math.min(childLayout[WidthSym](), rowWidth()) / aspect : childLayout[HeightSym](); }; return !(child instanceof Doc) ? (null) : { - let children = Cast(this.props.Document[this.props.fieldKey], listSpec(Doc), []); + const children = Cast(this.props.Document[this.props.fieldKey], listSpec(Doc), []); if (children.indexOf(document) !== -1) { children.splice(children.indexOf(document), 1); return true; @@ -587,7 +587,7 @@ export class CollectionTreeView extends CollectionSubView(Document) { e.preventDefault(); ContextMenu.Instance.displayMenu(e.pageX - 15, e.pageY - 15); } else { - let layoutItems: ContextMenuProps[] = []; + const layoutItems: ContextMenuProps[] = []; layoutItems.push({ description: (this.props.Document.preventTreeViewOpen ? "Persist" : "Abandon") + "Treeview State", event: () => this.props.Document.preventTreeViewOpen = !this.props.Document.preventTreeViewOpen, icon: "paint-brush" }); layoutItems.push({ description: (this.props.Document.hideHeaderFields ? "Show" : "Hide") + " Header Fields", event: () => this.props.Document.hideHeaderFields = !this.props.Document.hideHeaderFields, icon: "paint-brush" }); ContextMenu.Instance.addItem({ description: "Treeview Options ...", subitems: layoutItems, icon: "eye" }); @@ -606,9 +606,9 @@ export class CollectionTreeView extends CollectionSubView(Document) { } render() { - let dropAction = StrCast(this.props.Document.dropAction) as dropActionType; - let addDoc = (doc: Doc, relativeTo?: Doc, before?: boolean) => Doc.AddDocToList(this.props.Document, this.props.fieldKey, doc, relativeTo, before, false, false, false); - let moveDoc = (d: Doc, target: Doc, addDoc: (doc: Doc) => boolean) => this.props.moveDocument(d, target, addDoc); + const dropAction = StrCast(this.props.Document.dropAction) as dropActionType; + const addDoc = (doc: Doc, relativeTo?: Doc, before?: boolean) => Doc.AddDocToList(this.props.Document, this.props.fieldKey, doc, relativeTo, before, false, false, false); + const moveDoc = (d: Doc, target: Doc, addDoc: (doc: Doc) => boolean) => this.props.moveDocument(d, target, addDoc); return !this.childDocs ? (null) : (
    Doc.SetInPlace(this.dataDoc, "title", value, false) || true)} OnFillDown={undoBatch((value: string) => { Doc.SetInPlace(this.dataDoc, "title", value, false); - let layoutDoc = this.props.Document.layoutCustom instanceof Doc ? Doc.ApplyTemplate(Doc.GetProto(this.props.Document.layoutCustom)) : undefined; - let doc = layoutDoc || Docs.Create.FreeformDocument([], { title: "", x: 0, y: 0, width: 100, height: 25, templates: new List([Templates.Title.Layout]) }); + const layoutDoc = this.props.Document.layoutCustom instanceof Doc ? Doc.ApplyTemplate(Doc.GetProto(this.props.Document.layoutCustom)) : undefined; + const doc = layoutDoc || Docs.Create.FreeformDocument([], { title: "", x: 0, y: 0, width: 100, height: 25, templates: new List([Templates.Title.Layout]) }); TreeView.loadId = doc[Id]; Doc.AddDocToList(this.props.Document, this.props.fieldKey, doc, this.childDocs.length ? this.childDocs[0] : undefined, true, false, false, false); })} /> diff --git a/src/client/views/collections/CollectionView.tsx b/src/client/views/collections/CollectionView.tsx index 4c49054d2..54f5a2c42 100644 --- a/src/client/views/collections/CollectionView.tsx +++ b/src/client/views/collections/CollectionView.tsx @@ -84,7 +84,7 @@ export class CollectionView extends Touchable { public static SetSafeMode(safeMode: boolean) { this._safeMode = safeMode; } get collectionViewType(): CollectionViewType | undefined { - let viewField = Cast(this.props.Document.viewType, "number"); + const viewField = Cast(this.props.Document.viewType, "number"); if (CollectionView._safeMode) { if (viewField === CollectionViewType.Freeform) { return CollectionViewType.Tree; @@ -101,7 +101,7 @@ export class CollectionView extends Touchable { () => { // chrome status is one of disabled, collapsed, or visible. this determines initial state from document // chrome status may also be view-mode, in reference to stacking view's toggle mode. it is essentially disabled mode, but prevents the toggle button from showing up on the left sidebar. 
- let chromeStatus = this.props.Document.chromeStatus; + const chromeStatus = this.props.Document.chromeStatus; if (chromeStatus && (chromeStatus === "disabled" || chromeStatus === "collapsed")) { runInAction(() => this._collapsed = true); } @@ -119,9 +119,9 @@ export class CollectionView extends Touchable { @action.bound addDocument(doc: Doc): boolean { - let targetDataDoc = Doc.GetProto(this.props.Document); + const targetDataDoc = Doc.GetProto(this.props.Document); Doc.AddDocToList(targetDataDoc, this.props.fieldKey, doc); - let extension = Doc.fieldExtensionDoc(targetDataDoc, this.props.fieldKey); // set metadata about the field being rendered (ie, the set of documents) on an extension field for that field + const extension = Doc.fieldExtensionDoc(targetDataDoc, this.props.fieldKey); // set metadata about the field being rendered (ie, the set of documents) on an extension field for that field extension && (extension.lastModified = new DateField(new Date(Date.now()))); Doc.GetProto(doc).lastOpened = new DateField; return true; @@ -129,9 +129,9 @@ export class CollectionView extends Touchable { @action.bound removeDocument(doc: Doc): boolean { - let docView = DocumentManager.Instance.getDocumentView(doc, this.props.ContainingCollectionView); + const docView = DocumentManager.Instance.getDocumentView(doc, this.props.ContainingCollectionView); docView && SelectionManager.DeselectDoc(docView); - let value = Cast(this.props.Document[this.props.fieldKey], listSpec(Doc), []); + const value = Cast(this.props.Document[this.props.fieldKey], listSpec(Doc), []); let index = value.reduce((p, v, i) => (v instanceof Doc && v === doc) ? i : p, -1); index = index !== -1 ? index : value.reduce((p, v, i) => (v instanceof Doc && Doc.AreProtosEqual(v, doc)) ? i : p, -1); @@ -163,7 +163,7 @@ export class CollectionView extends Touchable { } private SubViewHelper = (type: CollectionViewType, renderProps: CollectionRenderProps) => { - let props = { ...this.props, ...renderProps, chromeCollapsed: this._collapsed, ChromeHeight: this.chromeHeight, CollectionView: this, annotationsKey: "" }; + const props = { ...this.props, ...renderProps, chromeCollapsed: this._collapsed, ChromeHeight: this.chromeHeight, CollectionView: this, annotationsKey: "" }; switch (type) { case CollectionViewType.Schema: return (); case CollectionViewType.Docking: return (); @@ -186,7 +186,7 @@ export class CollectionView extends Touchable { private SubView = (type: CollectionViewType, renderProps: CollectionRenderProps) => { // currently cant think of a reason for collection docking view to have a chrome. mind may change if we ever have nested docking views -syip - let chrome = this.props.Document.chromeStatus === "disabled" || type === CollectionViewType.Docking ? (null) : + const chrome = this.props.Document.chromeStatus === "disabled" || type === CollectionViewType.Docking ? (null) : ; return [chrome, this.SubViewHelper(type, renderProps)]; } @@ -194,8 +194,8 @@ export class CollectionView extends Touchable { onContextMenu = (e: React.MouseEvent): void => { if (!e.isPropagationStopped() && this.props.Document[Id] !== CurrentUserUtils.MainDocId) { // need to test this because GoldenLayout causes a parallel hierarchy in the React DOM for its children and the main document view7 - let existingVm = ContextMenu.Instance.findByDescription("View Modes..."); - let subItems = existingVm && "subitems" in existingVm ? 
existingVm.subitems : []; + const existingVm = ContextMenu.Instance.findByDescription("View Modes..."); + const subItems = existingVm && "subitems" in existingVm ? existingVm.subitems : []; subItems.push({ description: "Freeform", event: () => { this.props.Document.viewType = CollectionViewType.Freeform; }, icon: "signature" }); if (CollectionView._safeMode) { ContextMenu.Instance.addItem({ description: "Test Freeform", event: () => this.props.Document.viewType = CollectionViewType.Invalid, icon: "project-diagram" }); @@ -221,13 +221,13 @@ export class CollectionView extends Touchable { subItems.push({ description: "lightbox", event: action(() => this._isLightboxOpen = true), icon: "eye" }); !existingVm && ContextMenu.Instance.addItem({ description: "View Modes...", subitems: subItems, icon: "eye" }); - let existing = ContextMenu.Instance.findByDescription("Layout..."); - let layoutItems = existing && "subitems" in existing ? existing.subitems : []; + const existing = ContextMenu.Instance.findByDescription("Layout..."); + const layoutItems = existing && "subitems" in existing ? existing.subitems : []; layoutItems.push({ description: `${this.props.Document.forceActive ? "Select" : "Force"} Contents Active`, event: () => this.props.Document.forceActive = !this.props.Document.forceActive, icon: "project-diagram" }); !existing && ContextMenu.Instance.addItem({ description: "Layout...", subitems: layoutItems, icon: "hand-point-right" }); - let more = ContextMenu.Instance.findByDescription("More..."); - let moreItems = more && "subitems" in more ? more.subitems : []; + const more = ContextMenu.Instance.findByDescription("More..."); + const moreItems = more && "subitems" in more ? more.subitems : []; moreItems.push({ description: "Export Image Hierarchy", icon: "columns", event: () => ImageUtils.ExportHierarchyToFileSystem(this.props.Document) }); !more && ContextMenu.Instance.addItem({ description: "More...", subitems: moreItems, icon: "hand-point-right" }); } diff --git a/src/client/views/collections/CollectionViewChromes.tsx b/src/client/views/collections/CollectionViewChromes.tsx index 06fca7c38..4161e5d6e 100644 --- a/src/client/views/collections/CollectionViewChromes.tsx +++ b/src/client/views/collections/CollectionViewChromes.tsx @@ -13,7 +13,6 @@ import { DragManager } from "../../util/DragManager"; import { undoBatch } from "../../util/UndoManager"; import { EditableView } from "../EditableView"; import { COLLECTION_BORDER_WIDTH } from "../globalCssVariables.scss"; -import { DocLike } from "../MetadataEntryMenu"; import { CollectionViewType } from "./CollectionView"; import { CollectionView } from "./CollectionView"; import "./CollectionViewChromes.scss"; @@ -33,7 +32,7 @@ interface Filter { contains: boolean; } -let stopPropagation = (e: React.SyntheticEvent) => e.stopPropagation(); +const stopPropagation = (e: React.SyntheticEvent) => e.stopPropagation(); @observer export class CollectionViewBaseChrome extends React.Component { @@ -80,11 +79,11 @@ export class CollectionViewBaseChrome extends React.Component { - let re: any = /(!)?\(\(\(doc\.(\w+)\s+&&\s+\(doc\.\w+\s+as\s+\w+\)\.includes\(\"(\w+)\"\)/g; - let arr: any[] = re.exec(script); - let toReturn: Filter[] = []; + const re: any = /(!)?\(\(\(doc\.(\w+)\s+&&\s+\(doc\.\w+\s+as\s+\w+\)\.includes\(\"(\w+)\"\)/g; + const arr: any[] = re.exec(script); + const toReturn: Filter[] = []; if (arr !== null) { - let filter: Filter = { + const filter: Filter = { key: arr[2], value: arr[3], contains: (arr[1] === "!") ? 
false : true, @@ -120,14 +119,14 @@ export class CollectionViewBaseChrome extends React.Component { this.addKeyRestrictions(fields); // chrome status is one of disabled, collapsed, or visible. this determines initial state from document - let chromeStatus = this.props.CollectionView.props.Document.chromeStatus; + const chromeStatus = this.props.CollectionView.props.Document.chromeStatus; if (chromeStatus) { if (chromeStatus === "disabled") { throw new Error("how did you get here, if chrome status is 'disabled' on a collection, a chrome shouldn't even be instantiated!"); @@ -183,7 +182,7 @@ export class CollectionViewBaseChrome extends React.Component { - let index = this._keyRestrictions.length; + const index = this._keyRestrictions.length; this._keyRestrictions.push([ runInAction(() => this._keyRestrictions[index][1] = value)} />, ""]); this.openViewSpecs(e); @@ -194,26 +193,26 @@ export class CollectionViewBaseChrome extends React.Component i[1]).filter(i => i.length > 0).join(" && ") + ")"; - let yearOffset = this._dateWithinValue[1] === 'y' ? 1 : 0; - let monthOffset = this._dateWithinValue[1] === 'm' ? parseInt(this._dateWithinValue[0]) : 0; - let weekOffset = this._dateWithinValue[1] === 'w' ? parseInt(this._dateWithinValue[0]) : 0; - let dayOffset = (this._dateWithinValue[1] === 'd' ? parseInt(this._dateWithinValue[0]) : 0) + weekOffset * 7; + const keyRestrictionScript = "(" + this._keyRestrictions.map(i => i[1]).filter(i => i.length > 0).join(" && ") + ")"; + const yearOffset = this._dateWithinValue[1] === 'y' ? 1 : 0; + const monthOffset = this._dateWithinValue[1] === 'm' ? parseInt(this._dateWithinValue[0]) : 0; + const weekOffset = this._dateWithinValue[1] === 'w' ? parseInt(this._dateWithinValue[0]) : 0; + const dayOffset = (this._dateWithinValue[1] === 'd' ? 
parseInt(this._dateWithinValue[0]) : 0) + weekOffset * 7; let dateRestrictionScript = ""; if (this._dateValue instanceof Date) { - let lowerBound = new Date(this._dateValue.getFullYear() - yearOffset, this._dateValue.getMonth() - monthOffset, this._dateValue.getDate() - dayOffset); - let upperBound = new Date(this._dateValue.getFullYear() + yearOffset, this._dateValue.getMonth() + monthOffset, this._dateValue.getDate() + dayOffset + 1); + const lowerBound = new Date(this._dateValue.getFullYear() - yearOffset, this._dateValue.getMonth() - monthOffset, this._dateValue.getDate() - dayOffset); + const upperBound = new Date(this._dateValue.getFullYear() + yearOffset, this._dateValue.getMonth() + monthOffset, this._dateValue.getDate() + dayOffset + 1); dateRestrictionScript = `((doc.creationDate as any).date >= ${lowerBound.valueOf()} && (doc.creationDate as any).date <= ${upperBound.valueOf()})`; } else { - let createdDate = new Date(this._dateValue); + const createdDate = new Date(this._dateValue); if (!isNaN(createdDate.getTime())) { - let lowerBound = new Date(createdDate.getFullYear() - yearOffset, createdDate.getMonth() - monthOffset, createdDate.getDate() - dayOffset); - let upperBound = new Date(createdDate.getFullYear() + yearOffset, createdDate.getMonth() + monthOffset, createdDate.getDate() + dayOffset + 1); + const lowerBound = new Date(createdDate.getFullYear() - yearOffset, createdDate.getMonth() - monthOffset, createdDate.getDate() - dayOffset); + const upperBound = new Date(createdDate.getFullYear() + yearOffset, createdDate.getMonth() + monthOffset, createdDate.getDate() + dayOffset + 1); dateRestrictionScript = `((doc.creationDate as any).date >= ${lowerBound.valueOf()} && (doc.creationDate as any).date <= ${upperBound.valueOf()})`; } } - let fullScript = dateRestrictionScript.length || keyRestrictionScript.length ? dateRestrictionScript.length ? + const fullScript = dateRestrictionScript.length || keyRestrictionScript.length ? dateRestrictionScript.length ? `${dateRestrictionScript} ${keyRestrictionScript.length ? "&&" : ""} (${keyRestrictionScript})` : `(${keyRestrictionScript}) ${dateRestrictionScript.length ? "&&" : ""} ${dateRestrictionScript}` : "true"; @@ -270,7 +269,7 @@ export class CollectionViewBaseChrome extends React.Component) => this.pivotKeyDisplay = e.currentTarget.value)} onKeyPress={action((e: React.KeyboardEvent) => { - let value = e.currentTarget.value; + const value = e.currentTarget.value; if (e.which === 13) { this.pivotKey = value; this.pivotKeyDisplay = ""; @@ -357,7 +356,7 @@ export class CollectionViewBaseChrome extends React.Component { e.stopPropagation(); e.preventDefault(); - let [dx, dy] = [e.clientX - this._startDragPosition.x, e.clientY - this._startDragPosition.y]; + const [dx, dy] = [e.clientX - this._startDragPosition.x, e.clientY - this._startDragPosition.y]; if (Math.abs(dx) + Math.abs(dy) > this._sensitivity) { this._buttonizableCommands.filter(c => c.title === this._currentKey).map(c => DragManager.StartButtonDrag([this._commandRef.current!], c.script, c.title, @@ -373,7 +372,7 @@ export class CollectionViewBaseChrome extends React.Component
    @@ -480,7 +479,7 @@ export class CollectionStackingViewChrome extends React.Component => { value = value.toLowerCase(); - let docs = DocListCast(this.props.CollectionView.props.Document[this.props.CollectionView.props.fieldKey]); + const docs = DocListCast(this.props.CollectionView.props.Document[this.props.CollectionView.props.fieldKey]); if (docs instanceof Doc) { return Object.keys(docs).filter(key => key.toLowerCase().startsWith(value)); } else { @@ -571,31 +570,31 @@ export class CollectionSchemaViewChrome extends React.Component { - let dividerWidth = 4; - let borderWidth = Number(COLLECTION_BORDER_WIDTH); - let panelWidth = this.props.CollectionView.props.PanelWidth(); - let previewWidth = NumCast(this.props.CollectionView.props.Document.schemaPreviewWidth); - let tableWidth = panelWidth - 2 * borderWidth - dividerWidth - previewWidth; + const dividerWidth = 4; + const borderWidth = Number(COLLECTION_BORDER_WIDTH); + const panelWidth = this.props.CollectionView.props.PanelWidth(); + const previewWidth = NumCast(this.props.CollectionView.props.Document.schemaPreviewWidth); + const tableWidth = panelWidth - 2 * borderWidth - dividerWidth - previewWidth; this.props.CollectionView.props.Document.schemaPreviewWidth = previewWidth === 0 ? Math.min(tableWidth / 3, 200) : 0; } @undoBatch @action toggleTextwrap = async () => { - let textwrappedRows = Cast(this.props.CollectionView.props.Document.textwrappedSchemaRows, listSpec("string"), []); + const textwrappedRows = Cast(this.props.CollectionView.props.Document.textwrappedSchemaRows, listSpec("string"), []); if (textwrappedRows.length) { this.props.CollectionView.props.Document.textwrappedSchemaRows = new List([]); } else { - let docs = DocListCast(this.props.CollectionView.props.Document[this.props.CollectionView.props.fieldKey]); - let allRows = docs instanceof Doc ? [docs[Id]] : docs.map(doc => doc[Id]); + const docs = DocListCast(this.props.CollectionView.props.Document[this.props.CollectionView.props.fieldKey]); + const allRows = docs instanceof Doc ? [docs[Id]] : docs.map(doc => doc[Id]); this.props.CollectionView.props.Document.textwrappedSchemaRows = new List(allRows); } } render() { - let previewWidth = NumCast(this.props.CollectionView.props.Document.schemaPreviewWidth); - let textWrapped = Cast(this.props.CollectionView.props.Document.textwrappedSchemaRows, listSpec("string"), []).length > 0; + const previewWidth = NumCast(this.props.CollectionView.props.Document.schemaPreviewWidth); + const textWrapped = Cast(this.props.CollectionView.props.Document.textwrappedSchemaRows, listSpec("string"), []).length > 0; return (
    diff --git a/src/client/views/collections/KeyRestrictionRow.tsx b/src/client/views/collections/KeyRestrictionRow.tsx index e35b7d7d3..f3071b316 100644 --- a/src/client/views/collections/KeyRestrictionRow.tsx +++ b/src/client/views/collections/KeyRestrictionRow.tsx @@ -1,8 +1,6 @@ import * as React from "react"; import { observable, runInAction } from "mobx"; import { observer } from "mobx-react"; -import { PastelSchemaPalette } from "../../../new_fields/SchemaHeaderField"; -import { Doc } from "../../../new_fields/Doc"; interface IKeyRestrictionProps { contains: boolean; @@ -20,13 +18,13 @@ export default class KeyRestrictionRow extends React.Component { } async fetchDocuments() { - let aliases = (await SearchUtil.GetAliasesOfDocument(this.props.Document)).filter(doc => doc !== this.props.Document); + const aliases = (await SearchUtil.GetAliasesOfDocument(this.props.Document)).filter(doc => doc !== this.props.Document); const { docs } = await SearchUtil.Search("", true, { fq: `data_l:"${this.props.Document[Id]}"` }); const map: Map = new Map; const allDocs = await Promise.all(aliases.map(doc => SearchUtil.Search("", true, { fq: `data_l:"${doc[Id]}"` }).then(result => result.docs))); @@ -129,7 +129,7 @@ export class ButtonSelector extends React.Component<{ Document: Doc, Stack: any render() { let flyout; if (this.hover) { - let view = DocumentManager.Instance.getDocumentView(this.props.Document); + const view = DocumentManager.Instance.getDocumentView(this.props.Document); flyout = !view ? (null) : (
    diff --git a/src/client/views/collections/collectionFreeForm/CollectionFreeFormLayoutEngines.tsx b/src/client/views/collections/collectionFreeForm/CollectionFreeFormLayoutEngines.tsx index 4a32c1647..012115b1f 100644 --- a/src/client/views/collections/collectionFreeForm/CollectionFreeFormLayoutEngines.tsx +++ b/src/client/views/collections/collectionFreeForm/CollectionFreeFormLayoutEngines.tsx @@ -77,7 +77,7 @@ export function computePivotLayout(poolData: ObservableMap, pivotDo fontSize: NumCast(pivotDoc.pivotFontSize, 10) }); for (const doc of val) { - let layoutDoc = Doc.Layout(doc); + const layoutDoc = Doc.Layout(doc); let wid = pivotAxisWidth; let hgt = layoutDoc.nativeWidth ? (NumCast(layoutDoc.nativeHeight) / NumCast(layoutDoc.nativeWidth)) * pivotAxisWidth : pivotAxisWidth; if (hgt > pivotAxisWidth) { @@ -100,7 +100,7 @@ export function computePivotLayout(poolData: ObservableMap, pivotDo }); childPairs.map(pair => { - let defaultPosition = { + const defaultPosition = { x: NumCast(pair.layout.x), y: NumCast(pair.layout.y), z: NumCast(pair.layout.z), @@ -108,7 +108,7 @@ export function computePivotLayout(poolData: ObservableMap, pivotDo height: NumCast(pair.layout.height) }; const pos = docMap.get(pair.layout) || defaultPosition; - let data = poolData.get(pair.layout[Id]); + const data = poolData.get(pair.layout[Id]); if (!data || pos.x !== data.x || pos.y !== data.y || pos.z !== data.z || pos.width !== data.width || pos.height !== data.height) { runInAction(() => poolData.set(pair.layout[Id], { transition: "transform 1s", ...pos })); } @@ -118,10 +118,10 @@ export function computePivotLayout(poolData: ObservableMap, pivotDo export function AddCustomFreeFormLayout(doc: Doc, dataKey: string): () => void { return () => { - let addOverlay = (key: "arrangeScript" | "arrangeInit", options: OverlayElementOptions, params?: Record, requiredType?: string) => { + const addOverlay = (key: "arrangeScript" | "arrangeInit", options: OverlayElementOptions, params?: Record, requiredType?: string) => { let overlayDisposer: () => void = emptyFunction; // filled in below after we have a reference to the scriptingBox const scriptField = Cast(doc[key], ScriptField); - let scriptingBox = overlayDisposer()} // don't get rid of the function wrapper-- we don't want to use the current value of overlayDiposer, but the one set below onSave={(text, onError) => { diff --git a/src/client/views/collections/collectionFreeForm/CollectionFreeFormLinkView.tsx b/src/client/views/collections/collectionFreeForm/CollectionFreeFormLinkView.tsx index b00728079..5e4b4fd27 100644 --- a/src/client/views/collections/collectionFreeForm/CollectionFreeFormLinkView.tsx +++ b/src/client/views/collections/collectionFreeForm/CollectionFreeFormLinkView.tsx @@ -6,9 +6,8 @@ import "./CollectionFreeFormLinkView.scss"; import React = require("react"); import v5 = require("uuid/v5"); import { DocumentType } from "../../../documents/DocumentTypes"; -import { observable, action, reaction, IReactionDisposer, trace } from "mobx"; -import { StrCast, Cast } from "../../../../new_fields/Types"; -import { TraceMobx } from "../../../../new_fields/util"; +import { observable, action, reaction, IReactionDisposer } from "mobx"; +import { StrCast } from "../../../../new_fields/Types"; export interface CollectionFreeFormLinkViewProps { A: DocumentView; @@ -26,22 +25,22 @@ export class CollectionFreeFormLinkView extends React.Component { setTimeout(action(() => this._opacity = 1), 0); // since the render code depends on querying the Dom through 
getBoudndingClientRect, we need to delay triggering render() setTimeout(action(() => this._opacity = 0.05), 750); // this will unhighlight the link line. - let acont = this.props.A.props.Document.type === DocumentType.LINK ? this.props.A.ContentDiv!.getElementsByClassName("docuLinkBox-cont") : []; - let bcont = this.props.B.props.Document.type === DocumentType.LINK ? this.props.B.ContentDiv!.getElementsByClassName("docuLinkBox-cont") : []; - let adiv = (acont.length ? acont[0] : this.props.A.ContentDiv!); - let bdiv = (bcont.length ? bcont[0] : this.props.B.ContentDiv!); - let a = adiv.getBoundingClientRect(); - let b = bdiv.getBoundingClientRect(); - let abounds = adiv.parentElement!.getBoundingClientRect(); - let bbounds = bdiv.parentElement!.getBoundingClientRect(); - let apt = Utils.closestPtBetweenRectangles(abounds.left, abounds.top, abounds.width, abounds.height, + const acont = this.props.A.props.Document.type === DocumentType.LINK ? this.props.A.ContentDiv!.getElementsByClassName("docuLinkBox-cont") : []; + const bcont = this.props.B.props.Document.type === DocumentType.LINK ? this.props.B.ContentDiv!.getElementsByClassName("docuLinkBox-cont") : []; + const adiv = (acont.length ? acont[0] : this.props.A.ContentDiv!); + const bdiv = (bcont.length ? bcont[0] : this.props.B.ContentDiv!); + const a = adiv.getBoundingClientRect(); + const b = bdiv.getBoundingClientRect(); + const abounds = adiv.parentElement!.getBoundingClientRect(); + const bbounds = bdiv.parentElement!.getBoundingClientRect(); + const apt = Utils.closestPtBetweenRectangles(abounds.left, abounds.top, abounds.width, abounds.height, bbounds.left, bbounds.top, bbounds.width, bbounds.height, a.left + a.width / 2, a.top + a.height / 2); - let bpt = Utils.closestPtBetweenRectangles(bbounds.left, bbounds.top, bbounds.width, bbounds.height, + const bpt = Utils.closestPtBetweenRectangles(bbounds.left, bbounds.top, bbounds.width, bbounds.height, abounds.left, abounds.top, abounds.width, abounds.height, apt.point.x, apt.point.y); - let afield = StrCast(this.props.A.props.Document[StrCast(this.props.A.props.layoutKey, "layout")]).indexOf("anchor1") === -1 ? "anchor2" : "anchor1"; - let bfield = afield === "anchor1" ? "anchor2" : "anchor1"; + const afield = StrCast(this.props.A.props.Document[StrCast(this.props.A.props.layoutKey, "layout")]).indexOf("anchor1") === -1 ? "anchor2" : "anchor1"; + const bfield = afield === "anchor1" ? 
"anchor2" : "anchor1"; this.props.A.props.Document[afield + "_x"] = (apt.point.x - abounds.left) / abounds.width * 100; this.props.A.props.Document[afield + "_y"] = (apt.point.y - abounds.top) / abounds.height * 100; this.props.A.props.Document[bfield + "_x"] = (bpt.point.x - bbounds.left) / bbounds.width * 100; @@ -55,18 +54,18 @@ export class CollectionFreeFormLinkView extends React.Component { + const connections = DocumentManager.Instance.LinkedDocumentViews.reduce((drawnPairs, connection) => { if (!drawnPairs.reduce((found, drawnPair) => { - let match1 = (connection.a === drawnPair.a && connection.b === drawnPair.b); - let match2 = (connection.a === drawnPair.b && connection.b === drawnPair.a); - let match = match1 || match2; + const match1 = (connection.a === drawnPair.a && connection.b === drawnPair.b); + const match2 = (connection.a === drawnPair.b && connection.b === drawnPair.a); + const match = match1 || match2; if (match && !drawnPair.l.reduce((found, link) => found || link[Id] === connection.l[Id], false)) { drawnPair.l.push(connection.l); } diff --git a/src/client/views/collections/collectionFreeForm/CollectionFreeFormRemoteCursors.tsx b/src/client/views/collections/collectionFreeForm/CollectionFreeFormRemoteCursors.tsx index b8148852d..bb9ae4326 100644 --- a/src/client/views/collections/collectionFreeForm/CollectionFreeFormRemoteCursors.tsx +++ b/src/client/views/collections/collectionFreeForm/CollectionFreeFormRemoteCursors.tsx @@ -13,14 +13,14 @@ import v5 = require("uuid/v5"); export class CollectionFreeFormRemoteCursors extends React.Component { protected getCursors(): CursorField[] { - let doc = this.props.Document; + const doc = this.props.Document; - let id = CurrentUserUtils.id; + const id = CurrentUserUtils.id; if (!id) { return []; } - let cursors = Cast(doc.cursors, listSpec(CursorField)); + const cursors = Cast(doc.cursors, listSpec(CursorField)); const now = mobxUtils.now(); // const now = Date.now(); @@ -30,7 +30,7 @@ export class CollectionFreeFormRemoteCursors extends React.Component { if (this.crosshairs) { - let ctx = this.crosshairs.getContext('2d'); + const ctx = this.crosshairs.getContext('2d'); if (ctx) { ctx.fillStyle = backgroundColor; ctx.fillRect(0, 0, 20, 20); @@ -62,8 +62,8 @@ export class CollectionFreeFormRemoteCursors extends React.Component { - let m = c.data.metadata; - let l = c.data.position; + const m = c.data.metadata; + const l = c.data.position; this.drawCrosshairs("#" + v5(m.id, v5.URL).substring(0, 6).toUpperCase() + "22"); return (
    Transform.Identity().scale(1 / this.zoomScaling()).translate(this.panX(), this.panY()); private addLiveTextBox = (newBox: Doc) => { FormattedTextBox.SelectOnLoad = newBox[Id];// track the new text box so we can give it a prop that tells it to focus itself when it's displayed - let maxHeading = this.childDocs.reduce((maxHeading, doc) => NumCast(doc.heading) > maxHeading ? NumCast(doc.heading) : maxHeading, 0); + const maxHeading = this.childDocs.reduce((maxHeading, doc) => NumCast(doc.heading) > maxHeading ? NumCast(doc.heading) : maxHeading, 0); let heading = maxHeading === 0 || this.childDocs.length === 0 ? 1 : maxHeading === 1 ? 2 : 0; if (heading === 0) { - let sorted = this.childDocs.filter(d => d.type === DocumentType.TEXT && d.data_ext instanceof Doc && d.data_ext.lastModified).sort((a, b) => DateCast((Cast(a.data_ext, Doc) as Doc).lastModified).date > DateCast((Cast(b.data_ext, Doc) as Doc).lastModified).date ? 1 : + const sorted = this.childDocs.filter(d => d.type === DocumentType.TEXT && d.data_ext instanceof Doc && d.data_ext.lastModified).sort((a, b) => DateCast((Cast(a.data_ext, Doc) as Doc).lastModified).date > DateCast((Cast(b.data_ext, Doc) as Doc).lastModified).date ? 1 : DateCast((Cast(a.data_ext, Doc) as Doc).lastModified).date < DateCast((Cast(b.data_ext, Doc) as Doc).lastModified).date ? -1 : 0); heading = !sorted.length ? Math.max(1, maxHeading) : NumCast(sorted[sorted.length - 1].heading) === 1 ? 2 : NumCast(sorted[sorted.length - 1].heading); } @@ -109,7 +110,7 @@ export class CollectionFreeFormView extends CollectionSubView(PanZoomDocument) { this.addDocument(newBox); } private addDocument = (newBox: Doc) => { - let added = this.props.addDocument(newBox); + const added = this.props.addDocument(newBox); added && this.bringToFront(newBox); added && this.updateCluster(newBox); return added; @@ -126,36 +127,36 @@ export class CollectionFreeFormView extends CollectionSubView(PanZoomDocument) { @action onDrop = (e: React.DragEvent): Promise => { - var pt = this.getTransform().transformPoint(e.pageX, e.pageY); + const pt = this.getTransform().transformPoint(e.pageX, e.pageY); return super.onDrop(e, { x: pt[0], y: pt[1] }); } @undoBatch @action drop = (e: Event, de: DragManager.DropEvent) => { - let xf = this.getTransform(); - let xfo = this.getTransformOverlay(); - let [xp, yp] = xf.transformPoint(de.x, de.y); - let [xpo, ypo] = xfo.transformPoint(de.x, de.y); + const xf = this.getTransform(); + const xfo = this.getTransformOverlay(); + const [xp, yp] = xf.transformPoint(de.x, de.y); + const [xpo, ypo] = xfo.transformPoint(de.x, de.y); if (super.drop(e, de)) { if (de.data instanceof DragManager.DocumentDragData) { if (de.data.droppedDocuments.length) { - let firstDoc = de.data.droppedDocuments[0]; - let z = NumCast(firstDoc.z); - let x = (z ? xpo : xp) - de.data.offset[0]; - let y = (z ? ypo : yp) - de.data.offset[1]; - let dropX = NumCast(firstDoc.x); - let dropY = NumCast(firstDoc.y); + const firstDoc = de.data.droppedDocuments[0]; + const z = NumCast(firstDoc.z); + const x = (z ? xpo : xp) - de.data.offset[0]; + const y = (z ? 
ypo : yp) - de.data.offset[1]; + const dropX = NumCast(firstDoc.x); + const dropY = NumCast(firstDoc.y); de.data.droppedDocuments.forEach(action((d: Doc) => { - let layoutDoc = Doc.Layout(d); + const layoutDoc = Doc.Layout(d); d.x = x + NumCast(d.x) - dropX; d.y = y + NumCast(d.y) - dropY; if (!NumCast(layoutDoc.width)) { layoutDoc.width = 300; } if (!NumCast(layoutDoc.height)) { - let nw = NumCast(layoutDoc.nativeWidth); - let nh = NumCast(layoutDoc.nativeHeight); + const nw = NumCast(layoutDoc.nativeWidth); + const nh = NumCast(layoutDoc.nativeHeight); layoutDoc.height = nw && nh ? nh / nw * NumCast(layoutDoc.width) : 300; } this.bringToFront(d); @@ -166,11 +167,11 @@ export class CollectionFreeFormView extends CollectionSubView(PanZoomDocument) { } else if (de.data instanceof DragManager.AnnotationDragData) { if (de.data.dropDocument) { - let dragDoc = de.data.dropDocument; - let x = xp - de.data.offset[0]; - let y = yp - de.data.offset[1]; - let dropX = NumCast(dragDoc.x); - let dropY = NumCast(dragDoc.y); + const dragDoc = de.data.dropDocument; + const x = xp - de.data.offset[0]; + const y = yp - de.data.offset[1]; + const dropX = NumCast(dragDoc.x); + const dropY = NumCast(dragDoc.y); dragDoc.x = x + NumCast(dragDoc.x) - dropX; dragDoc.y = y + NumCast(dragDoc.y) - dropY; de.data.targetContext = this.props.Document; // dropped a PDF annotation, so we need to set the targetContext on the dragData which the PDF view uses at the end of the drop operation @@ -183,23 +184,23 @@ export class CollectionFreeFormView extends CollectionSubView(PanZoomDocument) { pickCluster(probe: number[]) { return this.childLayoutPairs.map(pair => pair.layout).reduce((cluster, cd) => { - let layoutDoc = Doc.Layout(cd); - let cx = NumCast(cd.x) - this._clusterDistance; - let cy = NumCast(cd.y) - this._clusterDistance; - let cw = NumCast(layoutDoc.width) + 2 * this._clusterDistance; - let ch = NumCast(layoutDoc.height) + 2 * this._clusterDistance; + const layoutDoc = Doc.Layout(cd); + const cx = NumCast(cd.x) - this._clusterDistance; + const cy = NumCast(cd.y) - this._clusterDistance; + const cw = NumCast(layoutDoc.width) + 2 * this._clusterDistance; + const ch = NumCast(layoutDoc.height) + 2 * this._clusterDistance; return !layoutDoc.z && intersectRect({ left: cx, top: cy, width: cw, height: ch }, { left: probe[0], top: probe[1], width: 1, height: 1 }) ? NumCast(cd.cluster) : cluster; }, -1); } tryDragCluster(e: PointerEvent | TouchEvent) { - let ptsParent = e instanceof PointerEvent ? e : e.targetTouches.item(0); + const ptsParent = e instanceof PointerEvent ? 
e : e.targetTouches.item(0); if (ptsParent) { - let cluster = this.pickCluster(this.getTransform().transformPoint(ptsParent.clientX, ptsParent.clientY)); + const cluster = this.pickCluster(this.getTransform().transformPoint(ptsParent.clientX, ptsParent.clientY)); if (cluster !== -1) { - let eles = this.childLayoutPairs.map(pair => pair.layout).filter(cd => NumCast(cd.cluster) === cluster); - let clusterDocs = eles.map(ele => DocumentManager.Instance.getDocumentView(ele, this.props.CollectionView)!); - let de = new DragManager.DocumentDragData(eles); + const eles = this.childLayoutPairs.map(pair => pair.layout).filter(cd => NumCast(cd.cluster) === cluster); + const clusterDocs = eles.map(ele => DocumentManager.Instance.getDocumentView(ele, this.props.CollectionView)!); + const de = new DragManager.DocumentDragData(eles); de.moveDocument = this.props.moveDocument; const [left, top] = clusterDocs[0].props.ScreenToLocalTransform().scale(clusterDocs[0].props.ContentScaling()).inverse().transformPoint(0, 0); de.offset = this.getTransform().transformDirection(ptsParent.clientX - left, ptsParent.clientY - top); @@ -225,10 +226,10 @@ export class CollectionFreeFormView extends CollectionSubView(PanZoomDocument) { @undoBatch @action updateCluster(doc: Doc) { - let childLayouts = this.childLayoutPairs.map(pair => pair.layout); + const childLayouts = this.childLayoutPairs.map(pair => pair.layout); if (this.props.Document.useClusters) { this._clusterSets.map(set => Doc.IndexOf(doc, set) !== -1 && set.splice(Doc.IndexOf(doc, set), 1)); - let preferredInd = NumCast(doc.cluster); + const preferredInd = NumCast(doc.cluster); doc.cluster = -1; this._clusterSets.map((set, i) => set.map(member => { if (doc.cluster === -1 && Doc.IndexOf(member, childLayouts) !== -1 && Doc.overlapping(doc, member, this._clusterDistance)) { @@ -255,15 +256,15 @@ export class CollectionFreeFormView extends CollectionSubView(PanZoomDocument) { getClusterColor = (doc: Doc) => { let clusterColor = ""; - let cluster = NumCast(doc.cluster); + const cluster = NumCast(doc.cluster); if (this.Document.useClusters) { if (this._clusterSets.length <= cluster) { setTimeout(() => this.updateCluster(doc), 0); } else { // choose a cluster color from a palette - let colors = ["#da42429e", "#31ea318c", "#8c4000", "#4a7ae2c4", "#d809ff", "#ff7601", "#1dffff", "yellow", "#1b8231f2", "#000000ad"]; + const colors = ["#da42429e", "#31ea318c", "#8c4000", "#4a7ae2c4", "#d809ff", "#ff7601", "#1dffff", "yellow", "#1b8231f2", "#000000ad"]; clusterColor = colors[cluster % colors.length]; - let set = this._clusterSets[cluster] && this._clusterSets[cluster].filter(s => s.backgroundColor && (s.backgroundColor !== s.defaultBackgroundColor)); + const set = this._clusterSets[cluster] && this._clusterSets[cluster].filter(s => s.backgroundColor && (s.backgroundColor !== s.defaultBackgroundColor)); // override the cluster color with an explicitly set color on a non-background document. 
then override that with an explicitly set color on a background document set && set.filter(s => !s.isBackground).map(s => clusterColor = StrCast(s.backgroundColor)); set && set.filter(s => s.isBackground).map(s => clusterColor = StrCast(s.backgroundColor)); @@ -287,7 +288,7 @@ export class CollectionFreeFormView extends CollectionSubView(PanZoomDocument) { if (InteractionUtils.IsType(e, InteractionUtils.PENTYPE) || (InkingControl.Instance.selectedTool === InkTool.Highlighter || InkingControl.Instance.selectedTool === InkTool.Pen)) { e.stopPropagation(); e.preventDefault(); - let point = this.getTransform().transformPoint(e.pageX, e.pageY); + const point = this.getTransform().transformPoint(e.pageX, e.pageY); this._points.push({ x: point[0], y: point[1] }); } // if not using a pen and in no ink mode @@ -324,7 +325,7 @@ export class CollectionFreeFormView extends CollectionSubView(PanZoomDocument) { @action handle1PointerDown = (e: React.TouchEvent) => { - let pt = e.targetTouches.item(0); + const pt = e.targetTouches.item(0); if (pt) { this._hitCluster = this.props.Document.useCluster ? this.pickCluster(this.getTransform().transformPoint(pt.clientX, pt.clientY)) !== -1 : false; } @@ -335,9 +336,9 @@ export class CollectionFreeFormView extends CollectionSubView(PanZoomDocument) { if (InteractionUtils.IsType(e, InteractionUtils.TOUCHTYPE) && this._points.length <= 1) return; if (this._points.length > 1) { - let B = this.svgBounds; - let points = this._points.map(p => ({ x: p.x - B.left, y: p.y - B.top })); - let inkDoc = Docs.Create.InkDocument(InkingControl.Instance.selectedColor, InkingControl.Instance.selectedTool, parseInt(InkingControl.Instance.selectedWidth), points, { width: B.width, height: B.height, x: B.left, y: B.top }); + const B = this.svgBounds; + const points = this._points.map(p => ({ x: p.x - B.left, y: p.y - B.top })); + const inkDoc = Docs.Create.InkDocument(InkingControl.Instance.selectedColor, InkingControl.Instance.selectedTool, parseInt(InkingControl.Instance.selectedWidth), points, { width: B.width, height: B.height, x: B.left, y: B.top }); this.addDocument(inkDoc); this._points = []; } @@ -355,26 +356,26 @@ export class CollectionFreeFormView extends CollectionSubView(PanZoomDocument) { let x = this.Document.panX || 0; let y = this.Document.panY || 0; - let docs = this.childLayoutPairs.map(pair => pair.layout); - let [dx, dy] = this.getTransform().transformDirection(e.clientX - this._lastX, e.clientY - this._lastY); + const docs = this.childLayoutPairs.map(pair => pair.layout); + const [dx, dy] = this.getTransform().transformDirection(e.clientX - this._lastX, e.clientY - this._lastY); if (!this.isAnnotationOverlay) { PDFMenu.Instance.fadeOut(true); - let minx = docs.length ? NumCast(docs[0].x) : 0; - let maxx = docs.length ? NumCast(docs[0].width) + minx : minx; - let miny = docs.length ? NumCast(docs[0].y) : 0; - let maxy = docs.length ? NumCast(docs[0].height) + miny : miny; - let ranges = docs.filter(doc => doc).reduce((range, doc) => { - let layoutDoc = Doc.Layout(doc); - let x = NumCast(doc.x); - let xe = x + NumCast(layoutDoc.width); - let y = NumCast(doc.y); - let ye = y + NumCast(layoutDoc.height); + const minx = docs.length ? NumCast(docs[0].x) : 0; + const maxx = docs.length ? NumCast(docs[0].width) + minx : minx; + const miny = docs.length ? NumCast(docs[0].y) : 0; + const maxy = docs.length ? 
NumCast(docs[0].height) + miny : miny; + const ranges = docs.filter(doc => doc).reduce((range, doc) => { + const layoutDoc = Doc.Layout(doc); + const x = NumCast(doc.x); + const xe = x + NumCast(layoutDoc.width); + const y = NumCast(doc.y); + const ye = y + NumCast(layoutDoc.height); return [[range[0][0] > x ? x : range[0][0], range[0][1] < xe ? xe : range[0][1]], [range[1][0] > y ? y : range[1][0], range[1][1] < ye ? ye : range[1][1]]]; }, [[minx, maxx], [miny, maxy]]); - let cscale = this.props.ContainingCollectionDoc ? NumCast(this.props.ContainingCollectionDoc.scale) : 1; - let panelDim = this.props.ScreenToLocalTransform().transformDirection(this.props.PanelWidth() / this.zoomScaling() * cscale, + const cscale = this.props.ContainingCollectionDoc ? NumCast(this.props.ContainingCollectionDoc.scale) : 1; + const panelDim = this.props.ScreenToLocalTransform().transformDirection(this.props.PanelWidth() / this.zoomScaling() * cscale, this.props.PanelHeight() / this.zoomScaling() * cscale); if (ranges[0][0] - dx > (this.panX() + panelDim[0] / 2)) x = ranges[0][1] + panelDim[0] / 2; if (ranges[0][1] - dx < (this.panX() - panelDim[0] / 2)) x = ranges[0][0] - panelDim[0] / 2; @@ -397,7 +398,7 @@ export class CollectionFreeFormView extends CollectionSubView(PanZoomDocument) { if (!e.cancelBubble) { const selectedTool = InkingControl.Instance.selectedTool; if (selectedTool === InkTool.Highlighter || selectedTool === InkTool.Pen || InteractionUtils.IsType(e, InteractionUtils.PENTYPE)) { - let point = this.getTransform().transformPoint(e.clientX, e.clientY); + const point = this.getTransform().transformPoint(e.clientX, e.clientY); this._points.push({ x: point[0], y: point[1] }); } else if (selectedTool === InkTool.None) { @@ -418,7 +419,7 @@ export class CollectionFreeFormView extends CollectionSubView(PanZoomDocument) { handle1PointerMove = (e: TouchEvent) => { // panning a workspace if (!e.cancelBubble) { - let pt = e.targetTouches.item(0); + const pt = e.targetTouches.item(0); if (pt) { if (InkingControl.Instance.selectedTool === InkTool.None) { if (this._hitCluster && this.tryDragCluster(e)) { @@ -431,7 +432,7 @@ export class CollectionFreeFormView extends CollectionSubView(PanZoomDocument) { this.pan(pt); } else if (InkingControl.Instance.selectedTool !== InkTool.Eraser && InkingControl.Instance.selectedTool !== InkTool.Scrubber) { - let point = this.getTransform().transformPoint(pt.clientX, pt.clientY); + const point = this.getTransform().transformPoint(pt.clientX, pt.clientY); this._points.push({ x: point[0], y: point[1] }); } } @@ -443,28 +444,28 @@ export class CollectionFreeFormView extends CollectionSubView(PanZoomDocument) { handle2PointersMove = (e: TouchEvent) => { // pinch zooming if (!e.cancelBubble) { - let pt1: Touch | null = e.targetTouches.item(0); - let pt2: Touch | null = e.targetTouches.item(1); + const pt1: Touch | null = e.targetTouches.item(0); + const pt2: Touch | null = e.targetTouches.item(1); if (!pt1 || !pt2) return; if (this.prevPoints.size === 2) { - let oldPoint1 = this.prevPoints.get(pt1.identifier); - let oldPoint2 = this.prevPoints.get(pt2.identifier); + const oldPoint1 = this.prevPoints.get(pt1.identifier); + const oldPoint2 = this.prevPoints.get(pt2.identifier); if (oldPoint1 && oldPoint2) { - let dir = InteractionUtils.Pinching(pt1, pt2, oldPoint1, oldPoint2); + const dir = InteractionUtils.Pinching(pt1, pt2, oldPoint1, oldPoint2); // if zooming, zoom if (dir !== 0) { - let d1 = Math.sqrt(Math.pow(pt1.clientX - oldPoint1.clientX, 2) + Math.pow(pt1.clientY - 
oldPoint1.clientY, 2)); - let d2 = Math.sqrt(Math.pow(pt2.clientX - oldPoint2.clientX, 2) + Math.pow(pt2.clientY - oldPoint2.clientY, 2)); - let centerX = Math.min(pt1.clientX, pt2.clientX) + Math.abs(pt2.clientX - pt1.clientX) / 2; - let centerY = Math.min(pt1.clientY, pt2.clientY) + Math.abs(pt2.clientY - pt1.clientY) / 2; + const d1 = Math.sqrt(Math.pow(pt1.clientX - oldPoint1.clientX, 2) + Math.pow(pt1.clientY - oldPoint1.clientY, 2)); + const d2 = Math.sqrt(Math.pow(pt2.clientX - oldPoint2.clientX, 2) + Math.pow(pt2.clientY - oldPoint2.clientY, 2)); + const centerX = Math.min(pt1.clientX, pt2.clientX) + Math.abs(pt2.clientX - pt1.clientX) / 2; + const centerY = Math.min(pt1.clientY, pt2.clientY) + Math.abs(pt2.clientY - pt1.clientY) / 2; // calculate the raw delta value - let rawDelta = (dir * (d1 + d2)); + const rawDelta = (dir * (d1 + d2)); // this floors and ceils the delta value to prevent jitteriness - let delta = Math.sign(rawDelta) * Math.min(Math.abs(rawDelta), 16); + const delta = Math.sign(rawDelta) * Math.min(Math.abs(rawDelta), 16); this.zoom(centerX, centerY, delta); this.prevPoints.set(pt1.identifier, pt1); this.prevPoints.set(pt2.identifier, pt2); @@ -472,8 +473,8 @@ export class CollectionFreeFormView extends CollectionSubView(PanZoomDocument) { // this is not zooming. derive some form of panning from it. else { // use the centerx and centery as the "new mouse position" - let centerX = Math.min(pt1.clientX, pt2.clientX) + Math.abs(pt2.clientX - pt1.clientX) / 2; - let centerY = Math.min(pt1.clientY, pt2.clientY) + Math.abs(pt2.clientY - pt1.clientY) / 2; + const centerX = Math.min(pt1.clientX, pt2.clientX) + Math.abs(pt2.clientX - pt1.clientX) / 2; + const centerY = Math.min(pt1.clientY, pt2.clientY) + Math.abs(pt2.clientY - pt1.clientY) / 2; this.pan({ clientX: centerX, clientY: centerY }); this._lastX = centerX; this._lastY = centerY; @@ -486,12 +487,12 @@ export class CollectionFreeFormView extends CollectionSubView(PanZoomDocument) { } handle2PointersDown = (e: React.TouchEvent) => { - let pt1: React.Touch | null = e.targetTouches.item(0); - let pt2: React.Touch | null = e.targetTouches.item(1); + const pt1: React.Touch | null = e.targetTouches.item(0); + const pt2: React.Touch | null = e.targetTouches.item(1); if (!pt1 || !pt2) return; - let centerX = Math.min(pt1.clientX, pt2.clientX) + Math.abs(pt2.clientX - pt1.clientX) / 2; - let centerY = Math.min(pt1.clientY, pt2.clientY) + Math.abs(pt2.clientY - pt1.clientY) / 2; + const centerX = Math.min(pt1.clientX, pt2.clientX) + Math.abs(pt2.clientX - pt1.clientX) / 2; + const centerY = Math.min(pt1.clientY, pt2.clientY) + Math.abs(pt2.clientY - pt1.clientY) / 2; this._lastX = centerX; this._lastY = centerY; } @@ -510,11 +511,11 @@ export class CollectionFreeFormView extends CollectionSubView(PanZoomDocument) { deltaScale = 1 / this.zoomScaling(); } if (deltaScale < 0) deltaScale = -deltaScale; - let [x, y] = this.getTransform().transformPoint(pointX, pointY); - let localTransform = this.getLocalTransform().inverse().scaleAbout(deltaScale, x, y); + const [x, y] = this.getTransform().transformPoint(pointX, pointY); + const localTransform = this.getLocalTransform().inverse().scaleAbout(deltaScale, x, y); if (localTransform.Scale >= 0.15) { - let safeScale = Math.min(Math.max(0.15, localTransform.Scale), 40); + const safeScale = Math.min(Math.max(0.15, localTransform.Scale), 40); this.props.Document.scale = Math.abs(safeScale); this.setPan(-localTransform.TranslateX / safeScale, -localTransform.TranslateY / safeScale); } 
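The zoom hunk above clamps the resulting scale to a safe range before committing the pan. A self-contained sketch of that clamp-then-recenter logic, using a simplified pan/zoom model rather than Dash's Transform class (all names below are illustrative assumptions, not the project's API):

    interface PanZoomState { panX: number; panY: number; scale: number; }

    // Zoom about a screen point, clamping the new scale to [minScale, maxScale] and
    // recomputing the pan so the content under the pointer stays put -- the same idea
    // as the safeScale logic in the hunk above, in simplified coordinates.
    function zoomAbout(state: PanZoomState, pointX: number, pointY: number,
        deltaScale: number, minScale = 0.15, maxScale = 40): PanZoomState {
        // content-space coordinates of the point currently under the pointer
        const contentX = state.panX + pointX / state.scale;
        const contentY = state.panY + pointY / state.scale;
        const safeScale = Math.min(Math.max(minScale, state.scale * deltaScale), maxScale);
        // choose the pan that keeps (contentX, contentY) under the same screen point
        return {
            scale: safeScale,
            panX: contentX - pointX / safeScale,
            panY: contentY - pointY / safeScale,
        };
    }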
@@ -536,7 +537,7 @@ export class CollectionFreeFormView extends CollectionSubView(PanZoomDocument) { setPan(panX: number, panY: number, panType: string = "None") { if (!this.Document.lockedTransform || this.Document.inOverlay) { this.Document.panTransformType = panType; - var scale = this.getLocalTransform().inverse().Scale; + const scale = this.getLocalTransform().inverse().Scale; const newPanX = Math.min((1 - 1 / scale) * this.nativeWidth, Math.max(0, panX)); const newPanY = Math.min((this.props.Document.scrollHeight !== undefined ? NumCast(this.Document.scrollHeight) : (1 - 1 / scale) * this.nativeHeight), Math.max(0, panY)); this.Document.panX = this.isAnnotationOverlay ? newPanX : panX; @@ -576,23 +577,23 @@ export class CollectionFreeFormView extends CollectionSubView(PanZoomDocument) { } SelectionManager.DeselectAll(); if (this.props.Document.scrollHeight) { - let annotOn = Cast(doc.annotationOn, Doc) as Doc; + const annotOn = Cast(doc.annotationOn, Doc) as Doc; if (!annotOn) { this.props.focus(doc); } else { - let contextHgt = Doc.AreProtosEqual(annotOn, this.props.Document) && this.props.VisibleHeight ? this.props.VisibleHeight() : NumCast(annotOn.height); - let offset = annotOn && (contextHgt / 2 * 96 / 72); + const contextHgt = Doc.AreProtosEqual(annotOn, this.props.Document) && this.props.VisibleHeight ? this.props.VisibleHeight() : NumCast(annotOn.height); + const offset = annotOn && (contextHgt / 2 * 96 / 72); this.props.Document.scrollY = NumCast(doc.y) - offset; } } else { - let layoutdoc = Doc.Layout(doc); + const layoutdoc = Doc.Layout(doc); const newPanX = NumCast(doc.x) + NumCast(layoutdoc.width) / 2; const newPanY = NumCast(doc.y) + NumCast(layoutdoc.height) / 2; const newState = HistoryUtil.getState(); newState.initializers![this.Document[Id]] = { panX: newPanX, panY: newPanY }; HistoryUtil.pushState(newState); - let savedState = { px: this.Document.panX, py: this.Document.panY, s: this.Document.scale, pt: this.Document.panTransformType }; + const savedState = { px: this.Document.panX, py: this.Document.panY, s: this.Document.scale, pt: this.Document.panTransformType }; this.setPan(newPanX, newPanY, "Ease"); Doc.BrushDoc(this.props.Document); @@ -677,7 +678,7 @@ export class CollectionFreeFormView extends CollectionSubView(PanZoomDocument) { } } - childDataProvider = computedFn(function childDataProvider(doc: Doc) { return (this as any)._layoutPoolData.get(doc[Id]); }.bind(this)); + childDataProvider = computedFn(function childDataProvider(this: any, doc: Doc) { return this._layoutPoolData.get(doc[Id]); }.bind(this)); doPivotLayout(poolData: ObservableMap) { return computePivotLayout(poolData, this.props.Document, this.childDocs, @@ -685,10 +686,10 @@ export class CollectionFreeFormView extends CollectionSubView(PanZoomDocument) { } doFreeformLayout(poolData: ObservableMap) { - let layoutDocs = this.childLayoutPairs.map(pair => pair.layout); + const layoutDocs = this.childLayoutPairs.map(pair => pair.layout); const initResult = this.Document.arrangeInit && this.Document.arrangeInit.script.run({ docs: layoutDocs, collection: this.Document }, console.log); let state = initResult && initResult.success ? initResult.result.scriptState : undefined; - let elements = initResult && initResult.success ? this.viewDefsToJSX(initResult.result.views) : []; + const elements = initResult && initResult.success ? 
this.viewDefsToJSX(initResult.result.views) : []; this.childLayoutPairs.filter(pair => this.isCurrent(pair.layout)).map((pair, i) => { const data = poolData.get(pair.layout[Id]); @@ -737,7 +738,7 @@ export class CollectionFreeFormView extends CollectionSubView(PanZoomDocument) { layoutDocsInGrid = () => { UndoManager.RunInBatch(() => { const docs = DocListCast(this.Document[this.props.fieldKey]); - let startX = this.Document.panX || 0; + const startX = this.Document.panX || 0; let x = startX; let y = this.Document.panY || 0; let i = 0; @@ -762,8 +763,8 @@ export class CollectionFreeFormView extends CollectionSubView(PanZoomDocument) { this.Document.isRuleProvider && this.childLayoutPairs.map(pair => // iterate over the children of a displayed document (or if the displayed document is a template, iterate over the children of that template) DocListCast(Doc.Layout(pair.layout).data).map(heading => { - let headingPair = Doc.GetLayoutDataDocPair(this.props.Document, this.props.DataDoc, this.props.fieldKey, heading); - let headingLayout = headingPair.layout && (pair.layout.data_ext instanceof Doc) && (pair.layout.data_ext[`Layout[${headingPair.layout[Id]}]`] as Doc) || headingPair.layout; + const headingPair = Doc.GetLayoutDataDocPair(this.props.Document, this.props.DataDoc, this.props.fieldKey, heading); + const headingLayout = headingPair.layout && (pair.layout.data_ext instanceof Doc) && (pair.layout.data_ext[`Layout[${headingPair.layout[Id]}]`] as Doc) || headingPair.layout; if (headingLayout && NumCast(headingLayout.heading) > 0 && headingLayout.backgroundColor !== headingLayout.defaultBackgroundColor) { Doc.GetProto(this.props.Document)["ruleColor_" + NumCast(headingLayout.heading)] = headingLayout.backgroundColor; } @@ -772,11 +773,23 @@ export class CollectionFreeFormView extends CollectionSubView(PanZoomDocument) { } analyzeStrokes = async () => { - // CognitiveServices.Inking.Appliers.ConcatenateHandwriting(this.dataDoc, ["inkAnalysis", "handwriting"], data.inkData); + const children = await DocListCastAsync(this.dataDoc.data); + if (!children) { + return; + } + const inkData: InkData[] = []; + for (const doc of children) { + const data = Cast(doc.data, InkField)?.inkData; + data && inkData.push(data); + } + if (!inkData.length) { + return; + } + CognitiveServices.Inking.Appliers.ConcatenateHandwriting(this.dataDoc, ["inkAnalysis", "handwriting"], inkData); } onContextMenu = (e: React.MouseEvent) => { - let layoutItems: ContextMenuProps[] = []; + const layoutItems: ContextMenuProps[] = []; if (this.childDocs.some(d => BoolCast(d.isTemplateDoc))) { layoutItems.push({ description: "Template Layout Instance", event: () => this.props.addDocTab(Doc.ApplyTemplate(this.props.Document)!, undefined, "onRight"), icon: "project-diagram" }); @@ -795,7 +808,7 @@ export class CollectionFreeFormView extends CollectionSubView(PanZoomDocument) { input.accept = ".zip"; input.onchange = async _e => { const upload = Utils.prepend("/uploadDoc"); - let formData = new FormData(); + const formData = new FormData(); const file = input.files && input.files[0]; if (file) { formData.append('file', file); @@ -830,7 +843,7 @@ export class CollectionFreeFormView extends CollectionSubView(PanZoomDocument) { private childViews = () => { - let children = typeof this.props.children === "function" ? (this.props.children as any)() as JSX.Element[] : []; + const children = typeof this.props.children === "function" ? 
(this.props.children as any)() as JSX.Element[] : []; return [ ...children, ...this.views, @@ -838,12 +851,12 @@ export class CollectionFreeFormView extends CollectionSubView(PanZoomDocument) { } @computed get svgBounds() { - let xs = this._points.map(p => p.x); - let ys = this._points.map(p => p.y); - let right = Math.max(...xs); - let left = Math.min(...xs); - let bottom = Math.max(...ys); - let top = Math.min(...ys); + const xs = this._points.map(p => p.x); + const ys = this._points.map(p => p.y); + const right = Math.max(...xs); + const left = Math.min(...xs); + const bottom = Math.max(...ys); + const top = Math.min(...ys); return { right: right, left: left, bottom: bottom, top: top, width: right - left, height: bottom - top }; } @@ -852,7 +865,7 @@ export class CollectionFreeFormView extends CollectionSubView(PanZoomDocument) { return (null); } - let B = this.svgBounds; + const B = this.svgBounds; return ( @@ -862,7 +875,7 @@ export class CollectionFreeFormView extends CollectionSubView(PanZoomDocument) { } children = () => { - let eles: JSX.Element[] = []; + const eles: JSX.Element[] = []; this.extensionDoc && (eles.push(...this.childViews())); this.currentStroke && (eles.push(this.currentStroke)); eles.push(); @@ -917,7 +930,7 @@ interface CollectionFreeFormViewPannableContentsProps { @observer class CollectionFreeFormViewPannableContents extends React.Component{ render() { - let freeformclass = "collectionfreeformview" + (this.props.easing() ? "-ease" : "-none"); + const freeformclass = "collectionfreeformview" + (this.props.easing() ? "-ease" : "-none"); const cenx = this.props.centeringShiftX(); const ceny = this.props.centeringShiftY(); const panx = -this.props.panX(); diff --git a/src/client/views/collections/collectionFreeForm/MarqueeOptionsMenu.tsx b/src/client/views/collections/collectionFreeForm/MarqueeOptionsMenu.tsx index 28ddc19d7..32e39d25e 100644 --- a/src/client/views/collections/collectionFreeForm/MarqueeOptionsMenu.tsx +++ b/src/client/views/collections/collectionFreeForm/MarqueeOptionsMenu.tsx @@ -21,7 +21,7 @@ export default class MarqueeOptionsMenu extends AntimodeMenu { } render() { - let buttons = [ + const buttons = [
    this.onOptionClick(groupType, false)}>{groupType}
    ; }); // if search term does not already exist as a group type, give option to create new group type if (!exactFound && this._searchTerm !== "") { - let ref = React.createRef(); + const ref = React.createRef(); options.push(
    this.onOptionClick(this._searchTerm, true)}>Define new "{this._searchTerm}" relationship
    ); } @@ -138,10 +138,10 @@ class LinkMetadataEditor extends React.Component { @action setMetadataKey = (value: string): void => { - let groupMdKeys = LinkManager.Instance.getMetadataKeysInGroup(this.props.groupType); + const groupMdKeys = LinkManager.Instance.getMetadataKeysInGroup(this.props.groupType); // don't allow user to create existing key - let newIndex = groupMdKeys.findIndex(key => key.toUpperCase() === value.toUpperCase()); + const newIndex = groupMdKeys.findIndex(key => key.toUpperCase() === value.toUpperCase()); if (newIndex > -1) { this._keyError = true; this._key = value; @@ -151,7 +151,7 @@ class LinkMetadataEditor extends React.Component { } // set new value for key - let currIndex = groupMdKeys.findIndex(key => { + const currIndex = groupMdKeys.findIndex(key => { return StrCast(key).toUpperCase() === this._key.toUpperCase(); }); if (currIndex === -1) console.error("LinkMetadataEditor: key was not found"); @@ -172,9 +172,9 @@ class LinkMetadataEditor extends React.Component { @action removeMetadata = (): void => { - let groupMdKeys = LinkManager.Instance.getMetadataKeysInGroup(this.props.groupType); + const groupMdKeys = LinkManager.Instance.getMetadataKeysInGroup(this.props.groupType); - let index = groupMdKeys.findIndex(key => key.toUpperCase() === this._key.toUpperCase()); + const index = groupMdKeys.findIndex(key => key.toUpperCase() === this._key.toUpperCase()); if (index === -1) console.error("LinkMetadataEditor: key was not found"); groupMdKeys.splice(index, 1); @@ -206,7 +206,7 @@ export class LinkGroupEditor extends React.Component { constructor(props: LinkGroupEditorProps) { super(props); - let groupMdKeys = LinkManager.Instance.getMetadataKeysInGroup(StrCast(props.groupDoc.type)); + const groupMdKeys = LinkManager.Instance.getMetadataKeysInGroup(StrCast(props.groupDoc.type)); groupMdKeys.forEach(key => { this._metadataIds.set(key, Utils.GenerateGuid()); }); @@ -226,25 +226,25 @@ export class LinkGroupEditor extends React.Component { } copyGroup = async (groupType: string): Promise => { - let sourceGroupDoc = this.props.groupDoc; + const sourceGroupDoc = this.props.groupDoc; const sourceMdDoc = await Cast(sourceGroupDoc.metadata, Doc); if (!sourceMdDoc) return; - let destDoc = LinkManager.Instance.getOppositeAnchor(this.props.linkDoc, this.props.sourceDoc); + const destDoc = LinkManager.Instance.getOppositeAnchor(this.props.linkDoc, this.props.sourceDoc); // let destGroupList = LinkManager.Instance.getAnchorGroups(this.props.linkDoc, destDoc); - let keys = LinkManager.Instance.getMetadataKeysInGroup(groupType); + const keys = LinkManager.Instance.getMetadataKeysInGroup(groupType); // create new metadata doc with copied kvp - let destMdDoc = new Doc(); + const destMdDoc = new Doc(); destMdDoc.anchor1 = StrCast(sourceMdDoc.anchor2); destMdDoc.anchor2 = StrCast(sourceMdDoc.anchor1); keys.forEach(key => { - let val = sourceMdDoc[key] === undefined ? "" : StrCast(sourceMdDoc[key]); + const val = sourceMdDoc[key] === undefined ? 
"" : StrCast(sourceMdDoc[key]); destMdDoc[key] = val; }); // create new group doc with new metadata doc - let destGroupDoc = new Doc(); + const destGroupDoc = new Doc(); destGroupDoc.type = groupType; destGroupDoc.metadata = destMdDoc; @@ -256,7 +256,7 @@ export class LinkGroupEditor extends React.Component { @action addMetadata = (groupType: string): void => { this._metadataIds.set("new key", Utils.GenerateGuid()); - let mdKeys = LinkManager.Instance.getMetadataKeysInGroup(groupType); + const mdKeys = LinkManager.Instance.getMetadataKeysInGroup(groupType); // only add "new key" if there is no other key with value "new key"; prevents spamming if (mdKeys.indexOf("new key") === -1) mdKeys.push("new key"); LinkManager.Instance.setMetadataKeysForGroup(groupType, mdKeys); @@ -268,17 +268,17 @@ export class LinkGroupEditor extends React.Component { } renderMetadata = (): JSX.Element[] => { - let metadata: Array = []; - let groupDoc = this.props.groupDoc; + const metadata: Array = []; + const groupDoc = this.props.groupDoc; const mdDoc = FieldValue(Cast(groupDoc.metadata, Doc)); if (!mdDoc) { return []; } - let groupType = StrCast(groupDoc.type); - let groupMdKeys = LinkManager.Instance.getMetadataKeysInGroup(groupType); + const groupType = StrCast(groupDoc.type); + const groupMdKeys = LinkManager.Instance.getMetadataKeysInGroup(groupType); groupMdKeys.forEach((key) => { - let val = StrCast(mdDoc[key]); + const val = StrCast(mdDoc[key]); metadata.push( ); @@ -287,18 +287,18 @@ export class LinkGroupEditor extends React.Component { } viewGroupAsTable = (groupType: string): JSX.Element => { - let keys = LinkManager.Instance.getMetadataKeysInGroup(groupType); - let index = keys.indexOf(""); + const keys = LinkManager.Instance.getMetadataKeysInGroup(groupType); + const index = keys.indexOf(""); if (index > -1) keys.splice(index, 1); - let cols = ["anchor1", "anchor2", ...[...keys]].map(c => new SchemaHeaderField(c, "#f1efeb")); - let docs: Doc[] = LinkManager.Instance.getAllMetadataDocsInGroup(groupType); - let createTable = action(() => Docs.Create.SchemaDocument(cols, docs, { width: 500, height: 300, title: groupType + " table" })); - let ref = React.createRef(); + const cols = ["anchor1", "anchor2", ...[...keys]].map(c => new SchemaHeaderField(c, "#f1efeb")); + const docs: Doc[] = LinkManager.Instance.getAllMetadataDocsInGroup(groupType); + const createTable = action(() => Docs.Create.SchemaDocument(cols, docs, { width: 500, height: 300, title: groupType + " table" })); + const ref = React.createRef(); return
    ; } render() { - let groupType = StrCast(this.props.groupDoc.type); + const groupType = StrCast(this.props.groupDoc.type); // if ((groupType && LinkManager.Instance.getMetadataKeysInGroup(groupType).length > 0) || groupType === "") { let buttons; if (groupType === "") { @@ -356,15 +356,15 @@ export class LinkEditor extends React.Component { @action addGroup = (): void => { // create new metadata document for group - let mdDoc = new Doc(); + const mdDoc = new Doc(); mdDoc.anchor1 = this.props.sourceDoc.title; - let opp = LinkManager.Instance.getOppositeAnchor(this.props.linkDoc, this.props.sourceDoc); + const opp = LinkManager.Instance.getOppositeAnchor(this.props.linkDoc, this.props.sourceDoc); if (opp) { mdDoc.anchor2 = opp.title; } // create new group document - let groupDoc = new Doc(); + const groupDoc = new Doc(); groupDoc.type = ""; groupDoc.metadata = mdDoc; @@ -372,10 +372,10 @@ export class LinkEditor extends React.Component { } render() { - let destination = LinkManager.Instance.getOppositeAnchor(this.props.linkDoc, this.props.sourceDoc); + const destination = LinkManager.Instance.getOppositeAnchor(this.props.linkDoc, this.props.sourceDoc); - let groupList = LinkManager.Instance.getAnchorGroups(this.props.linkDoc, this.props.sourceDoc); - let groups = groupList.map(groupDoc => { + const groupList = LinkManager.Instance.getAnchorGroups(this.props.linkDoc, this.props.sourceDoc); + const groups = groupList.map(groupDoc => { return ; }); diff --git a/src/client/views/linking/LinkFollowBox.tsx b/src/client/views/linking/LinkFollowBox.tsx index efe2c7f2a..29e167ff7 100644 --- a/src/client/views/linking/LinkFollowBox.tsx +++ b/src/client/views/linking/LinkFollowBox.tsx @@ -68,14 +68,14 @@ export class LinkFollowBox extends React.Component { this._contextDisposer = reaction( () => this.selectedContextString, async () => { - let ref = await DocServer.GetRefField(this.selectedContextString); + const ref = await DocServer.GetRefField(this.selectedContextString); runInAction(() => { if (ref instanceof Doc) { this.selectedContext = ref; } }); if (this.selectedContext instanceof Doc) { - let aliases = await SearchUtil.GetViewsOfDocument(this.selectedContext); + const aliases = await SearchUtil.GetViewsOfDocument(this.selectedContext); runInAction(() => { this.selectedContextAliases = aliases; }); } } @@ -90,8 +90,8 @@ export class LinkFollowBox extends React.Component { if (LinkFollowBox.destinationDoc && this.sourceView && this.sourceView.props.ContainingCollectionDoc) { runInAction(() => this.canPan = false); if (this.sourceView.props.ContainingCollectionDoc.viewType === CollectionViewType.Freeform) { - let docs = Cast(this.sourceView.props.ContainingCollectionDoc.data, listSpec(Doc), []); - let aliases = await SearchUtil.GetViewsOfDocument(Doc.GetProto(LinkFollowBox.destinationDoc)); + const docs = Cast(this.sourceView.props.ContainingCollectionDoc.data, listSpec(Doc), []); + const aliases = await SearchUtil.GetViewsOfDocument(Doc.GetProto(LinkFollowBox.destinationDoc)); aliases.forEach(alias => { if (docs.filter(doc => doc === alias).length > 0) { @@ -118,8 +118,8 @@ export class LinkFollowBox extends React.Component { async fetchDocuments() { if (LinkFollowBox.destinationDoc) { - let dest: Doc = LinkFollowBox.destinationDoc; - let aliases = await SearchUtil.GetViewsOfDocument(Doc.GetProto(dest)); + const dest: Doc = LinkFollowBox.destinationDoc; + const aliases = await SearchUtil.GetViewsOfDocument(Doc.GetProto(dest)); const { docs } = await SearchUtil.Search("", true, { fq: 
`data_l:"${dest[Id]}"` }); const map: Map = new Map; const allDocs = await Promise.all(aliases.map(doc => SearchUtil.Search("", true, { fq: `data_l:"${doc[Id]}"` }).then(result => result.docs))); @@ -128,7 +128,7 @@ export class LinkFollowBox extends React.Component { runInAction(async () => { this._docs = docs.filter(doc => !Doc.AreProtosEqual(doc, CollectionDockingView.Instance.props.Document)).map(doc => ({ col: doc, target: dest })); this._otherDocs = Array.from(map.entries()).filter(entry => !Doc.AreProtosEqual(entry[0], CollectionDockingView.Instance.props.Document)).map(([col, target]) => ({ col, target })); - let tcontext = LinkFollowBox.linkDoc && (await Cast(LinkFollowBox.linkDoc.anchor2Context, Doc)) as Doc; + const tcontext = LinkFollowBox.linkDoc && (await Cast(LinkFollowBox.linkDoc.anchor2Context, Doc)) as Doc; runInAction(() => tcontext && this._docs.splice(0, 0, { col: tcontext, target: dest })); }); } @@ -157,7 +157,7 @@ export class LinkFollowBox extends React.Component { @undoBatch openFullScreen = () => { if (LinkFollowBox.destinationDoc) { - let view = DocumentManager.Instance.getDocumentView(LinkFollowBox.destinationDoc); + const view = DocumentManager.Instance.getDocumentView(LinkFollowBox.destinationDoc); view && CollectionDockingView.Instance && CollectionDockingView.Instance.OpenFullScreen(view); } } @@ -171,7 +171,7 @@ export class LinkFollowBox extends React.Component { options.context.panX = newPanX; options.context.panY = newPanY; } - let view = DocumentManager.Instance.getDocumentView(options.context); + const view = DocumentManager.Instance.getDocumentView(options.context); view && CollectionDockingView.Instance && CollectionDockingView.Instance.OpenFullScreen(view); this.highlightDoc(); } @@ -211,7 +211,7 @@ export class LinkFollowBox extends React.Component { @undoBatch openLinkRight = () => { if (LinkFollowBox.destinationDoc) { - let alias = Doc.MakeAlias(LinkFollowBox.destinationDoc); + const alias = Doc.MakeAlias(LinkFollowBox.destinationDoc); (LinkFollowBox._addDocTab || this.props.addDocTab)(alias, undefined, "onRight"); this.highlightDoc(); SelectionManager.DeselectAll(); @@ -222,7 +222,7 @@ export class LinkFollowBox extends React.Component { @undoBatch jumpToLink = async (options: { shouldZoom: boolean }) => { if (LinkFollowBox.sourceDoc && LinkFollowBox.linkDoc) { - let focus = (document: Doc) => { (LinkFollowBox._addDocTab || this.props.addDocTab)(document, undefined, "inTab"); SelectionManager.DeselectAll(); }; + const focus = (document: Doc) => { (LinkFollowBox._addDocTab || this.props.addDocTab)(document, undefined, "inTab"); SelectionManager.DeselectAll(); }; //let focus = (doc: Doc, maxLocation: string) => this.props.focus(docthis.props.focus(LinkFollowBox.destinationDoc, true, 1, () => this.props.addDocTab(doc, undefined, maxLocation)); DocumentManager.Instance.FollowLink(LinkFollowBox.linkDoc, LinkFollowBox.sourceDoc, focus, options && options.shouldZoom, false, undefined); @@ -232,7 +232,7 @@ export class LinkFollowBox extends React.Component { @undoBatch openLinkTab = () => { if (LinkFollowBox.destinationDoc) { - let fullScreenAlias = Doc.MakeAlias(LinkFollowBox.destinationDoc); + const fullScreenAlias = Doc.MakeAlias(LinkFollowBox.destinationDoc); // this.prosp.addDocTab is empty -- use the link source's addDocTab (LinkFollowBox._addDocTab || this.props.addDocTab)(fullScreenAlias, undefined, "inTab"); @@ -264,14 +264,14 @@ export class LinkFollowBox extends React.Component { if (LinkFollowBox.destinationDoc && LinkFollowBox.sourceDoc) { 
if (this.sourceView && this.sourceView.props.addDocument) { - let destViews = DocumentManager.Instance.getDocumentViews(LinkFollowBox.destinationDoc); + const destViews = DocumentManager.Instance.getDocumentViews(LinkFollowBox.destinationDoc); if (!destViews.find(dv => dv.props.ContainingCollectionView === this.sourceView!.props.ContainingCollectionView)) { - let alias = Doc.MakeAlias(LinkFollowBox.destinationDoc); - let y = NumCast(LinkFollowBox.sourceDoc.y); - let x = NumCast(LinkFollowBox.sourceDoc.x); + const alias = Doc.MakeAlias(LinkFollowBox.destinationDoc); + const y = NumCast(LinkFollowBox.sourceDoc.y); + const x = NumCast(LinkFollowBox.sourceDoc.x); - let width = NumCast(LinkFollowBox.sourceDoc.width); - let height = NumCast(LinkFollowBox.sourceDoc.height); + const width = NumCast(LinkFollowBox.sourceDoc.width); + const height = NumCast(LinkFollowBox.sourceDoc.height); alias.x = x + width + 30; alias.y = y; @@ -301,8 +301,8 @@ export class LinkFollowBox extends React.Component { this.selectedContext = LinkFollowBox.destinationDoc; } if (this.selectedOption === "") this.selectedOption = FollowOptions.NOZOOM; - let shouldZoom: boolean = this.selectedOption === FollowOptions.NOZOOM ? false : true; - let notOpenInContext: boolean = this.selectedContextString === "self" || this.selectedContextString === LinkFollowBox.destinationDoc[Id]; + const shouldZoom: boolean = this.selectedOption === FollowOptions.NOZOOM ? false : true; + const notOpenInContext: boolean = this.selectedContextString === "self" || this.selectedContextString === LinkFollowBox.destinationDoc[Id]; if (this.selectedMode === FollowModes.INPLACE) { if (shouldZoom !== undefined) this.openLinkInPlace({ shouldZoom: shouldZoom }); @@ -328,7 +328,7 @@ export class LinkFollowBox extends React.Component { @action handleModeChange = (e: React.ChangeEvent) => { - let target = e.target as HTMLInputElement; + const target = e.target as HTMLInputElement; this.selectedMode = target.value; this.selectedContext = undefined; this.selectedContextString = ""; @@ -345,13 +345,13 @@ export class LinkFollowBox extends React.Component { @action handleOptionChange = (e: React.ChangeEvent) => { - let target = e.target as HTMLInputElement; + const target = e.target as HTMLInputElement; this.selectedOption = target.value; } @action handleContextChange = (e: React.ChangeEvent) => { - let target = e.target as HTMLInputElement; + const target = e.target as HTMLInputElement; this.selectedContextString = target.value; // selectedContext is updated in reaction this.selectedOption = ""; @@ -360,7 +360,7 @@ export class LinkFollowBox extends React.Component { @computed get canOpenInPlace() { if (this.sourceView && this.sourceView.props.ContainingCollectionDoc) { - let colDoc = this.sourceView.props.ContainingCollectionDoc; + const colDoc = this.sourceView.props.ContainingCollectionDoc; if (colDoc.viewType && colDoc.viewType === CollectionViewType.Freeform) return true; } return false; diff --git a/src/client/views/linking/LinkMenu.tsx b/src/client/views/linking/LinkMenu.tsx index 27af873b5..52628ba4c 100644 --- a/src/client/views/linking/LinkMenu.tsx +++ b/src/client/views/linking/LinkMenu.tsx @@ -34,7 +34,7 @@ export class LinkMenu extends React.Component { } renderAllGroups = (groups: Map>): Array => { - let linkItems: Array = []; + const linkItems: Array = []; groups.forEach((group, groupType) => { linkItems.push( { } render() { - let sourceDoc = this.props.docView.props.Document; - let groups: Map = 
LinkManager.Instance.getRelatedGroupedLinks(sourceDoc); + const sourceDoc = this.props.docView.props.Document; + const groups: Map = LinkManager.Instance.getRelatedGroupedLinks(sourceDoc); if (this._editingLink === undefined) { return (
    diff --git a/src/client/views/linking/LinkMenuGroup.tsx b/src/client/views/linking/LinkMenuGroup.tsx index 1891919ce..15aacbbc9 100644 --- a/src/client/views/linking/LinkMenuGroup.tsx +++ b/src/client/views/linking/LinkMenuGroup.tsx @@ -51,11 +51,11 @@ export class LinkMenuGroup extends React.Component { document.removeEventListener("pointermove", this.onLinkButtonMoved); document.removeEventListener("pointerup", this.onLinkButtonUp); - let draggedDocs = this.props.group.map(linkDoc => { - let opp = LinkManager.Instance.getOppositeAnchor(linkDoc, this.props.sourceDoc); + const draggedDocs = this.props.group.map(linkDoc => { + const opp = LinkManager.Instance.getOppositeAnchor(linkDoc, this.props.sourceDoc); if (opp) return opp; }) as Doc[]; - let dragData = new DragManager.DocumentDragData(draggedDocs); + const dragData = new DragManager.DocumentDragData(draggedDocs); DragManager.StartLinkedDocumentDrag([this._drag.current], dragData, e.x, e.y, { handlers: { @@ -69,19 +69,19 @@ export class LinkMenuGroup extends React.Component { } viewGroupAsTable = (groupType: string): JSX.Element => { - let keys = LinkManager.Instance.getMetadataKeysInGroup(groupType); - let index = keys.indexOf(""); + const keys = LinkManager.Instance.getMetadataKeysInGroup(groupType); + const index = keys.indexOf(""); if (index > -1) keys.splice(index, 1); - let cols = ["anchor1", "anchor2", ...[...keys]].map(c => new SchemaHeaderField(c, "#f1efeb")); - let docs: Doc[] = LinkManager.Instance.getAllMetadataDocsInGroup(groupType); - let createTable = action(() => Docs.Create.SchemaDocument(cols, docs, { width: 500, height: 300, title: groupType + " table" })); - let ref = React.createRef(); + const cols = ["anchor1", "anchor2", ...[...keys]].map(c => new SchemaHeaderField(c, "#f1efeb")); + const docs: Doc[] = LinkManager.Instance.getAllMetadataDocsInGroup(groupType); + const createTable = action(() => Docs.Create.SchemaDocument(cols, docs, { width: 500, height: 300, title: groupType + " table" })); + const ref = React.createRef(); return
    ; } render() { - let groupItems = this.props.group.map(linkDoc => { - let destination = LinkManager.Instance.getOppositeAnchor(linkDoc, this.props.sourceDoc); + const groupItems = this.props.group.map(linkDoc => { + const destination = LinkManager.Instance.getOppositeAnchor(linkDoc, this.props.sourceDoc); if (destination && this.props.sourceDoc) { return { } renderMetadata = (): JSX.Element => { - let groups = LinkManager.Instance.getAnchorGroups(this.props.linkDoc, this.props.sourceDoc); - let index = groups.findIndex(groupDoc => StrCast(groupDoc.type).toUpperCase() === this.props.groupType.toUpperCase()); - let groupDoc = index > -1 ? groups[index] : undefined; + const groups = LinkManager.Instance.getAnchorGroups(this.props.linkDoc, this.props.sourceDoc); + const index = groups.findIndex(groupDoc => StrCast(groupDoc.type).toUpperCase() === this.props.groupType.toUpperCase()); + const groupDoc = index > -1 ? groups[index] : undefined; let mdRows: Array = []; if (groupDoc) { - let mdDoc = Cast(groupDoc.metadata, Doc, null); + const mdDoc = Cast(groupDoc.metadata, Doc, null); if (mdDoc) { - let keys = LinkManager.Instance.getMetadataKeysInGroup(this.props.groupType);//groupMetadataKeys.get(this.props.groupType); + const keys = LinkManager.Instance.getMetadataKeysInGroup(this.props.groupType);//groupMetadataKeys.get(this.props.groupType); mdRows = keys.map(key => { return (
    {key}: {StrCast(mdDoc[key])}
    ); }); @@ -110,8 +110,8 @@ export class LinkMenuItem extends React.Component { render() { - let keys = LinkManager.Instance.getMetadataKeysInGroup(this.props.groupType);//groupMetadataKeys.get(this.props.groupType); - let canExpand = keys ? keys.length > 0 : false; + const keys = LinkManager.Instance.getMetadataKeysInGroup(this.props.groupType);//groupMetadataKeys.get(this.props.groupType); + const canExpand = keys ? keys.length > 0 : false; return (
    diff --git a/src/client/views/nodes/AudioBox.tsx b/src/client/views/nodes/AudioBox.tsx index 77b10e395..95c765e8a 100644 --- a/src/client/views/nodes/AudioBox.tsx +++ b/src/client/views/nodes/AudioBox.tsx @@ -56,19 +56,19 @@ export class AudioBox extends DocExtendableComponent this.layoutDoc.scrollToLinkID, scrollLinkId => { scrollLinkId && DocListCast(this.dataDoc.links).filter(l => l[Id] === scrollLinkId).map(l => { - let la1 = l.anchor1 as Doc; - let linkTime = Doc.AreProtosEqual(la1, this.dataDoc) ? NumCast(l.anchor1Timecode) : NumCast(l.anchor2Timecode); + const la1 = l.anchor1 as Doc; + const linkTime = Doc.AreProtosEqual(la1, this.dataDoc) ? NumCast(l.anchor1Timecode) : NumCast(l.anchor2Timecode); setTimeout(() => { this.playFrom(linkTime); Doc.linkFollowHighlight(l); }, 250); }); scrollLinkId && Doc.SetInPlace(this.layoutDoc, "scrollToLinkID", undefined, false); }, { fireImmediately: true }); this._reactionDisposer = reaction(() => SelectionManager.SelectedDocuments(), selected => { - let sel = selected.length ? selected[0].props.Document : undefined; + const sel = selected.length ? selected[0].props.Document : undefined; this.Document.playOnSelect && sel && !Doc.AreProtosEqual(sel, this.props.Document) && this.playFrom(DateCast(sel.creationTime).date.getTime()); }); this._scrubbingDisposer = reaction(() => AudioBox._scrubTime, timeInMillisecondsFrom1970 => { - let start = this.extensionDoc && DateCast(this.extensionDoc.recordingStart); + const start = this.extensionDoc && DateCast(this.extensionDoc.recordingStart); start && this.playFrom((timeInMillisecondsFrom1970 - start.date.getTime()) / 1000); }); } @@ -127,7 +127,7 @@ export class AudioBox extends DocExtendableComponent { let gumStream: any; - let self = this; + const self = this; const extensionDoc = this.extensionDoc; extensionDoc && navigator.mediaDevices.getUserMedia({ audio: true @@ -160,7 +160,7 @@ export class AudioBox extends DocExtendableComponent { - let funcs: ContextMenuProps[] = []; + const funcs: ContextMenuProps[] = []; funcs.push({ description: (this.Document.playOnSelect ? "Don't play" : "Play") + " when document selected", event: () => this.Document.playOnSelect = !this.Document.playOnSelect, icon: "expand-arrows-alt" }); ContextMenu.Instance.addItem({ description: "Audio Funcs...", subitems: funcs, icon: "asterisk" }); @@ -170,7 +170,7 @@ export class AudioBox extends DocExtendableComponent Not supported. 
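The AudioBox hunks above register MobX reactions on mount and keep their disposers around so they can be torn down later. A minimal sketch of that reaction-plus-disposer pattern for seeking an audio element to a link's timecode (the class and method names here are illustrative, not Dash's actual API):

    import { observable, reaction, IReactionDisposer } from "mobx";

    class AudioSeekController {
        private readonly linkTime = observable.box<number | undefined>(undefined);
        private readonly disposer: IReactionDisposer;

        constructor(private readonly ele: HTMLAudioElement) {
            // react whenever a link requests a new timecode; fireImmediately covers the
            // case where a timecode was already pending when this controller was created
            this.disposer = reaction(
                () => this.linkTime.get(),
                time => {
                    if (time !== undefined) {
                        this.ele.currentTime = time; // seek to the link's timecode (seconds)
                        this.ele.play();
                    }
                },
                { fireImmediately: true }
            );
        }

        requestSeek(seconds: number) { this.linkTime.set(seconds); }

        // mirror of the _reactionDisposer cleanup: always dispose reactions on teardown
        dispose() { this.disposer(); }
    }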
@@ -212,7 +212,7 @@ export class AudioBox extends DocExtendableComponent @@ -228,7 +228,7 @@ export class AudioBox extends DocExtendableComponent e.stopPropagation()} onPointerDown={e => { if (e.button === 0 && !e.ctrlKey) { - let rect = (e.target as any).getBoundingClientRect(); + const rect = (e.target as any).getBoundingClientRect(); this._ele!.currentTime = this.Document.currentTimecode = (e.clientX - rect.x) / rect.width * NumCast(this.dataDoc.duration); this.pause(); e.stopPropagation(); diff --git a/src/client/views/nodes/ButtonBox.tsx b/src/client/views/nodes/ButtonBox.tsx index 659ba154a..34151a311 100644 --- a/src/client/views/nodes/ButtonBox.tsx +++ b/src/client/views/nodes/ButtonBox.tsx @@ -51,10 +51,10 @@ export class ButtonBox extends DocComponent(Butt } specificContextMenu = (e: React.MouseEvent): void => { - let funcs: ContextMenuProps[] = []; + const funcs: ContextMenuProps[] = []; funcs.push({ description: "Clear Script Params", event: () => { - let params = FieldValue(this.Document.buttonParams); + const params = FieldValue(this.Document.buttonParams); params && params.map(p => this.props.Document[p] = undefined); }, icon: "trash" }); @@ -73,8 +73,8 @@ export class ButtonBox extends DocComponent(Butt } // (!missingParams || !missingParams.length ? "" : "(" + missingParams.map(m => m + ":").join(" ") + ")") render() { - let params = this.Document.buttonParams; - let missingParams = params && params.filter(p => this.props.Document[p] === undefined); + const params = this.Document.buttonParams; + const missingParams = params && params.filter(p => this.props.Document[p] === undefined); params && params.map(p => DocListCast(this.props.Document[p])); // bcz: really hacky form of prefetching ... return (
     {
-        let ruleRounding = this.props.ruleProvider ? StrCast(this.props.ruleProvider["ruleRounding_" + this.Document.heading]) : undefined;
-        let ld = this.layoutDoc[StrCast(this.layoutDoc.layoutKey, "layout")] instanceof Doc ? this.layoutDoc[StrCast(this.layoutDoc.layoutKey, "layout")] as Doc : undefined;
-        let br = StrCast((ld || this.props.Document).borderRounding);
+        const ruleRounding = this.props.ruleProvider ? StrCast(this.props.ruleProvider["ruleRounding_" + this.Document.heading]) : undefined;
+        const ld = this.layoutDoc[StrCast(this.layoutDoc.layoutKey, "layout")] instanceof Doc ? this.layoutDoc[StrCast(this.layoutDoc.layoutKey, "layout")] as Doc : undefined;
+        const br = StrCast((ld || this.props.Document).borderRounding);
         return !br && ruleRounding ? ruleRounding : br;
     }
diff --git a/src/client/views/nodes/ContentFittingDocumentView.tsx b/src/client/views/nodes/ContentFittingDocumentView.tsx
index a5f96d2de..efc907f9b 100644
--- a/src/client/views/nodes/ContentFittingDocumentView.tsx
+++ b/src/client/views/nodes/ContentFittingDocumentView.tsx
@@ -47,7 +47,7 @@ export class ContentFittingDocumentView extends React.Component
     {
-        let wscale = this.props.PanelWidth() / (this.nativeWidth ? this.nativeWidth : this.props.PanelWidth());
+        const wscale = this.props.PanelWidth() / (this.nativeWidth ? this.nativeWidth : this.props.PanelWidth());
         if (wscale * this.nativeHeight > this.props.PanelHeight()) {
             return this.props.PanelHeight() / (this.nativeHeight ? this.nativeHeight : this.props.PanelHeight());
         }
@@ -59,7 +59,7 @@ export class ContentFittingDocumentView extends React.Component
     {
         if (de.data instanceof DragManager.DocumentDragData) {
             this.props.childDocs && this.props.childDocs.map(otherdoc => {
-                let target = Doc.GetProto(otherdoc);
+                const target = Doc.GetProto(otherdoc);
                 target.layout = ComputedField.MakeFunction("this.image_data[0]");
                 target.layoutCustom = Doc.MakeDelegate(de.data.draggedDocuments[0]);
             });
diff --git a/src/client/views/nodes/DocuLinkBox.tsx b/src/client/views/nodes/DocuLinkBox.tsx
index d73407903..a22472e9e 100644
--- a/src/client/views/nodes/DocuLinkBox.tsx
+++ b/src/client/views/nodes/DocuLinkBox.tsx
@@ -36,12 +36,12 @@ export class DocuLinkBox extends DocComponent(Doc
         (e.button === 0 && !e.ctrlKey) && e.stopPropagation();
     }
     onPointerMove = action((e: PointerEvent) => {
-        let cdiv = this._ref && this._ref.current && this._ref.current.parentElement;
+        const cdiv = this._ref && this._ref.current && this._ref.current.parentElement;
         if (cdiv && (Math.abs(e.clientX - this._downx) > 5 || Math.abs(e.clientY - this._downy) > 5)) {
-            let bounds = cdiv.getBoundingClientRect();
-            let pt = Utils.getNearestPointInPerimeter(bounds.left, bounds.top, bounds.width, bounds.height, e.clientX, e.clientY);
-            let separation = Math.sqrt((pt[0] - e.clientX) * (pt[0] - e.clientX) + (pt[1] - e.clientY) * (pt[1] - e.clientY));
-            let dragdist = Math.sqrt((pt[0] - this._downx) * (pt[0] - this._downx) + (pt[1] - this._downy) * (pt[1] - this._downy));
+            const bounds = cdiv.getBoundingClientRect();
+            const pt = Utils.getNearestPointInPerimeter(bounds.left, bounds.top, bounds.width, bounds.height, e.clientX, e.clientY);
+            const separation = Math.sqrt((pt[0] - e.clientX) * (pt[0] - e.clientX) + (pt[1] - e.clientY) * (pt[1] - e.clientY));
+            const dragdist = Math.sqrt((pt[0] - this._downx) * (pt[0] - this._downx) + (pt[1] - this._downy) * (pt[1] - this._downy));
             if (separation > 100) {
                 DragLinksAsDocuments(this._ref.current!, pt[0], pt[1], Cast(this.props.Document[this.props.fieldKey], Doc) as Doc, this.props.Document); // Containging collection is the document, not a collection... hack.
                 document.removeEventListener("pointermove", this.onPointerMove);
@@ -67,14 +67,14 @@ export class DocuLinkBox extends DocComponent(Doc
     }
     render() {
-        let anchorDoc = Cast(this.props.Document[this.props.fieldKey], Doc);
-        let hasAnchor = anchorDoc instanceof Doc && anchorDoc.type === DocumentType.PDFANNO;
-        let y = NumCast(this.props.Document[this.props.fieldKey + "_y"], 100);
-        let x = NumCast(this.props.Document[this.props.fieldKey + "_x"], 100);
-        let c = StrCast(this.props.Document.backgroundColor, "lightblue");
-        let anchor = this.props.fieldKey === "anchor1" ? "anchor2" : "anchor1";
-        let timecode = this.props.Document[anchor + "Timecode"];
-        let targetTitle = StrCast((this.props.Document[anchor]! as Doc).title) + (timecode !== undefined ? ":" + timecode : "");
+        const anchorDoc = Cast(this.props.Document[this.props.fieldKey], Doc);
+        const hasAnchor = anchorDoc instanceof Doc && anchorDoc.type === DocumentType.PDFANNO;
+        const y = NumCast(this.props.Document[this.props.fieldKey + "_y"], 100);
+        const x = NumCast(this.props.Document[this.props.fieldKey + "_x"], 100);
+        const c = StrCast(this.props.Document.backgroundColor, "lightblue");
+        const anchor = this.props.fieldKey === "anchor1" ? "anchor2" : "anchor1";
+        const timecode = this.props.Document[anchor + "Timecode"];
+        const targetTitle = StrCast((this.props.Document[anchor]! as Doc).title) + (timecode !== undefined ? ":" + timecode : "");
         return
         obj.active = this.props.parentActive).omit, Document: this.layoutDoc, DataDoc: this.dataDoc,
diff --git a/src/client/views/nodes/DocumentView.tsx b/src/client/views/nodes/DocumentView.tsx
index 057b4eecd..63ce6233c 100644
--- a/src/client/views/nodes/DocumentView.tsx
+++ b/src/client/views/nodes/DocumentView.tsx
@@ -122,7 +122,7 @@ export class DocumentView extends DocComponent(Docu
     startDragging(x: number, y: number, dropAction: dropActionType, applyAsTemplate?: boolean) {
         if (this._mainCont.current) {
-            let dragData = new DragManager.DocumentDragData([this.props.Document]);
+            const dragData = new DragManager.DocumentDragData([this.props.Document]);
             const [left, top] = this.props.ScreenToLocalTransform().scale(this.props.ContentScaling()).inverse().transformPoint(0, 0);
             dragData.offset = this.props.ScreenToLocalTransform().scale(this.props.ContentScaling()).transformDirection(x - left, y - top);
             dragData.dropAction = dropAction;
@@ -143,7 +143,7 @@ export class DocumentView extends DocComponent(Docu
         e.stopPropagation();
         let preventDefault = true;
         if (this._doubleTap && this.props.renderDepth && !this.onClickHandler?.script) { // disable double-click to show full screen for things that have an on click behavior since clicking them twice can be misinterpreted as a double click
-            let fullScreenAlias = Doc.MakeAlias(this.props.Document);
+            const fullScreenAlias = Doc.MakeAlias(this.props.Document);
             if (StrCast(fullScreenAlias.layoutKey) !== "layoutCustom" && fullScreenAlias.layoutCustom !== undefined) {
                 fullScreenAlias.layoutKey = "layoutCustom";
             }
@@ -166,9 +166,9 @@ export class DocumentView extends DocComponent(Docu
     }
     buttonClick = async (altKey: boolean, ctrlKey: boolean) => {
-        let maximizedDocs = await DocListCastAsync(this.Document.maximizedDocs);
-        let summarizedDocs = await DocListCastAsync(this.Document.summarizedDocs);
-        let linkDocs = LinkManager.Instance.getAllRelatedLinks(this.props.Document);
+        const maximizedDocs = await DocListCastAsync(this.Document.maximizedDocs);
+        const summarizedDocs = await DocListCastAsync(this.Document.summarizedDocs);
+        const linkDocs = LinkManager.Instance.getAllRelatedLinks(this.props.Document);
         let expandedDocs: Doc[] = [];
         expandedDocs = maximizedDocs ? [...maximizedDocs, ...expandedDocs] : expandedDocs;
         expandedDocs = summarizedDocs ? [...summarizedDocs, ...expandedDocs] : expandedDocs;
@@ -179,7 +179,7 @@ export class DocumentView extends DocComponent(Docu
             maxLocation = this.Document.maximizeLocation = (!ctrlKey ? !altKey ? maxLocation : (maxLocation !== "inPlace" ? "inPlace" : "onRight") : (maxLocation !== "inPlace" ? "inPlace" : "inTab"));
             if (maxLocation === "inPlace") {
                 expandedDocs.forEach(maxDoc => this.props.addDocument && this.props.addDocument(maxDoc));
-                let scrpt = this.props.ScreenToLocalTransform().scale(this.props.ContentScaling()).inverse().transformPoint(NumCast(this.layoutDoc.width) / 2, NumCast(this.layoutDoc.height) / 2);
+                const scrpt = this.props.ScreenToLocalTransform().scale(this.props.ContentScaling()).inverse().transformPoint(NumCast(this.layoutDoc.width) / 2, NumCast(this.layoutDoc.height) / 2);
                 DocumentManager.Instance.animateBetweenPoint(scrpt, expandedDocs);
             } else {
                 expandedDocs.forEach(maxDoc => (!this.props.addDocTab(maxDoc, undefined, "close") && this.props.addDocTab(maxDoc, undefined, maxLocation)));
@@ -278,7 +278,7 @@ export class DocumentView extends DocComponent(Docu
         fieldTemplate.heading = 1;
         fieldTemplate.autoHeight = true;
-        let docTemplate = Docs.Create.FreeformDocument([fieldTemplate], { title: doc.title + "_layout", width: width + 20, height: Math.max(100, height + 45) });
+        const docTemplate = Docs.Create.FreeformDocument([fieldTemplate], { title: doc.title + "_layout", width: width + 20, height: Math.max(100, height + 45) });
         Doc.MakeMetadataFieldTemplate(fieldTemplate, Doc.GetProto(docTemplate), true);
         Doc.ApplyTemplateTo(docTemplate, dataDoc || doc, "layoutCustom", undefined);
@@ -324,10 +324,10 @@ export class DocumentView extends DocComponent(Docu
     @action
     onDrop = (e: React.DragEvent) => {
-        let text = e.dataTransfer.getData("text/plain");
+        const text = e.dataTransfer.getData("text/plain");
         if (!e.isDefaultPrevented() && text && text.startsWith("(Docu
     @undoBatch
     @action
     makeIntoPortal = async () => {
-        let anchors = await Promise.all(DocListCast(this.Document.links).map(async (d: Doc) => Cast(d.anchor2, Doc)));
+        const anchors = await Promise.all(DocListCast(this.Document.links).map(async (d: Doc) => Cast(d.anchor2, Doc)));
         if (!anchors.find(anchor2 => anchor2 && anchor2.title === this.Document.title + ".portal" ? true : false)) {
-            let portalID = (this.Document.title + ".portal").replace(/^-/, "").replace(/\([0-9]*\)$/, "");
+            const portalID = (this.Document.title + ".portal").replace(/^-/, "").replace(/\([0-9]*\)$/, "");
             DocServer.GetRefField(portalID).then(existingPortal => {
-                let portal = existingPortal instanceof Doc ? existingPortal : Docs.Create.FreeformDocument([], { width: (this.layoutDoc.width || 0) + 10, height: this.layoutDoc.height || 0, title: portalID });
+                const portal = existingPortal instanceof Doc ? existingPortal : Docs.Create.FreeformDocument([], { width: (this.layoutDoc.width || 0) + 10, height: this.layoutDoc.height || 0, title: portalID });
                 DocUtils.MakeLink({ doc: this.props.Document, ctx: this.props.ContainingCollectionDoc }, { doc: portal }, portalID, "portal link");
                 this.Document.isButton = true;
             });
@@ -400,7 +400,7 @@ export class DocumentView extends DocComponent(Docu
         e.preventDefault();
         const cm = ContextMenu.Instance;
-        let subitems: ContextMenuProps[] = [];
+        const subitems: ContextMenuProps[] = [];
         subitems.push({ description: "Open Full Screen", event: () => CollectionDockingView.Instance && CollectionDockingView.Instance.OpenFullScreen(this), icon: "desktop" });
         subitems.push({ description: "Open Tab ", event: () => this.props.addDocTab(this.props.Document, this.props.DataDoc, "inTab"), icon: "folder" });
         subitems.push({ description: "Open Right ", event: () => this.props.addDocTab(this.props.Document, this.props.DataDoc, "onRight"), icon: "caret-square-right" });
@@ -410,8 +410,8 @@ export class DocumentView extends DocComponent(Docu
         cm.addItem({ description: "Open...", subitems: subitems, icon: "external-link-alt" });
-        let existingOnClick = ContextMenu.Instance.findByDescription("OnClick...");
-        let onClicks: ContextMenuProps[] = existingOnClick && "subitems" in existingOnClick ? existingOnClick.subitems : [];
+        const existingOnClick = ContextMenu.Instance.findByDescription("OnClick...");
+        const onClicks: ContextMenuProps[] = existingOnClick && "subitems" in existingOnClick ? existingOnClick.subitems : [];
         onClicks.push({ description: "Enter Portal", event: this.makeIntoPortal, icon: "window-restore" });
         onClicks.push({ description: "Toggle Detail", event: () => this.Document.onClick = ScriptField.MakeScript("toggleDetail(this)"), icon: "window-restore" });
         onClicks.push({ description: this.Document.ignoreClick ? "Select" : "Do Nothing", event: () => this.Document.ignoreClick = !this.Document.ignoreClick, icon: this.Document.ignoreClick ? "unlock" : "lock" });
@@ -425,7 +425,7 @@ export class DocumentView extends DocComponent(Docu
         });
         !existingOnClick && cm.addItem({ description: "OnClick...", subitems: onClicks, icon: "hand-point-right" });
-        let funcs: ContextMenuProps[] = [];
+        const funcs: ContextMenuProps[] = [];
         if (this.Document.onDragStart) {
             funcs.push({ description: "Drag an Alias", icon: "edit", event: () => this.Document.dragFactory && (this.Document.onDragStart = ScriptField.MakeFunction('getAlias(this.dragFactory)')) });
             funcs.push({ description: "Drag a Copy", icon: "edit", event: () => this.Document.dragFactory && (this.Document.onDragStart = ScriptField.MakeFunction('getCopy(this.dragFactory, true)')) });
@@ -433,8 +433,8 @@ export class DocumentView extends DocComponent(Docu
             ContextMenu.Instance.addItem({ description: "OnDrag...", subitems: funcs, icon: "asterisk" });
         }
-        let existing = ContextMenu.Instance.findByDescription("Layout...");
-        let layoutItems: ContextMenuProps[] = existing && "subitems" in existing ? existing.subitems : [];
+        const existing = ContextMenu.Instance.findByDescription("Layout...");
+        const layoutItems: ContextMenuProps[] = existing && "subitems" in existing ? existing.subitems : [];
         layoutItems.push({ description: this.Document.isBackground ? "As Foreground" : "As Background", event: this.makeBackground, icon: this.Document.lockedPosition ? "unlock" : "lock" });
         if (this.props.DataDoc) {
             layoutItems.push({ description: "Make View of Metadata Field", event: () => Doc.MakeMetadataFieldTemplate(this.props.Document, this.props.DataDoc!), icon: "concierge-bell" });
@@ -453,8 +453,8 @@ export class DocumentView extends DocComponent(Docu
         }
         !existing && cm.addItem({ description: "Layout...", subitems: layoutItems, icon: "compass" });
-        let more = ContextMenu.Instance.findByDescription("More...");
-        let moreItems: ContextMenuProps[] = more && "subitems" in more ? more.subitems : [];
+        const more = ContextMenu.Instance.findByDescription("More...");
+        const moreItems: ContextMenuProps[] = more && "subitems" in more ? more.subitems : [];
         if (!ClientUtils.RELEASE) {
             // let copies: ContextMenuProps[] = [];
@@ -489,7 +489,7 @@ export class DocumentView extends DocComponent(Docu
         !more && cm.addItem({ description: "More...", subitems: moreItems, icon: "hand-point-right" });
         runInAction(() => {
             if (!ClientUtils.RELEASE) {
-                let setWriteMode = (mode: DocServer.WriteMode) => {
+                const setWriteMode = (mode: DocServer.WriteMode) => {
                     DocServer.AclsMode = mode;
                     const mode1 = mode;
                     const mode2 = mode === DocServer.WriteMode.Default ? mode : DocServer.WriteMode.Playground;
@@ -503,7 +503,7 @@ export class DocumentView extends DocComponent(Docu
                     DocServer.setFieldWriteMode("scale", mode2);
                     DocServer.setFieldWriteMode("viewType", mode2);
                 };
-                let aclsMenu: ContextMenuProps[] = [];
+                const aclsMenu: ContextMenuProps[] = [];
                 aclsMenu.push({ description: "Default (write/read all)", event: () => setWriteMode(DocServer.WriteMode.Default), icon: DocServer.AclsMode === DocServer.WriteMode.Default ? "check" : "exclamation" });
                 aclsMenu.push({ description: "Playground (write own/no read)", event: () => setWriteMode(DocServer.WriteMode.Playground), icon: DocServer.AclsMode === DocServer.WriteMode.Playground ? "check" : "exclamation" });
                 aclsMenu.push({ description: "Live Playground (write own/read others)", event: () => setWriteMode(DocServer.WriteMode.LivePlayground), icon: DocServer.AclsMode === DocServer.WriteMode.LivePlayground ? "check" : "exclamation" });
@@ -539,8 +539,8 @@ export class DocumentView extends DocComponent(Docu
     select = (ctrlPressed: boolean) => { SelectionManager.SelectDoc(this, ctrlPressed); };
     chromeHeight = () => {
-        let showOverlays = this.props.showOverlays ? this.props.showOverlays(this.Document) : undefined;
-        let showTitle = showOverlays && "title" in showOverlays ? showOverlays.title : StrCast(this.Document.showTitle);
+        const showOverlays = this.props.showOverlays ? this.props.showOverlays(this.Document) : undefined;
+        const showTitle = showOverlays && "title" in showOverlays ? showOverlays.title : StrCast(this.Document.showTitle);
         return (showTitle ? 25 : 0) + 1;
     }
@@ -585,8 +585,8 @@ export class DocumentView extends DocComponent(Docu
     // if it's a tempoarl link (currently just for Audio), then the audioBox will display the anchor and we don't want to display it here.
    // would be good to generalize this some way.
     isNonTemporalLink = (linkDoc: Doc) => {
-        let anchor = Cast(Doc.AreProtosEqual(this.props.Document, Cast(linkDoc.anchor1, Doc) as Doc) ? linkDoc.anchor1 : linkDoc.anchor2, Doc) as Doc;
-        let ept = Doc.AreProtosEqual(this.props.Document, Cast(linkDoc.anchor1, Doc) as Doc) ? linkDoc.anchor1Timecode : linkDoc.anchor2Timecode;
+        const anchor = Cast(Doc.AreProtosEqual(this.props.Document, Cast(linkDoc.anchor1, Doc) as Doc) ? linkDoc.anchor1 : linkDoc.anchor2, Doc) as Doc;
+        const ept = Doc.AreProtosEqual(this.props.Document, Cast(linkDoc.anchor1, Doc) as Doc) ? linkDoc.anchor1Timecode : linkDoc.anchor2Timecode;
         return anchor.type === DocumentType.AUDIO && NumCast(ept) ? false : true;
     }
@@ -651,14 +651,14 @@ export class DocumentView extends DocComponent(Docu
     @action
     handle2PointersMove = (e: TouchEvent) => {
-        let pt1 = e.targetTouches.item(0);
-        let pt2 = e.targetTouches.item(1);
+        const pt1 = e.targetTouches.item(0);
+        const pt2 = e.targetTouches.item(1);
         if (pt1 && pt2 && this.prevPoints.has(pt1.identifier) && this.prevPoints.has(pt2.identifier)) {
-            let oldPoint1 = this.prevPoints.get(pt1.identifier);
-            let oldPoint2 = this.prevPoints.get(pt2.identifier);
-            let pinching = InteractionUtils.Pinning(pt1, pt2, oldPoint1!, oldPoint2!);
+            const oldPoint1 = this.prevPoints.get(pt1.identifier);
+            const oldPoint2 = this.prevPoints.get(pt2.identifier);
+            const pinching = InteractionUtils.Pinning(pt1, pt2, oldPoint1!, oldPoint2!);
             if (pinching !== 0) {
-                let newWidth = Math.max(Math.abs(oldPoint1!.clientX - oldPoint2!.clientX), Math.abs(pt1.clientX - pt2.clientX));
+                const newWidth = Math.max(Math.abs(oldPoint1!.clientX - oldPoint2!.clientX), Math.abs(pt1.clientX - pt2.clientX));
                 this.props.Document.width = newWidth;
             }
         }
@@ -679,12 +679,12 @@ export class DocumentView extends DocComponent(Docu
         const localScale = fullDegree;
         const animDims = this.Document.animateToDimensions ? Array.from(this.Document.animateToDimensions) : undefined;
-        let animheight = animDims ? animDims[1] : "100%";
-        let animwidth = animDims ? animDims[0] : "100%";
+        const animheight = animDims ? animDims[1] : "100%";
+        const animwidth = animDims ? animDims[0] : "100%";
         const highlightColors = ["transparent", "maroon", "maroon", "yellow", "magenta", "cyan", "orange"];
         const highlightStyles = ["solid", "dashed", "solid", "solid", "solid", "solid", "solid"];
-        let highlighting = fullDegree && this.layoutDoc.type !== DocumentType.FONTICON && this.layoutDoc.viewType !== CollectionViewType.Linear;
+        const highlighting = fullDegree && this.layoutDoc.type !== DocumentType.FONTICON && this.layoutDoc.viewType !== CollectionViewType.Linear;
         return
                 Doc.BrushDoc(this.props.Document)} onPointerLeave={e => Doc.UnBrushDoc(this.props.Document)}
diff --git a/src/client/views/nodes/FaceRectangle.tsx b/src/client/views/nodes/FaceRectangle.tsx
index 887efc0d5..20afa4565 100644
--- a/src/client/views/nodes/FaceRectangle.tsx
+++ b/src/client/views/nodes/FaceRectangle.tsx
@@ -12,7 +12,7 @@ export default class FaceRectangle extends React.Component<{ rectangle: Rectangl
     }
     render() {
-        let rectangle = this.props.rectangle;
+        const rectangle = this.props.rectangle;
         return (
     {
     render() {
-        let faces = DocListCast(this.props.document.faces);
-        let templates: RectangleTemplate[] = faces.map(faceDoc => {
-            let rectangle = Cast(faceDoc.faceRectangle, Doc) as Doc;
-            let style = {
+        const faces = DocListCast(this.props.document.faces);
+        const templates: RectangleTemplate[] = faces.map(faceDoc => {
+            const rectangle = Cast(faceDoc.faceRectangle, Doc) as Doc;
+            const style = {
                 top: NumCast(rectangle.top),
                 left: NumCast(rectangle.left),
                 width: NumCast(rectangle.width),
diff --git a/src/client/views/nodes/FontIconBox.tsx b/src/client/views/nodes/FontIconBox.tsx
index 960b55e3e..2433251b3 100644
--- a/src/client/views/nodes/FontIconBox.tsx
+++ b/src/client/views/nodes/FontIconBox.tsx
@@ -25,8 +25,8 @@ export class FontIconBox extends DocComponent(
         this._backgroundReaction = reaction(() => this.props.Document.backgroundColor, () => {
             if (this._ref && this._ref.current) {
-                let col = Utils.fromRGBAstr(getComputedStyle(this._ref.current).backgroundColor);
-                let colsum = (col.r + col.g + col.b);
+                const col = Utils.fromRGBAstr(getComputedStyle(this._ref.current).backgroundColor);
+                const colsum = (col.r + col.g + col.b);
                 if (colsum / col.a > 600 || col.a < 0.25) runInAction(() => this._foregroundColor = "black");
                 else if (colsum / col.a <= 600 || col.a >= .25) runInAction(() => this._foregroundColor = "white");
             }
@@ -36,8 +36,8 @@ export class FontIconBox extends DocComponent(
         this._backgroundReaction && this._backgroundReaction();
     }
     render() {
-        let referenceDoc = (this.props.Document.dragFactory instanceof Doc ? this.props.Document.dragFactory : this.props.Document);
-        let referenceLayout = Doc.Layout(referenceDoc);
+        const referenceDoc = (this.props.Document.dragFactory instanceof Doc ? this.props.Document.dragFactory : this.props.Document);
+        const referenceLayout = Doc.Layout(referenceDoc);
         return
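Note on the hunks above: every change in these view components applies the same mechanical refactor, turning "let" declarations into "const" wherever the binding is never reassigned, while genuinely mutable locals (for example gumStream, preventDefault, and expandedDocs in DocumentView) stay "let". The sketch below is a minimal TypeScript illustration of that convention; the function and names are hypothetical, not taken from this patch, and whether the repository enforces the rule through a prefer-const lint setting is an assumption.

    // Sketch of the let -> const convention applied throughout this commit.
    // `first` is never reassigned, so it becomes const (like sel, funcs, anchorDoc above);
    // `summary` is reassigned later, so it must remain let (like expandedDocs above).
    function describeSelection(selected: { title: string }[]): string {
        const first = selected.length ? selected[0] : undefined; // read-only binding
        let summary = "nothing selected";                        // mutated below
        if (first) {
            summary = `selected: ${first.title}`;
        }
        return summary;
    }

    // usage: logs "selected: AudioBox"
    console.log(describeSelection([{ title: "AudioBox" }]));

Beyond readability, const lets the compiler reject accidental reassignment at build time, which is why the change can be applied wholesale to any local that is only initialized once.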