From c24c16efbf798eac1003811f6c0ed98bfc65f48e Mon Sep 17 00:00:00 2001
From: Eleanor Eng
Date: Sat, 16 Nov 2019 15:55:06 -0500
Subject: comment I guess

---
 src/server/Initialization.ts | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

(limited to 'src')

diff --git a/src/server/Initialization.ts b/src/server/Initialization.ts
index fbb5ae7a6..0e5b8649e 100644
--- a/src/server/Initialization.ts
+++ b/src/server/Initialization.ts
@@ -19,6 +19,8 @@ import * as whm from 'webpack-hot-middleware';
 import * as fs from 'fs';
 import * as request from 'request';
+/* RouteSetter is a wrapper around the server that prevents the server
+   from being exposed. */
 export type RouteSetter = (server: RouteManager) => void;
 export interface InitializationOptions {
     listenAtPort: number;
@@ -38,7 +40,7 @@ export default async function InitializeServer(options: InitializationOptions) {
     registerAuthenticationRoutes(server);
     registerCorsProxy(server);
-    const isRelease = determineEnvironment();
+    const isRelease = determineEnvironment(); // vs. dev mode
     routeSetter(new RouteManager(server, isRelease));
     server.listen(listenAtPort, () => console.log(`server started at http://localhost:${listenAtPort}`));
@@ -73,6 +75,7 @@ function buildWithMiddleware(server: express.Express) {
     return server;
 }
+/* Determine if the environment is dev mode or release mode. */
 function determineEnvironment() {
     const isRelease = process.env.RELEASE === "true";
-- cgit v1.2.3-70-g09d2

From 8441282d3d07d90536464e7b084b278ea9957cf8 Mon Sep 17 00:00:00 2001
From: Sam Wilkins
Date: Mon, 18 Nov 2019 16:59:26 -0500
Subject: log

---
 src/server/ActionUtilities.ts | 8 +++++++-
 src/server/index.ts | 5 +++--
 2 files changed, 10 insertions(+), 3 deletions(-)

(limited to 'src')

diff --git a/src/server/ActionUtilities.ts b/src/server/ActionUtilities.ts
index 9a009791b..b418772a6 100644
--- a/src/server/ActionUtilities.ts
+++ b/src/server/ActionUtilities.ts
@@ -25,4 +25,10 @@ export const write_text_file = (relativePath: string, contents: any) => {
     return new Promise((resolve, reject) => {
         fs.writeFile(target, contents, (err) => err ?
reject(err) : resolve()); }); -}; \ No newline at end of file +}; + +export async function log_execution(startMessage: string, endMessage: string, contents: () => void | Promise) { + console.log('\x1b[36m%s\x1b[0m', `${startMessage}...`); + await contents(); + console.log(endMessage); +} \ No newline at end of file diff --git a/src/server/index.ts b/src/server/index.ts index d68e9faa1..fb031a254 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -21,6 +21,7 @@ import { GoogleCredentialsLoader } from './credentials/CredentialsLoader'; import DeleteManager from "./ApiManagers/DeleteManager"; import PDFManager from "./ApiManagers/PDFManager"; import UploadManager from "./ApiManagers/UploadManager"; +import { log_execution } from "./ActionUtilities"; export const publicDirectory = __dirname + "/public"; export const filesDirectory = publicDirectory + "/files/"; @@ -43,7 +44,7 @@ async function preliminaryFunctions() { // divide the public directory based on type await Promise.all(Object.keys(Partitions).map(partition => DashUploadUtils.createIfNotExists(filesDirectory + partition))); // connect to the database - await Database.tryInitializeConnection(); + await log_execution("attempting to initialize database connection", "connected", Database.tryInitializeConnection); } /** @@ -243,6 +244,6 @@ function routeSetter(router: RouteManager) { } (async function start() { - await preliminaryFunctions(); + await log_execution("starting execution of preliminary functions", "completed preliminary functions", preliminaryFunctions); await initializeServer({ listenAtPort: 1050, routeSetter }); })(); -- cgit v1.2.3-70-g09d2 From 00633c834c725bab78cef5bd7b9c4ff2b1449ccf Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Tue, 19 Nov 2019 11:18:08 -0500 Subject: api key route switched to environment, added client side util --- src/Utils.ts | 7 +++++- src/client/cognitive_services/CognitiveServices.ts | 28 ++++++++++------------ src/server/ApiManagers/GeneralGoogleManager.ts | 16 ++----------- src/server/ApiManagers/GooglePhotosManager.ts | 1 + 4 files changed, 22 insertions(+), 30 deletions(-) (limited to 'src') diff --git a/src/Utils.ts b/src/Utils.ts index abff2eaba..91fa459c6 100644 --- a/src/Utils.ts +++ b/src/Utils.ts @@ -48,6 +48,11 @@ export namespace Utils { return prepend("/corsProxy/") + encodeURIComponent(url); } + export async function getApiKey(target: string): Promise { + const response = await fetch(prepend(`environment/${target.toUpperCase()}`)); + return response.text(); + } + export function CopyText(text: string) { var textArea = document.createElement("textarea"); textArea.value = text; @@ -174,7 +179,7 @@ export namespace Utils { } let idString = (message.id || "").padStart(36, ' '); prefix = prefix.padEnd(16, ' '); - console.log(`${prefix}: ${idString}, ${receiving ? 'receiving' : 'sending'} ${messageName} with data ${JSON.stringify(message)}`); + console.log(`${prefix}: ${idString}, ${receiving ? 
'receiving' : 'sending'} ${messageName} with data ${JSON.stringify(message)} `); } function loggingCallback(prefix: string, func: (args: any) => any, messageName: string) { diff --git a/src/client/cognitive_services/CognitiveServices.ts b/src/client/cognitive_services/CognitiveServices.ts index e74aef998..5a7f5e991 100644 --- a/src/client/cognitive_services/CognitiveServices.ts +++ b/src/client/cognitive_services/CognitiveServices.ts @@ -38,21 +38,19 @@ export enum Confidence { export namespace CognitiveServices { const ExecuteQuery = async (service: Service, manager: APIManager, data: D): Promise => { - return fetch(Utils.prepend(`environment/${service}`)).then(async response => { - let apiKey = await response.text(); - if (!apiKey) { - console.log(`No API key found for ${service}: ensure index.ts has access to a .env file in your root directory`); - return undefined; - } - - let results: any; - try { - results = await manager.requester(apiKey, manager.converter(data), service).then(json => JSON.parse(json)); - } catch { - results = undefined; - } - return results; - }); + const apiKey = await Utils.getApiKey(service); + if (!apiKey) { + console.log(`No API key found for ${service}: ensure index.ts has access to a .env file in your root directory.`); + return undefined; + } + + let results: any; + try { + results = await manager.requester(apiKey, manager.converter(data), service).then(json => JSON.parse(json)); + } catch { + results = undefined; + } + return results; }; export namespace Image { diff --git a/src/server/ApiManagers/GeneralGoogleManager.ts b/src/server/ApiManagers/GeneralGoogleManager.ts index 89efebf78..171912185 100644 --- a/src/server/ApiManagers/GeneralGoogleManager.ts +++ b/src/server/ApiManagers/GeneralGoogleManager.ts @@ -5,11 +5,7 @@ import { Database } from "../database"; import RouteSubscriber from "../RouteSubscriber"; const deletionPermissionError = "Cannot perform specialized delete outside of the development environment!"; -const ServicesApiKeyMap = new Map([ - ["face", process.env.FACE], - ["vision", process.env.VISION], - ["handwriting", process.env.HANDWRITING] -]); + const EndpointHandlerMap = new Map([ ["create", (api, params) => api.create(params)], ["retrieve", (api, params) => api.get(params)], @@ -53,15 +49,6 @@ export default class GeneralGoogleManager extends ApiManager { } }); - register({ - method: Method.GET, - subscription: new RouteSubscriber("/cognitiveServices").add('requestedservice'), - onValidation: ({ req, res }) => { - let service = req.params.requestedservice; - res.send(ServicesApiKeyMap.get(service)); - } - }); - register({ method: Method.POST, subscription: new RouteSubscriber("/googleDocs").add("sector", "action"), @@ -79,5 +66,6 @@ export default class GeneralGoogleManager extends ApiManager { res.send(undefined); } }); + } } \ No newline at end of file diff --git a/src/server/ApiManagers/GooglePhotosManager.ts b/src/server/ApiManagers/GooglePhotosManager.ts index 1f6051c28..1138dede1 100644 --- a/src/server/ApiManagers/GooglePhotosManager.ts +++ b/src/server/ApiManagers/GooglePhotosManager.ts @@ -107,5 +107,6 @@ export default class GooglePhotosManager extends ApiManager { _invalid(res, requestError); } }); + } } \ No newline at end of file -- cgit v1.2.3-70-g09d2 From 611bb858265b6667f2b7db858d183cea16f273aa Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Tue, 19 Nov 2019 11:23:06 -0500 Subject: rename --- src/server/ApiManagers/DownloadManager.ts | 267 ++++++++++++++++++++++++++++++ src/server/ApiManagers/ExportManager.ts | 267 
------------------------------ src/server/index.ts | 2 +- 3 files changed, 268 insertions(+), 268 deletions(-) create mode 100644 src/server/ApiManagers/DownloadManager.ts delete mode 100644 src/server/ApiManagers/ExportManager.ts (limited to 'src') diff --git a/src/server/ApiManagers/DownloadManager.ts b/src/server/ApiManagers/DownloadManager.ts new file mode 100644 index 000000000..fc6ba0d22 --- /dev/null +++ b/src/server/ApiManagers/DownloadManager.ts @@ -0,0 +1,267 @@ +import ApiManager, { Registration } from "./ApiManager"; +import { Method } from "../RouteManager"; +import RouteSubscriber from "../RouteSubscriber"; +import * as Archiver from 'archiver'; +import * as express from 'express'; +import { Database } from "../database"; +import * as path from "path"; +import { DashUploadUtils } from "../DashUploadUtils"; +import { publicDirectory } from ".."; + +export type Hierarchy = { [id: string]: string | Hierarchy }; +export type ZipMutator = (file: Archiver.Archiver) => void | Promise; +export interface DocumentElements { + data: string | any[]; + title: string; +} + +export default class DownloadManager extends ApiManager { + + protected initialize(register: Registration): void { + + /** + * Let's say someone's using Dash to organize images in collections. + * This lets them export the hierarchy they've built to their + * own file system in a useful format. + * + * This handler starts with a single document id (interesting only + * if it's that of a collection). It traverses the database, captures + * the nesting of only nested images or collections, writes + * that to a zip file and returns it to the client for download. + */ + register({ + method: Method.GET, + subscription: new RouteSubscriber("imageHierarchyExport").add('docId'), + onValidation: async ({ req, res }) => { + const id = req.params.docId; + const hierarchy: Hierarchy = {}; + await buildHierarchyRecursive(id, hierarchy); + return BuildAndDispatchZip(res, zip => writeHierarchyRecursive(zip, hierarchy)); + } + }); + + register({ + method: Method.GET, + subscription: new RouteSubscriber("downloadId").add("docId"), + onValidation: async ({ req, res }) => { + return BuildAndDispatchZip(res, async zip => { + const { id, docs, files } = await getDocs(req.params.docId); + const docString = JSON.stringify({ id, docs }); + zip.append(docString, { name: "doc.json" }); + files.forEach(val => { + zip.file(publicDirectory + val, { name: val.substring(1) }); + }); + }); + } + }); + + register({ + method: Method.GET, + subscription: new RouteSubscriber("/serializeDoc").add("docId"), + onValidation: async ({ req, res }) => { + const { docs, files } = await getDocs(req.params.docId); + res.send({ docs, files: Array.from(files) }); + } + }); + + + } + +} + +async function getDocs(id: string) { + const files = new Set(); + const docs: { [id: string]: any } = {}; + const fn = (doc: any): string[] => { + const id = doc.id; + if (typeof id === "string" && id.endsWith("Proto")) { + //Skip protos + return []; + } + const ids: string[] = []; + for (const key in doc.fields) { + if (!doc.fields.hasOwnProperty(key)) { + continue; + } + const field = doc.fields[key]; + if (field === undefined || field === null) { + continue; + } + + if (field.__type === "proxy" || field.__type === "prefetch_proxy") { + ids.push(field.fieldId); + } else if (field.__type === "script" || field.__type === "computed") { + if (field.captures) { + ids.push(field.captures.fieldId); + } + } else if (field.__type === "list") { + ids.push(...fn(field)); + } else if (typeof 
field === "string") { + const re = /"(?:dataD|d)ocumentId"\s*:\s*"([\w\-]*)"/g; + let match: string[] | null; + while ((match = re.exec(field)) !== null) { + ids.push(match[1]); + } + } else if (field.__type === "RichTextField") { + const re = /"href"\s*:\s*"(.*?)"/g; + let match: string[] | null; + while ((match = re.exec(field.Data)) !== null) { + const urlString = match[1]; + const split = new URL(urlString).pathname.split("doc/"); + if (split.length > 1) { + ids.push(split[split.length - 1]); + } + } + const re2 = /"src"\s*:\s*"(.*?)"/g; + while ((match = re2.exec(field.Data)) !== null) { + const urlString = match[1]; + const pathname = new URL(urlString).pathname; + files.add(pathname); + } + } else if (["audio", "image", "video", "pdf", "web"].includes(field.__type)) { + const url = new URL(field.url); + const pathname = url.pathname; + files.add(pathname); + } + } + + if (doc.id) { + docs[doc.id] = doc; + } + return ids; + }; + await Database.Instance.visit([id], fn); + return { id, docs, files }; +} + +/** + * This utility function factors out the process + * of creating a zip file and sending it back to the client + * by piping it into a response. + * + * Learn more about piping and readable / writable streams here! + * https://www.freecodecamp.org/news/node-js-streams-everything-you-need-to-know-c9141306be93/ + * + * @param res the writable stream response object that will transfer the generated zip file + * @param mutator the callback function used to actually modify and insert information into the zip instance + */ +export async function BuildAndDispatchZip(res: express.Response, mutator: ZipMutator): Promise { + res.set('Content-disposition', `attachment;`); + res.set('Content-Type', "application/zip"); + const zip = Archiver('zip'); + zip.pipe(res); + await mutator(zip); + return zip.finalize(); +} + +/** + * This function starts with a single document id as a seed, + * typically that of a collection, and then descends the entire tree + * of image or collection documents that are reachable from that seed. + * @param seedId the id of the root of the subtree we're trying to capture, interesting only if it's a collection + * @param hierarchy the data structure we're going to use to record the nesting of the collections and images as we descend + */ + +/* +Below is an example of the JSON hierarchy built from two images contained inside a collection titled 'a nested collection', +following the general recursive structure shown immediately below +{ + "parent folder name":{ + "first child's fild name":"first child's url" + ... + "nth child's fild name":"nth child's url" + } +} +{ + "a nested collection (865c4734-c036-4d67-a588-c71bb43d1440)":{ + "an image of a cat (ace99ffd-8ed8-4026-a5d5-a353fff57bdd).jpg":"https://upload.wikimedia.org/wikipedia/commons/thumb/3/3a/Cat03.jpg/1200px-Cat03.jpg", + "1*SGJw31T5Q9Zfsk24l2yirg.gif (9321cc9b-9b3e-4cb6-b99c-b7e667340f05).gif":"https://cdn-media-1.freecodecamp.org/images/1*SGJw31T5Q9Zfsk24l2yirg.gif" + } +} +*/ +async function buildHierarchyRecursive(seedId: string, hierarchy: Hierarchy): Promise { + const { title, data } = await getData(seedId); + const label = `${title} (${seedId})`; + // is the document a collection? + if (Array.isArray(data)) { + // recurse over all documents in the collection. 
+ const local: Hierarchy = {}; // create a child hierarchy for this level, which will get passed in as the parent of the recursive call + hierarchy[label] = local; // store it at the index in the parent, so we'll end up with a map of maps of maps + await Promise.all(data.map(proxy => buildHierarchyRecursive(proxy.fieldId, local))); + } else { + // now, data can only be a string, namely the url of the image + const filename = label + path.extname(data); // this is the file name under which the output image will be stored + hierarchy[filename] = data; + } +} + +/** + * This is a very specific utility method to help traverse the database + * to parse data and titles out of images and collections alone. + * + * We don't know if the document id given to is corresponds to a view document or a data + * document. If it's a data document, the response from the database will have + * a data field. If not, call recursively on the proto, and resolve with *its* data + * + * @param targetId the id of the Dash document whose data is being requests + * @returns the data of the document, as well as its title + */ +async function getData(targetId: string): Promise { + return new Promise((resolve, reject) => { + Database.Instance.getDocument(targetId, async (result: any) => { + const { data, proto, title } = result.fields; + if (data) { + if (data.url) { + resolve({ data: data.url, title }); + } else if (data.fields) { + resolve({ data: data.fields, title }); + } else { + reject(); + } + } else if (proto) { + getData(proto.fieldId).then(resolve, reject); + } else { + reject(); + } + }); + }); +} + +/** + * + * @param file the zip file to which we write the files + * @param hierarchy the data structure from which we read, defining the nesting of the documents in the zip + * @param prefix lets us create nested folders in the zip file by continually appending to the end + * of the prefix with each layer of recursion. + * + * Function Call #1 => "Dash Export" + * Function Call #2 => "Dash Export/a nested collection" + * Function Call #3 => "Dash Export/a nested collection/lowest level collection" + * ... + */ +async function writeHierarchyRecursive(file: Archiver.Archiver, hierarchy: Hierarchy, prefix = "Dash Export"): Promise { + for (const documentTitle of Object.keys(hierarchy)) { + const result = hierarchy[documentTitle]; + // base case or leaf node, we've hit a url (image) + if (typeof result === "string") { + let path: string; + let matches: RegExpExecArray | null; + if ((matches = /\:1050\/files\/(upload\_[\da-z]{32}.*)/g.exec(result)) !== null) { + // image already exists on our server + path = `${__dirname}/public/files/${matches[1]}`; + } else { + // the image doesn't already exist on our server (may have been dragged + // and dropped in the browser and thus hosted remotely) so we upload it + // to our server and point the zip file to it, so it can bundle up the bytes + const information = await DashUploadUtils.UploadImage(result); + path = information.mediaPaths[0]; + } + // write the file specified by the path to the directory in the + // zip file given by the prefix. 
+ file.file(path, { name: documentTitle, prefix }); + } else { + // we've hit a collection, so we have to recurse + await writeHierarchyRecursive(file, result, `${prefix}/${documentTitle}`); + } + } +} \ No newline at end of file diff --git a/src/server/ApiManagers/ExportManager.ts b/src/server/ApiManagers/ExportManager.ts deleted file mode 100644 index fc6ba0d22..000000000 --- a/src/server/ApiManagers/ExportManager.ts +++ /dev/null @@ -1,267 +0,0 @@ -import ApiManager, { Registration } from "./ApiManager"; -import { Method } from "../RouteManager"; -import RouteSubscriber from "../RouteSubscriber"; -import * as Archiver from 'archiver'; -import * as express from 'express'; -import { Database } from "../database"; -import * as path from "path"; -import { DashUploadUtils } from "../DashUploadUtils"; -import { publicDirectory } from ".."; - -export type Hierarchy = { [id: string]: string | Hierarchy }; -export type ZipMutator = (file: Archiver.Archiver) => void | Promise; -export interface DocumentElements { - data: string | any[]; - title: string; -} - -export default class DownloadManager extends ApiManager { - - protected initialize(register: Registration): void { - - /** - * Let's say someone's using Dash to organize images in collections. - * This lets them export the hierarchy they've built to their - * own file system in a useful format. - * - * This handler starts with a single document id (interesting only - * if it's that of a collection). It traverses the database, captures - * the nesting of only nested images or collections, writes - * that to a zip file and returns it to the client for download. - */ - register({ - method: Method.GET, - subscription: new RouteSubscriber("imageHierarchyExport").add('docId'), - onValidation: async ({ req, res }) => { - const id = req.params.docId; - const hierarchy: Hierarchy = {}; - await buildHierarchyRecursive(id, hierarchy); - return BuildAndDispatchZip(res, zip => writeHierarchyRecursive(zip, hierarchy)); - } - }); - - register({ - method: Method.GET, - subscription: new RouteSubscriber("downloadId").add("docId"), - onValidation: async ({ req, res }) => { - return BuildAndDispatchZip(res, async zip => { - const { id, docs, files } = await getDocs(req.params.docId); - const docString = JSON.stringify({ id, docs }); - zip.append(docString, { name: "doc.json" }); - files.forEach(val => { - zip.file(publicDirectory + val, { name: val.substring(1) }); - }); - }); - } - }); - - register({ - method: Method.GET, - subscription: new RouteSubscriber("/serializeDoc").add("docId"), - onValidation: async ({ req, res }) => { - const { docs, files } = await getDocs(req.params.docId); - res.send({ docs, files: Array.from(files) }); - } - }); - - - } - -} - -async function getDocs(id: string) { - const files = new Set(); - const docs: { [id: string]: any } = {}; - const fn = (doc: any): string[] => { - const id = doc.id; - if (typeof id === "string" && id.endsWith("Proto")) { - //Skip protos - return []; - } - const ids: string[] = []; - for (const key in doc.fields) { - if (!doc.fields.hasOwnProperty(key)) { - continue; - } - const field = doc.fields[key]; - if (field === undefined || field === null) { - continue; - } - - if (field.__type === "proxy" || field.__type === "prefetch_proxy") { - ids.push(field.fieldId); - } else if (field.__type === "script" || field.__type === "computed") { - if (field.captures) { - ids.push(field.captures.fieldId); - } - } else if (field.__type === "list") { - ids.push(...fn(field)); - } else if (typeof field === "string") { 
- const re = /"(?:dataD|d)ocumentId"\s*:\s*"([\w\-]*)"/g; - let match: string[] | null; - while ((match = re.exec(field)) !== null) { - ids.push(match[1]); - } - } else if (field.__type === "RichTextField") { - const re = /"href"\s*:\s*"(.*?)"/g; - let match: string[] | null; - while ((match = re.exec(field.Data)) !== null) { - const urlString = match[1]; - const split = new URL(urlString).pathname.split("doc/"); - if (split.length > 1) { - ids.push(split[split.length - 1]); - } - } - const re2 = /"src"\s*:\s*"(.*?)"/g; - while ((match = re2.exec(field.Data)) !== null) { - const urlString = match[1]; - const pathname = new URL(urlString).pathname; - files.add(pathname); - } - } else if (["audio", "image", "video", "pdf", "web"].includes(field.__type)) { - const url = new URL(field.url); - const pathname = url.pathname; - files.add(pathname); - } - } - - if (doc.id) { - docs[doc.id] = doc; - } - return ids; - }; - await Database.Instance.visit([id], fn); - return { id, docs, files }; -} - -/** - * This utility function factors out the process - * of creating a zip file and sending it back to the client - * by piping it into a response. - * - * Learn more about piping and readable / writable streams here! - * https://www.freecodecamp.org/news/node-js-streams-everything-you-need-to-know-c9141306be93/ - * - * @param res the writable stream response object that will transfer the generated zip file - * @param mutator the callback function used to actually modify and insert information into the zip instance - */ -export async function BuildAndDispatchZip(res: express.Response, mutator: ZipMutator): Promise { - res.set('Content-disposition', `attachment;`); - res.set('Content-Type', "application/zip"); - const zip = Archiver('zip'); - zip.pipe(res); - await mutator(zip); - return zip.finalize(); -} - -/** - * This function starts with a single document id as a seed, - * typically that of a collection, and then descends the entire tree - * of image or collection documents that are reachable from that seed. - * @param seedId the id of the root of the subtree we're trying to capture, interesting only if it's a collection - * @param hierarchy the data structure we're going to use to record the nesting of the collections and images as we descend - */ - -/* -Below is an example of the JSON hierarchy built from two images contained inside a collection titled 'a nested collection', -following the general recursive structure shown immediately below -{ - "parent folder name":{ - "first child's fild name":"first child's url" - ... - "nth child's fild name":"nth child's url" - } -} -{ - "a nested collection (865c4734-c036-4d67-a588-c71bb43d1440)":{ - "an image of a cat (ace99ffd-8ed8-4026-a5d5-a353fff57bdd).jpg":"https://upload.wikimedia.org/wikipedia/commons/thumb/3/3a/Cat03.jpg/1200px-Cat03.jpg", - "1*SGJw31T5Q9Zfsk24l2yirg.gif (9321cc9b-9b3e-4cb6-b99c-b7e667340f05).gif":"https://cdn-media-1.freecodecamp.org/images/1*SGJw31T5Q9Zfsk24l2yirg.gif" - } -} -*/ -async function buildHierarchyRecursive(seedId: string, hierarchy: Hierarchy): Promise { - const { title, data } = await getData(seedId); - const label = `${title} (${seedId})`; - // is the document a collection? - if (Array.isArray(data)) { - // recurse over all documents in the collection. 
- const local: Hierarchy = {}; // create a child hierarchy for this level, which will get passed in as the parent of the recursive call - hierarchy[label] = local; // store it at the index in the parent, so we'll end up with a map of maps of maps - await Promise.all(data.map(proxy => buildHierarchyRecursive(proxy.fieldId, local))); - } else { - // now, data can only be a string, namely the url of the image - const filename = label + path.extname(data); // this is the file name under which the output image will be stored - hierarchy[filename] = data; - } -} - -/** - * This is a very specific utility method to help traverse the database - * to parse data and titles out of images and collections alone. - * - * We don't know if the document id given to is corresponds to a view document or a data - * document. If it's a data document, the response from the database will have - * a data field. If not, call recursively on the proto, and resolve with *its* data - * - * @param targetId the id of the Dash document whose data is being requests - * @returns the data of the document, as well as its title - */ -async function getData(targetId: string): Promise { - return new Promise((resolve, reject) => { - Database.Instance.getDocument(targetId, async (result: any) => { - const { data, proto, title } = result.fields; - if (data) { - if (data.url) { - resolve({ data: data.url, title }); - } else if (data.fields) { - resolve({ data: data.fields, title }); - } else { - reject(); - } - } else if (proto) { - getData(proto.fieldId).then(resolve, reject); - } else { - reject(); - } - }); - }); -} - -/** - * - * @param file the zip file to which we write the files - * @param hierarchy the data structure from which we read, defining the nesting of the documents in the zip - * @param prefix lets us create nested folders in the zip file by continually appending to the end - * of the prefix with each layer of recursion. - * - * Function Call #1 => "Dash Export" - * Function Call #2 => "Dash Export/a nested collection" - * Function Call #3 => "Dash Export/a nested collection/lowest level collection" - * ... - */ -async function writeHierarchyRecursive(file: Archiver.Archiver, hierarchy: Hierarchy, prefix = "Dash Export"): Promise { - for (const documentTitle of Object.keys(hierarchy)) { - const result = hierarchy[documentTitle]; - // base case or leaf node, we've hit a url (image) - if (typeof result === "string") { - let path: string; - let matches: RegExpExecArray | null; - if ((matches = /\:1050\/files\/(upload\_[\da-z]{32}.*)/g.exec(result)) !== null) { - // image already exists on our server - path = `${__dirname}/public/files/${matches[1]}`; - } else { - // the image doesn't already exist on our server (may have been dragged - // and dropped in the browser and thus hosted remotely) so we upload it - // to our server and point the zip file to it, so it can bundle up the bytes - const information = await DashUploadUtils.UploadImage(result); - path = information.mediaPaths[0]; - } - // write the file specified by the path to the directory in the - // zip file given by the prefix. 
- file.file(path, { name: documentTitle, prefix }); - } else { - // we've hit a collection, so we have to recurse - await writeHierarchyRecursive(file, result, `${prefix}/${documentTitle}`); - } - } -} \ No newline at end of file diff --git a/src/server/index.ts b/src/server/index.ts index 569f2e139..57c66bc28 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -13,7 +13,7 @@ import UtilManager from './ApiManagers/UtilManager'; import SearchManager from './ApiManagers/SearchManager'; import UserManager from './ApiManagers/UserManager'; import { WebSocket } from './Websocket/Websocket'; -import DownloadManager from './ApiManagers/ExportManager'; +import DownloadManager from './ApiManagers/DownloadManager'; import { GoogleCredentialsLoader } from './credentials/CredentialsLoader'; import DeleteManager from "./ApiManagers/DeleteManager"; import PDFManager from "./ApiManagers/PDFManager"; -- cgit v1.2.3-70-g09d2 From 025a25a7fbf88c53d153cec19b8f93097116210e Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Tue, 19 Nov 2019 13:30:27 -0500 Subject: fixed authorization --- src/client/views/nodes/AudioBox.tsx | 3 +-- src/server/apis/google/GoogleApiServerUtils.ts | 12 +++++++----- src/server/credentials/google_project_credentials.json | 11 ++++------- 3 files changed, 12 insertions(+), 14 deletions(-) (limited to 'src') diff --git a/src/client/views/nodes/AudioBox.tsx b/src/client/views/nodes/AudioBox.tsx index 86bd23b67..77b10e395 100644 --- a/src/client/views/nodes/AudioBox.tsx +++ b/src/client/views/nodes/AudioBox.tsx @@ -8,7 +8,6 @@ import { DocExtendableComponent } from "../DocComponent"; import { makeInterface, createSchema } from "../../../new_fields/Schema"; import { documentSchema } from "../../../new_fields/documentSchemas"; import { Utils, returnTrue, emptyFunction, returnOne, returnTransparent } from "../../../Utils"; -import { RouteStore } from "../../../server/RouteStore"; import { runInAction, observable, reaction, IReactionDisposer, computed, action } from "mobx"; import { DateField } from "../../../new_fields/DateField"; import { SelectionManager } from "../../util/SelectionManager"; @@ -140,7 +139,7 @@ export class AudioBox extends DocExtendableComponent { let headerParameters = { headers: { 'Content-Type': 'application/x-www-form-urlencoded' } }; + const { client_id, client_secret } = GoogleCredentialsLoader.ProjectCredentials; let url = `https://oauth2.googleapis.com/token?${qs.stringify({ refreshToken: credentials.refresh_token, - grant_type: "refresh_token", - ...installed + client_id, + client_secret, + grant_type: "refresh_token" })}`; const { access_token, expires_in } = await new Promise(async resolve => { const response = await request.post(url, headerParameters); diff --git a/src/server/credentials/google_project_credentials.json b/src/server/credentials/google_project_credentials.json index 5d9c62eb1..955c5a3c1 100644 --- a/src/server/credentials/google_project_credentials.json +++ b/src/server/credentials/google_project_credentials.json @@ -1,14 +1,11 @@ { "installed": { - "client_id": "1005546247619-kqpnvh42mpa803tem8556b87umi4j9r0.apps.googleusercontent.com", - "project_id": "brown-dash", + "client_id": "343179513178-ud6tvmh275r2fq93u9eesrnc66t6akh9.apps.googleusercontent.com", + "project_id": "quickstart-1565056383187", "auth_uri": "https://accounts.google.com/o/oauth2/auth", "token_uri": "https://oauth2.googleapis.com/token", "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", - "client_secret": "WshLb5TH9SdFVGGbQcnYj7IU", - 
"redirect_uris": [ - "urn:ietf:wg:oauth:2.0:oob", - "http://localhost" - ] + "client_secret": "w8KIFSc0MQpmUYHed4qEzn8b", + "redirect_uris": ["urn:ietf:wg:oauth:2.0:oob", "http://localhost"] } } \ No newline at end of file -- cgit v1.2.3-70-g09d2 From cabb1c986be9a15cfb650fd365283557c1347737 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Tue, 19 Nov 2019 14:28:27 -0500 Subject: commented out traces --- .../views/collections/collectionFreeForm/CollectionFreeFormView.tsx | 2 +- src/client/views/nodes/CollectionFreeFormDocumentView.tsx | 2 +- src/client/views/nodes/DocumentView.tsx | 6 +++--- src/client/views/nodes/FormattedTextBox.tsx | 2 +- src/client/views/nodes/ImageBox.tsx | 2 +- src/client/views/pdf/PDFViewer.tsx | 4 ++-- 6 files changed, 9 insertions(+), 9 deletions(-) (limited to 'src') diff --git a/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx b/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx index 3b313c34a..59858df8d 100644 --- a/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx +++ b/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx @@ -858,7 +858,7 @@ export class CollectionFreeFormView extends CollectionSubView(PanZoomDocument) { return eles; } render() { - trace(); + // trace(); // update the actual dimensions of the collection so that they can inquired (e.g., by a minimap) this.Document.fitX = this.contentBounds && this.contentBounds.x; this.Document.fitY = this.contentBounds && this.contentBounds.y; diff --git a/src/client/views/nodes/CollectionFreeFormDocumentView.tsx b/src/client/views/nodes/CollectionFreeFormDocumentView.tsx index d0e1d1922..bbd9859a8 100644 --- a/src/client/views/nodes/CollectionFreeFormDocumentView.tsx +++ b/src/client/views/nodes/CollectionFreeFormDocumentView.tsx @@ -87,7 +87,7 @@ export class CollectionFreeFormDocumentView extends DocComponent this.dataProvider ? this.dataProvider.height : this.panelHeight(); render() { - trace(); + // trace(); return
(Docu @computed get finalLayoutKey() { return this.props.layoutKey || "layout" } childScaling = () => (this.layoutDoc.fitWidth ? this.props.PanelWidth() / this.nativeWidth : this.props.ContentScaling()); @computed get contents() { - trace(); + // trace(); return ((Docu } @computed get innards() { - trace(); + // trace(); const showOverlays = this.props.showOverlays ? this.props.showOverlays(this.Document) : undefined; const showTitle = showOverlays && "title" in showOverlays ? showOverlays.title : this.getLayoutPropStr("showTitle"); const showCaption = showOverlays && "caption" in showOverlays ? showOverlays.caption : this.getLayoutPropStr("showCaption"); @@ -640,7 +640,7 @@ export class DocumentView extends DocComponent(Docu } render() { if (!this.props.Document) return (null); - trace(); + // trace(); const ruleColor = this.props.ruleProvider ? StrCast(this.props.ruleProvider["ruleColor_" + this.Document.heading]) : undefined; const ruleRounding = this.props.ruleProvider ? StrCast(this.props.ruleProvider["ruleRounding_" + this.Document.heading]) : undefined; const colorSet = this.setsLayoutProp("backgroundColor"); diff --git a/src/client/views/nodes/FormattedTextBox.tsx b/src/client/views/nodes/FormattedTextBox.tsx index 4a79a44d4..ef54a387c 100644 --- a/src/client/views/nodes/FormattedTextBox.tsx +++ b/src/client/views/nodes/FormattedTextBox.tsx @@ -1015,7 +1015,7 @@ export class FormattedTextBox extends DocAnnotatableComponent<(FieldViewProps & @computed get sidebarWidth() { return Number(this.sidebarWidthPercent.substring(0, this.sidebarWidthPercent.length - 1)) / 100 * this.props.PanelWidth(); } @computed get annotationsKey() { return "annotations"; } render() { - trace(); + // trace(); let rounded = StrCast(this.layoutDoc.borderRounding) === "100%" ? "-rounded" : ""; let interactive = InkingControl.Instance.selectedTool || this.layoutDoc.isBackground; if (this.props.isSelected()) { diff --git a/src/client/views/nodes/ImageBox.tsx b/src/client/views/nodes/ImageBox.tsx index 4ed350d2b..7a9ebc00a 100644 --- a/src/client/views/nodes/ImageBox.tsx +++ b/src/client/views/nodes/ImageBox.tsx @@ -267,7 +267,7 @@ export class ImageBox extends DocAnnotatableComponent {this.nonDocAnnotations.sort((a, b) => NumCast(a.y) - NumCast(b.y)).map((anno, index) => )} @@ -671,7 +671,7 @@ export class PDFViewer extends DocAnnotatableComponent this.props.PanelHeight() / this.props.ContentScaling() * 72 / 96; contentZoom = () => this._zoomed; render() { - trace(); + // trace(); return !this.extensionDoc ? (null) :
Date: Tue, 19 Nov 2019 16:34:20 -0500
Subject: passport fix

---
 src/server/authentication/config/passport.ts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

(limited to 'src')

diff --git a/src/server/authentication/config/passport.ts b/src/server/authentication/config/passport.ts
index 0b15c3a36..726df7fd7 100644
--- a/src/server/authentication/config/passport.ts
+++ b/src/server/authentication/config/passport.ts
@@ -40,7 +40,7 @@ export let isAuthenticated = (req: Request, res: Response, next: NextFunction) =
 export let isAuthorized = (req: Request, res: Response, next: NextFunction) => {
     const provider = req.path.split("/").slice(-1)[0];
-    if (_.find((req.user).tokens, { kind: provider })) {
+    if (_.find((req.user as any).tokens!, { kind: provider })) {
         next();
     } else {
         res.redirect(`/auth/${provider}`);
-- cgit v1.2.3-70-g09d2

From 8b6f70cd0ddac6d2669c3b0624d59a866737497c Mon Sep 17 00:00:00 2001
From: Sam Wilkins
Date: Tue, 19 Nov 2019 17:23:55 -0500
Subject: small

---
 src/server/index.ts | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

(limited to 'src')

diff --git a/src/server/index.ts b/src/server/index.ts
index 57c66bc28..ad9c5886c 100644
--- a/src/server/index.ts
+++ b/src/server/index.ts
@@ -56,8 +56,7 @@ async function preliminaryFunctions() {
 * with the server */
 function routeSetter(router: RouteManager) {
-    // initialize API Managers
-    [
+    const managers = [
         new UserManager(),
         new UploadManager(),
         new DownloadManager(),
         new DeleteManager(),
         new PDFManager(),
         new SearchManager(),
         new UtilManager(),
         new GeneralGoogleManager(),
         new GooglePhotosManager(),
-    ].forEach(manager => manager.register(router));
+    ];
+
+    // initialize API Managers
+    managers.forEach(manager => manager.register(router));
     // initialize the web socket (bidirectional communication: if a user changes
     // a field on one client, that change must be broadcast to all other clients)
-- cgit v1.2.3-70-g09d2
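
A note on the log_execution helper introduced in ActionUtilities.ts in the "log" patch: the listing above has lost its generic type arguments to extraction (the signature shows a bare Promise). The sketch below is only an illustration of how such a wrapper can be typed and used; the T parameter and the fact that it returns the wrapped task's result are assumptions here, not necessarily the project's exact signature.

// Sketch only, not the project's exact code: the generic parameter T and the
// returned result are assumptions; the listing above dropped its type arguments.
export async function log_execution<T>(startMessage: string, endMessage: string, contents: () => T | Promise<T>): Promise<T> {
    // print the start message in cyan (the same ANSI escape used in the patch), then run the task
    console.log('\x1b[36m%s\x1b[0m', `${startMessage}...`);
    const result = await contents();
    // print the completion message once the task has resolved
    console.log(endMessage);
    return result;
}

// Usage mirroring src/server/index.ts in the "log" patch:
// await log_execution("attempting to initialize database connection", "connected", Database.tryInitializeConnection);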
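
The "api key route switched to environment" patch removes the /cognitiveServices route from GeneralGoogleManager and has the client fetch environment/<SERVICE> through Utils.getApiKey, but the server-side handler for that route is not part of the hunks shown above. A minimal, hypothetical sketch of what such a handler could look like, following the ApiManager registration pattern used elsewhere in this series and assuming the keys are still read from process.env (FACE, VISION, HANDWRITING):

import ApiManager, { Registration } from "./ApiManager";
import { Method } from "../RouteManager";
import RouteSubscriber from "../RouteSubscriber";

// Hypothetical sketch only: the real handler for the client's environment/<TARGET>
// fetch lives outside the patches shown here. This mirrors the register({ ... })
// pattern used by the other managers and assumes keys come from process.env.
export default class EnvironmentManager extends ApiManager {
    protected initialize(register: Registration): void {
        register({
            method: Method.GET,
            subscription: new RouteSubscriber("environment").add("target"),
            onValidation: ({ req, res }) => {
                // e.g. FACE, VISION or HANDWRITING, matching the map deleted from GeneralGoogleManager
                res.send(process.env[req.params.target]);
            }
        });
    }
}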