| author | Stanley Yip <stanley_yip@brown.edu> | 2020-01-08 13:47:29 -0500 |
|---|---|---|
| committer | Stanley Yip <stanley_yip@brown.edu> | 2020-01-08 13:47:29 -0500 |
| commit | abfa42b6f2cf863deee19aac19328a23687464cb (patch) | |
| tree | b481f23ffa7bccbde7a31de34f50d765b6b73162 /src/server | |
| parent | d8fc218f3481728f221ceacc60ac4bc553f8e295 (diff) | |
| parent | 19a71cb2788b9c1c8d8ced4af285bf91033ba626 (diff) | |
Merge branch 'master' of https://github.com/browngraphicslab/Dash-Web into pen
Diffstat (limited to 'src/server')
40 files changed, 3599 insertions, 1621 deletions
diff --git a/src/server/ActionUtilities.ts b/src/server/ActionUtilities.ts new file mode 100644 index 000000000..a93566fb1 --- /dev/null +++ b/src/server/ActionUtilities.ts @@ -0,0 +1,152 @@ +import { readFile, writeFile, exists, mkdir, unlink, createWriteStream } from 'fs'; +import { ExecOptions } from 'shelljs'; +import { exec } from 'child_process'; +import * as path from 'path'; +import * as rimraf from "rimraf"; +import { yellow, Color } from 'colors'; +import * as nodemailer from "nodemailer"; +import { MailOptions } from "nodemailer/lib/json-transport"; + +const projectRoot = path.resolve(__dirname, "../../"); +export function pathFromRoot(relative?: string) { + if (!relative) { + return projectRoot; + } + return path.resolve(projectRoot, relative); +} + +export async function fileDescriptorFromStream(path: string) { + const logStream = createWriteStream(path); + return new Promise<number>(resolve => logStream.on("open", resolve)); +} + +export const command_line = (command: string, fromDirectory?: string) => { + return new Promise<string>((resolve, reject) => { + const options: ExecOptions = {}; + if (fromDirectory) { + options.cwd = fromDirectory ? path.resolve(projectRoot, fromDirectory) : projectRoot; + } + exec(command, options, (err, stdout) => err ? reject(err) : resolve(stdout)); + }); +}; + +export const read_text_file = (relativePath: string) => { + const target = path.resolve(__dirname, relativePath); + return new Promise<string>((resolve, reject) => { + readFile(target, (err, data) => err ? reject(err) : resolve(data.toString())); + }); +}; + +export const write_text_file = (relativePath: string, contents: any) => { + const target = path.resolve(__dirname, relativePath); + return new Promise<void>((resolve, reject) => { + writeFile(target, contents, (err) => err ? reject(err) : resolve()); + }); +}; + +export type Messager<T> = (outcome: { result: T | undefined, error: Error | null }) => string; + +export interface LogData<T> { + startMessage: string; + // if you care about the execution informing your log, you can pass in a function that takes in the result and a potential error and decides what to write + endMessage: string | Messager<T>; + action: () => T | Promise<T>; + color?: Color; +} + +let current = Math.ceil(Math.random() * 20); +export async function log_execution<T>({ startMessage, endMessage, action, color }: LogData<T>): Promise<T | undefined> { + let result: T | undefined = undefined, error: Error | null = null; + const resolvedColor = color || `\x1b[${31 + ++current % 6}m%s\x1b[0m`; + log_helper(`${startMessage}...`, resolvedColor); + try { + result = await action(); + } catch (e) { + error = e; + } finally { + log_helper(typeof endMessage === "string" ? endMessage : endMessage({ result, error }), resolvedColor); + } + return result; +} + +function log_helper(content: string, color: Color | string) { + if (typeof color === "string") { + console.log(color, content); + } else { + console.log(color(content)); + } +} + +export function logPort(listener: string, port: number) { + console.log(`${listener} listening on port ${yellow(String(port))}`); +} + +export function msToTime(duration: number) { + const milliseconds = Math.floor((duration % 1000) / 100), + seconds = Math.floor((duration / 1000) % 60), + minutes = Math.floor((duration / (1000 * 60)) % 60), + hours = Math.floor((duration / (1000 * 60 * 60)) % 24); + + const hoursS = (hours < 10) ? "0" + hours : hours; + const minutesS = (minutes < 10) ? 
"0" + minutes : minutes; + const secondsS = (seconds < 10) ? "0" + seconds : seconds; + + return hoursS + ":" + minutesS + ":" + secondsS + "." + milliseconds; +} + +export const createIfNotExists = async (path: string) => { + if (await new Promise<boolean>(resolve => exists(path, resolve))) { + return true; + } + return new Promise<boolean>(resolve => mkdir(path, error => resolve(error === null))); +}; + +export async function Prune(rootDirectory: string): Promise<boolean> { + const error = await new Promise<Error>(resolve => rimraf(rootDirectory, resolve)); + return error === null; +} + +export const Destroy = (mediaPath: string) => new Promise<boolean>(resolve => unlink(mediaPath, error => resolve(error === null))); + +export namespace Email { + + const smtpTransport = nodemailer.createTransport({ + service: 'Gmail', + auth: { + user: 'brownptcdash@gmail.com', + pass: 'browngfx1' + } + }); + + export interface DispatchFailure { + recipient: string; + error: Error; + } + + export async function dispatchAll(recipients: string[], subject: string, content: string) { + const failures: DispatchFailure[] = []; + await Promise.all(recipients.map(async (recipient: string) => { + let error: Error | null; + if ((error = await Email.dispatch(recipient, subject, content)) !== null) { + failures.push({ + recipient, + error + }); + } + })); + return failures.length ? failures : undefined; + } + + export async function dispatch(recipient: string, subject: string, content: string): Promise<Error | null> { + const mailOptions = { + to: recipient, + from: 'brownptcdash@gmail.com', + subject, + text: `Hello ${recipient.split("@")[0]},\n\n${content}` + } as MailOptions; + return new Promise<Error | null>(resolve => { + smtpTransport.sendMail(mailOptions, resolve); + }); + } + +}
\ No newline at end of file
diff --git a/src/server/ApiManagers/ApiManager.ts b/src/server/ApiManagers/ApiManager.ts
new file mode 100644
index 000000000..e2b01d585
--- /dev/null
+++ b/src/server/ApiManagers/ApiManager.ts
@@ -0,0 +1,11 @@
+import RouteManager, { RouteInitializer } from "../RouteManager";
+
+export type Registration = (initializer: RouteInitializer) => void;
+
+export default abstract class ApiManager {
+    protected abstract initialize(register: Registration): void;
+
+    public register(register: Registration) {
+        this.initialize(register);
+    }
+}
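Every concrete manager in this commit follows the same pattern: subclass `ApiManager` and declare routes inside `initialize`. A minimal hypothetical subclass (the `/ping` route is invented for illustration) would look like this:

```typescript
import ApiManager, { Registration } from "./ApiManager";
import { Method } from "../RouteManager";

// Hypothetical manager: answers GET /ping with a fixed payload.
export default class PingManager extends ApiManager {
    protected initialize(register: Registration): void {
        register({
            method: Method.GET,
            subscription: "/ping",
            secureHandler: ({ res }) => res.send("pong"),
        });
    }
}
```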
\ No newline at end of file diff --git a/src/server/ApiManagers/DeleteManager.ts b/src/server/ApiManagers/DeleteManager.ts new file mode 100644 index 000000000..88dfa6a64 --- /dev/null +++ b/src/server/ApiManagers/DeleteManager.ts @@ -0,0 +1,63 @@ +import ApiManager, { Registration } from "./ApiManager"; +import { Method, _permission_denied } from "../RouteManager"; +import { WebSocket } from "../Websocket/Websocket"; +import { Database } from "../database"; + +export default class DeleteManager extends ApiManager { + + protected initialize(register: Registration): void { + + register({ + method: Method.GET, + subscription: "/delete", + secureHandler: async ({ res, isRelease }) => { + if (isRelease) { + return _permission_denied(res, deletionPermissionError); + } + await WebSocket.deleteFields(); + res.redirect("/home"); + } + }); + + register({ + method: Method.GET, + subscription: "/deleteAll", + secureHandler: async ({ res, isRelease }) => { + if (isRelease) { + return _permission_denied(res, deletionPermissionError); + } + await WebSocket.deleteAll(); + res.redirect("/home"); + } + }); + + + register({ + method: Method.GET, + subscription: "/deleteWithAux", + secureHandler: async ({ res, isRelease }) => { + if (isRelease) { + return _permission_denied(res, deletionPermissionError); + } + await Database.Auxiliary.DeleteAll(); + res.redirect("/delete"); + } + }); + + register({ + method: Method.GET, + subscription: "/deleteWithGoogleCredentials", + secureHandler: async ({ res, isRelease }) => { + if (isRelease) { + return _permission_denied(res, deletionPermissionError); + } + await Database.Auxiliary.GoogleAuthenticationToken.DeleteAll(); + res.redirect("/delete"); + } + }); + + } + +} + +const deletionPermissionError = "Cannot perform a delete operation outside of the development environment!"; diff --git a/src/server/ApiManagers/DownloadManager.ts b/src/server/ApiManagers/DownloadManager.ts new file mode 100644 index 000000000..1bb84f374 --- /dev/null +++ b/src/server/ApiManagers/DownloadManager.ts @@ -0,0 +1,267 @@ +import ApiManager, { Registration } from "./ApiManager"; +import { Method } from "../RouteManager"; +import RouteSubscriber from "../RouteSubscriber"; +import * as Archiver from 'archiver'; +import * as express from 'express'; +import { Database } from "../database"; +import * as path from "path"; +import { DashUploadUtils, SizeSuffix } from "../DashUploadUtils"; +import { publicDirectory } from ".."; +import { serverPathToFile, Directory } from "./UploadManager"; + +export type Hierarchy = { [id: string]: string | Hierarchy }; +export type ZipMutator = (file: Archiver.Archiver) => void | Promise<void>; +export interface DocumentElements { + data: string | any[]; + title: string; +} + +export default class DownloadManager extends ApiManager { + + protected initialize(register: Registration): void { + + /** + * Let's say someone's using Dash to organize images in collections. + * This lets them export the hierarchy they've built to their + * own file system in a useful format. + * + * This handler starts with a single document id (interesting only + * if it's that of a collection). It traverses the database, captures + * the nesting of only nested images or collections, writes + * that to a zip file and returns it to the client for download. 
+ */ + register({ + method: Method.GET, + subscription: new RouteSubscriber("imageHierarchyExport").add('docId'), + secureHandler: async ({ req, res }) => { + const id = req.params.docId; + const hierarchy: Hierarchy = {}; + await buildHierarchyRecursive(id, hierarchy); + return BuildAndDispatchZip(res, zip => writeHierarchyRecursive(zip, hierarchy)); + } + }); + + register({ + method: Method.GET, + subscription: new RouteSubscriber("downloadId").add("docId"), + secureHandler: async ({ req, res }) => { + return BuildAndDispatchZip(res, async zip => { + const { id, docs, files } = await getDocs(req.params.docId); + const docString = JSON.stringify({ id, docs }); + zip.append(docString, { name: "doc.json" }); + files.forEach(val => { + zip.file(publicDirectory + val, { name: val.substring(1) }); + }); + }); + } + }); + + register({ + method: Method.GET, + subscription: new RouteSubscriber("serializeDoc").add("docId"), + secureHandler: async ({ req, res }) => { + const { docs, files } = await getDocs(req.params.docId); + res.send({ docs, files: Array.from(files) }); + } + }); + + } + +} + +async function getDocs(id: string) { + const files = new Set<string>(); + const docs: { [id: string]: any } = {}; + const fn = (doc: any): string[] => { + const id = doc.id; + if (typeof id === "string" && id.endsWith("Proto")) { + //Skip protos + return []; + } + const ids: string[] = []; + for (const key in doc.fields) { + if (!doc.fields.hasOwnProperty(key)) { + continue; + } + const field = doc.fields[key]; + if (field === undefined || field === null) { + continue; + } + + if (field.__type === "proxy" || field.__type === "prefetch_proxy") { + ids.push(field.fieldId); + } else if (field.__type === "script" || field.__type === "computed") { + if (field.captures) { + ids.push(field.captures.fieldId); + } + } else if (field.__type === "list") { + ids.push(...fn(field)); + } else if (typeof field === "string") { + const re = /"(?:dataD|d)ocumentId"\s*:\s*"([\w\-]*)"/g; + let match: string[] | null; + while ((match = re.exec(field)) !== null) { + ids.push(match[1]); + } + } else if (field.__type === "RichTextField") { + const re = /"href"\s*:\s*"(.*?)"/g; + let match: string[] | null; + while ((match = re.exec(field.Data)) !== null) { + const urlString = match[1]; + const split = new URL(urlString).pathname.split("doc/"); + if (split.length > 1) { + ids.push(split[split.length - 1]); + } + } + const re2 = /"src"\s*:\s*"(.*?)"/g; + while ((match = re2.exec(field.Data)) !== null) { + const urlString = match[1]; + const pathname = new URL(urlString).pathname; + files.add(pathname); + } + } else if (["audio", "image", "video", "pdf", "web"].includes(field.__type)) { + const url = new URL(field.url); + const pathname = url.pathname; + files.add(pathname); + } + } + + if (doc.id) { + docs[doc.id] = doc; + } + return ids; + }; + await Database.Instance.visit([id], fn); + return { id, docs, files }; +} + +/** + * This utility function factors out the process + * of creating a zip file and sending it back to the client + * by piping it into a response. + * + * Learn more about piping and readable / writable streams here! 
+ * https://www.freecodecamp.org/news/node-js-streams-everything-you-need-to-know-c9141306be93/ + * + * @param res the writable stream response object that will transfer the generated zip file + * @param mutator the callback function used to actually modify and insert information into the zip instance + */ +export async function BuildAndDispatchZip(res: express.Response, mutator: ZipMutator): Promise<void> { + res.set('Content-disposition', `attachment;`); + res.set('Content-Type', "application/zip"); + const zip = Archiver('zip'); + zip.pipe(res); + await mutator(zip); + return zip.finalize(); +} + +/** + * This function starts with a single document id as a seed, + * typically that of a collection, and then descends the entire tree + * of image or collection documents that are reachable from that seed. + * @param seedId the id of the root of the subtree we're trying to capture, interesting only if it's a collection + * @param hierarchy the data structure we're going to use to record the nesting of the collections and images as we descend + */ + +/* +Below is an example of the JSON hierarchy built from two images contained inside a collection titled 'a nested collection', +following the general recursive structure shown immediately below +{ + "parent folder name":{ + "first child's fild name":"first child's url" + ... + "nth child's fild name":"nth child's url" + } +} +{ + "a nested collection (865c4734-c036-4d67-a588-c71bb43d1440)":{ + "an image of a cat (ace99ffd-8ed8-4026-a5d5-a353fff57bdd).jpg":"https://upload.wikimedia.org/wikipedia/commons/thumb/3/3a/Cat03.jpg/1200px-Cat03.jpg", + "1*SGJw31T5Q9Zfsk24l2yirg.gif (9321cc9b-9b3e-4cb6-b99c-b7e667340f05).gif":"https://cdn-media-1.freecodecamp.org/images/1*SGJw31T5Q9Zfsk24l2yirg.gif" + } +} +*/ +async function buildHierarchyRecursive(seedId: string, hierarchy: Hierarchy): Promise<void> { + const { title, data } = await getData(seedId); + const label = `${title} (${seedId})`; + // is the document a collection? + if (Array.isArray(data)) { + // recurse over all documents in the collection. + const local: Hierarchy = {}; // create a child hierarchy for this level, which will get passed in as the parent of the recursive call + hierarchy[label] = local; // store it at the index in the parent, so we'll end up with a map of maps of maps + await Promise.all(data.map(proxy => buildHierarchyRecursive(proxy.fieldId, local))); + } else { + // now, data can only be a string, namely the url of the image + const filename = label + path.extname(data); // this is the file name under which the output image will be stored + hierarchy[filename] = data; + } +} + +/** + * This is a very specific utility method to help traverse the database + * to parse data and titles out of images and collections alone. + * + * We don't know if the document id given to is corresponds to a view document or a data + * document. If it's a data document, the response from the database will have + * a data field. 
If not, call recursively on the proto, and resolve with *its* data + * + * @param targetId the id of the Dash document whose data is being requests + * @returns the data of the document, as well as its title + */ +async function getData(targetId: string): Promise<DocumentElements> { + return new Promise<DocumentElements>((resolve, reject) => { + Database.Instance.getDocument(targetId, async (result: any) => { + const { data, proto, title } = result.fields; + if (data) { + if (data.url) { + resolve({ data: data.url, title }); + } else if (data.fields) { + resolve({ data: data.fields, title }); + } else { + reject(); + } + } else if (proto) { + getData(proto.fieldId).then(resolve, reject); + } else { + reject(); + } + }); + }); +} + +/** + * + * @param file the zip file to which we write the files + * @param hierarchy the data structure from which we read, defining the nesting of the documents in the zip + * @param prefix lets us create nested folders in the zip file by continually appending to the end + * of the prefix with each layer of recursion. + * + * Function Call #1 => "Dash Export" + * Function Call #2 => "Dash Export/a nested collection" + * Function Call #3 => "Dash Export/a nested collection/lowest level collection" + * ... + */ +async function writeHierarchyRecursive(file: Archiver.Archiver, hierarchy: Hierarchy, prefix = "Dash Export"): Promise<void> { + for (const documentTitle of Object.keys(hierarchy)) { + const result = hierarchy[documentTitle]; + // base case or leaf node, we've hit a url (image) + if (typeof result === "string") { + let path: string; + let matches: RegExpExecArray | null; + if ((matches = /\:1050\/files\/images\/(upload\_[\da-z]{32}.*)/g.exec(result)) !== null) { + // image already exists on our server + path = serverPathToFile(Directory.images, matches[1]); + } else { + // the image doesn't already exist on our server (may have been dragged + // and dropped in the browser and thus hosted remotely) so we upload it + // to our server and point the zip file to it, so it can bundle up the bytes + const information = await DashUploadUtils.UploadImage(result); + path = information.serverAccessPaths[SizeSuffix.Original]; + } + // write the file specified by the path to the directory in the + // zip file given by the prefix. + file.file(path, { name: documentTitle, prefix }); + } else { + // we've hit a collection, so we have to recurse + await writeHierarchyRecursive(file, result, `${prefix}/${documentTitle}`); + } + } +}
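For orientation, this is roughly how `BuildAndDispatchZip` is meant to be driven from a route handler. It is a sketch, not part of the commit: the route name and file path are invented, and the fragment is assumed to sit inside some ApiManager's `initialize`, with `Method` and `BuildAndDispatchZip` imported from the modules above.

```typescript
register({
    method: Method.GET,
    subscription: "/exampleZip", // hypothetical route
    secureHandler: async ({ res }) => {
        return BuildAndDispatchZip(res, async zip => {
            // Everything appended to the archiver instance ends up in the download.
            zip.append(JSON.stringify({ exported: new Date().toISOString() }), { name: "manifest.json" });
            zip.file("/absolute/path/to/cat.jpg", { name: "cat.jpg", prefix: "Dash Export" });
        });
    }
});
```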
\ No newline at end of file diff --git a/src/server/ApiManagers/GeneralGoogleManager.ts b/src/server/ApiManagers/GeneralGoogleManager.ts new file mode 100644 index 000000000..a5240edbc --- /dev/null +++ b/src/server/ApiManagers/GeneralGoogleManager.ts @@ -0,0 +1,61 @@ +import ApiManager, { Registration } from "./ApiManager"; +import { Method, _permission_denied } from "../RouteManager"; +import { GoogleApiServerUtils } from "../apis/google/GoogleApiServerUtils"; +import { Database } from "../database"; +import RouteSubscriber from "../RouteSubscriber"; + +const deletionPermissionError = "Cannot perform specialized delete outside of the development environment!"; + +const EndpointHandlerMap = new Map<GoogleApiServerUtils.Action, GoogleApiServerUtils.ApiRouter>([ + ["create", (api, params) => api.create(params)], + ["retrieve", (api, params) => api.get(params)], + ["update", (api, params) => api.batchUpdate(params)], +]); + +export default class GeneralGoogleManager extends ApiManager { + + protected initialize(register: Registration): void { + + register({ + method: Method.GET, + subscription: "/readGoogleAccessToken", + secureHandler: async ({ user, res }) => { + const token = await GoogleApiServerUtils.retrieveAccessToken(user.id); + if (!token) { + return res.send(GoogleApiServerUtils.generateAuthenticationUrl()); + } + return res.send(token); + } + }); + + register({ + method: Method.POST, + subscription: "/writeGoogleAccessToken", + secureHandler: async ({ user, req, res }) => { + res.send(await GoogleApiServerUtils.processNewUser(user.id, req.body.authenticationCode)); + } + }); + + register({ + method: Method.POST, + subscription: new RouteSubscriber("googleDocs").add("sector", "action"), + secureHandler: async ({ req, res, user }) => { + const sector: GoogleApiServerUtils.Service = req.params.sector as GoogleApiServerUtils.Service; + const action: GoogleApiServerUtils.Action = req.params.action as GoogleApiServerUtils.Action; + const endpoint = await GoogleApiServerUtils.GetEndpoint(GoogleApiServerUtils.Service[sector], user.id); + const handler = EndpointHandlerMap.get(action); + if (endpoint && handler) { + try { + const response = await handler(endpoint, req.body); + res.send(response.data); + } catch (e) { + res.send(e); + } + return; + } + res.send(undefined); + } + }); + + } +}
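From the browser's side, the two token routes above would be exercised roughly as follows. This is a sketch under the assumption that the client sends a JSON body and that body parsing is configured on the server; Dash's real client helpers are not part of this diff.

```typescript
// Returns either a cached access token or, if the user has never linked
// Google, an authentication URL for them to visit instead.
const tokenOrAuthUrl = await (await fetch("/readGoogleAccessToken")).text();

// After the user finishes the OAuth consent screen and copies the code back:
await fetch("/writeGoogleAccessToken", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ authenticationCode: "4/..." }), // placeholder code
});
```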
\ No newline at end of file diff --git a/src/server/ApiManagers/GooglePhotosManager.ts b/src/server/ApiManagers/GooglePhotosManager.ts new file mode 100644 index 000000000..107542ce2 --- /dev/null +++ b/src/server/ApiManagers/GooglePhotosManager.ts @@ -0,0 +1,115 @@ +import ApiManager, { Registration } from "./ApiManager"; +import { Method, _error, _success, _invalid } from "../RouteManager"; +import * as path from "path"; +import { GoogleApiServerUtils } from "../apis/google/GoogleApiServerUtils"; +import { BatchedArray, TimeUnit } from "array-batcher"; +import { GooglePhotosUploadUtils } from "../apis/google/GooglePhotosUploadUtils"; +import { Opt } from "../../new_fields/Doc"; +import { DashUploadUtils, InjectSize, SizeSuffix } from "../DashUploadUtils"; +import { Database } from "../database"; + +const authenticationError = "Unable to authenticate Google credentials before uploading to Google Photos!"; +const mediaError = "Unable to convert all uploaded bytes to media items!"; +const UploadError = (count: number) => `Unable to upload ${count} images to Dash's server`; +const requestError = "Unable to execute download: the body's media items were malformed."; +const downloadError = "Encountered an error while executing downloads."; +interface GooglePhotosUploadFailure { + batch: number; + index: number; + url: string; + reason: string; +} +interface MediaItem { + baseUrl: string; + filename: string; +} +interface NewMediaItem { + description: string; + simpleMediaItem: { + uploadToken: string; + }; +} +const prefix = "google_photos_"; + +/** + * This manager handles the creation of routes for google photos functionality. + */ +export default class GooglePhotosManager extends ApiManager { + + protected initialize(register: Registration): void { + + register({ + method: Method.POST, + subscription: "/googlePhotosMediaUpload", + secureHandler: async ({ user, req, res }) => { + const { media } = req.body; + const token = await GoogleApiServerUtils.retrieveAccessToken(user.id); + if (!token) { + return _error(res, authenticationError); + } + const failed: GooglePhotosUploadFailure[] = []; + const batched = BatchedArray.from<GooglePhotosUploadUtils.UploadSource>(media, { batchSize: 25 }); + const newMediaItems = await batched.batchedMapPatientInterval<NewMediaItem>( + { magnitude: 100, unit: TimeUnit.Milliseconds }, + async (batch: any, collector: any, { completedBatches }: any) => { + for (let index = 0; index < batch.length; index++) { + const { url, description } = batch[index]; + const fail = (reason: string) => failed.push({ reason, batch: completedBatches + 1, index, url }); + const uploadToken = await GooglePhotosUploadUtils.DispatchGooglePhotosUpload(token, InjectSize(url, SizeSuffix.Original)).catch(fail); + if (!uploadToken) { + fail(`${path.extname(url)} is not an accepted extension`); + } else { + collector.push({ + description, + simpleMediaItem: { uploadToken } + }); + } + } + } + ); + const failedCount = failed.length; + if (failedCount) { + console.error(`Unable to upload ${failedCount} image${failedCount === 1 ? 
"" : "s"} to Google's servers`); + console.log(failed.map(({ reason, batch, index, url }) => `@${batch}.${index}: ${url} failed:\n${reason}`).join('\n\n')); + } + return GooglePhotosUploadUtils.CreateMediaItems(token, newMediaItems, req.body.album).then( + results => _success(res, { results, failed }), + error => _error(res, mediaError, error) + ); + } + }); + + register({ + method: Method.POST, + subscription: "/googlePhotosMediaDownload", + secureHandler: async ({ req, res }) => { + const contents: { mediaItems: MediaItem[] } = req.body; + let failed = 0; + if (contents) { + const completed: Opt<DashUploadUtils.ImageUploadInformation>[] = []; + for (const item of contents.mediaItems) { + const { contentSize, ...attributes } = await DashUploadUtils.InspectImage(item.baseUrl); + const found: Opt<DashUploadUtils.ImageUploadInformation> = await Database.Auxiliary.QueryUploadHistory(contentSize!); + if (!found) { + const upload = await DashUploadUtils.UploadInspectedImage({ contentSize, ...attributes }, item.filename, prefix).catch(error => _error(res, downloadError, error)); + if (upload) { + completed.push(upload); + await Database.Auxiliary.LogUpload(upload); + } else { + failed++; + } + } else { + completed.push(found); + } + } + if (failed) { + return _error(res, UploadError(failed)); + } + return _success(res, completed); + } + _invalid(res, requestError); + } + }); + + } +}
\ No newline at end of file diff --git a/src/server/ApiManagers/PDFManager.ts b/src/server/ApiManagers/PDFManager.ts new file mode 100644 index 000000000..0136b758e --- /dev/null +++ b/src/server/ApiManagers/PDFManager.ts @@ -0,0 +1,115 @@ +import ApiManager, { Registration } from "./ApiManager"; +import { Method } from "../RouteManager"; +import RouteSubscriber from "../RouteSubscriber"; +import { existsSync, createReadStream, createWriteStream } from "fs"; +import * as Pdfjs from 'pdfjs-dist'; +import { createCanvas } from "canvas"; +const imageSize = require("probe-image-size"); +import * as express from "express"; +import * as path from "path"; +import { Directory, serverPathToFile, clientPathToFile } from "./UploadManager"; +import { red } from "colors"; + +export default class PDFManager extends ApiManager { + + protected initialize(register: Registration): void { + + register({ + method: Method.GET, + subscription: new RouteSubscriber("thumbnail").add("filename"), + secureHandler: ({ req, res }) => getOrCreateThumbnail(req.params.filename, res) + }); + + } + +} + +async function getOrCreateThumbnail(thumbnailName: string, res: express.Response): Promise<void> { + const noExtension = thumbnailName.substring(0, thumbnailName.length - ".png".length); + const pageString = noExtension.split('-')[1]; + const pageNumber = parseInt(pageString); + return new Promise<void>(async resolve => { + const path = serverPathToFile(Directory.pdf_thumbnails, thumbnailName); + if (existsSync(path)) { + const existingThumbnail = createReadStream(path); + const { err, viewport } = await new Promise<any>(resolve => { + imageSize(existingThumbnail, (err: any, viewport: any) => resolve({ err, viewport })); + }); + if (err) { + console.log(red(`In PDF thumbnail response, unable to determine dimensions of ${thumbnailName}:`)); + console.log(err); + return; + } + dispatchThumbnail(res, viewport, thumbnailName); + } else { + const offset = thumbnailName.length - pageString.length - 5; + const name = thumbnailName.substring(0, offset) + ".pdf"; + const path = serverPathToFile(Directory.pdfs, name); + await CreateThumbnail(path, pageNumber, res); + } + resolve(); + }); +} + +async function CreateThumbnail(file: string, pageNumber: number, res: express.Response) { + const documentProxy = await Pdfjs.getDocument(file).promise; + const factory = new NodeCanvasFactory(); + const page = await documentProxy.getPage(pageNumber); + const viewport = page.getViewport(1 as any); + const { canvas, context } = factory.create(viewport.width, viewport.height); + const renderContext = { + canvasContext: context, + canvasFactory: factory, + viewport + }; + await page.render(renderContext).promise; + const pngStream = canvas.createPNGStream(); + const filenames = path.basename(file).split("."); + const thumbnailName = `${filenames[0]}-${pageNumber}.png`; + const pngFile = serverPathToFile(Directory.pdf_thumbnails, thumbnailName); + const out = createWriteStream(pngFile); + pngStream.pipe(out); + return new Promise<void>((resolve, reject) => { + out.on("finish", () => { + dispatchThumbnail(res, viewport, thumbnailName); + resolve(); + }); + out.on("error", error => { + console.log(red(`In PDF thumbnail creation, encountered the following error when piping ${pngFile}:`)); + console.log(error); + reject(); + }); + }); +} + +function dispatchThumbnail(res: express.Response, { width, height }: Pdfjs.PDFPageViewport, thumbnailName: string) { + res.send({ + path: clientPathToFile(Directory.pdf_thumbnails, thumbnailName), + width, + height 
+ }); +} + +class NodeCanvasFactory { + + create = (width: number, height: number) => { + const canvas = createCanvas(width, height); + const context = canvas.getContext('2d'); + return { + canvas, + context + }; + } + + reset = (canvasAndContext: any, width: number, height: number) => { + canvasAndContext.canvas.width = width; + canvasAndContext.canvas.height = height; + } + + destroy = (canvasAndContext: any) => { + canvasAndContext.canvas.width = 0; + canvasAndContext.canvas.height = 0; + canvasAndContext.canvas = null; + canvasAndContext.context = null; + } +}
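The thumbnail route encodes the source PDF and page number in the requested file name. Here is a small worked example of the name arithmetic in `getOrCreateThumbnail` (the file name is hypothetical):

```typescript
// GET /thumbnail/upload_1a2b3c-3.png
const thumbnailName = "upload_1a2b3c-3.png";                        // "<pdf base name>-<page>.png"
const pageString = thumbnailName.replace(".png", "").split("-")[1]; // "3"
const pdfName = thumbnailName.substring(0, thumbnailName.length - pageString.length - 5) + ".pdf";
// pdfName === "upload_1a2b3c.pdf": if the PNG is missing from pdf_thumbnails,
// page 3 of that PDF is rendered with pdfjs, cached, and the response is
// { path: "/files/pdf_thumbnails/upload_1a2b3c-3.png", width, height }.
```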
\ No newline at end of file diff --git a/src/server/ApiManagers/SearchManager.ts b/src/server/ApiManagers/SearchManager.ts new file mode 100644 index 000000000..316ba09ed --- /dev/null +++ b/src/server/ApiManagers/SearchManager.ts @@ -0,0 +1,85 @@ +import ApiManager, { Registration } from "./ApiManager"; +import { Method } from "../RouteManager"; +import { Search } from "../Search"; +const findInFiles = require('find-in-files'); +import * as path from 'path'; +import { pathToDirectory, Directory } from "./UploadManager"; +import { red, cyan, yellow } from "colors"; +import RouteSubscriber from "../RouteSubscriber"; +import { exec } from "child_process"; +import { onWindows } from ".."; + +export class SearchManager extends ApiManager { + + protected initialize(register: Registration): void { + + register({ + method: Method.GET, + subscription: new RouteSubscriber("solr").add("action"), + secureHandler: async ({ req, res }) => { + const { action } = req.params; + if (["start", "stop"].includes(action)) { + const status = req.params.action === "start"; + const success = await SolrManager.SetRunning(status); + console.log(success ? `Successfully ${status ? "started" : "stopped"} Solr!` : `Uh oh! Check the console for the error that occurred while ${status ? "starting" : "stopping"} Solr`); + } + res.redirect("/home"); + } + }); + + register({ + method: Method.GET, + subscription: "/textsearch", + secureHandler: async ({ req, res }) => { + const q = req.query.q; + if (q === undefined) { + res.send([]); + return; + } + const results = await findInFiles.find({ 'term': q, 'flags': 'ig' }, pathToDirectory(Directory.text), ".txt$"); + const resObj: { ids: string[], numFound: number, lines: string[] } = { ids: [], numFound: 0, lines: [] }; + for (const result in results) { + resObj.ids.push(path.basename(result, ".txt").replace(/upload_/, "")); + resObj.lines.push(results[result].line); + resObj.numFound++; + } + res.send(resObj); + } + }); + + register({ + method: Method.GET, + subscription: "/search", + secureHandler: async ({ req, res }) => { + const solrQuery: any = {}; + ["q", "fq", "start", "rows", "hl", "hl.fl"].forEach(key => solrQuery[key] = req.query[key]); + if (solrQuery.q === undefined) { + res.send([]); + return; + } + const results = await Search.search(solrQuery); + res.send(results); + } + }); + + } + +} + +export namespace SolrManager { + + export async function SetRunning(status: boolean): Promise<boolean> { + const args = status ? "start" : "stop -p 8983"; + console.log(`Solr management: trying to ${args}`); + exec(`${onWindows ? "solr.cmd" : "solr"} ${args}`, { cwd: "./solr-8.3.1/bin" }, (error, stdout, stderr) => { + if (error) { + console.log(red(error.message)); + console.log(red(`Solr management error: unable to ${args}`)); + } + console.log(cyan(stdout)); + console.log(yellow(stderr)); + }); + return true; + } + +}
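The two search routes differ in their backing store: `/textsearch` greps the extracted PDF text files on disk, while `/search` forwards a whitelisted set of query parameters to Solr. A client-side sketch (the query strings are illustrative):

```typescript
// Plain-text search over files/text/*.txt; returned ids are the upload hashes
// with the "upload_" prefix stripped.
const textHits = await (await fetch("/textsearch?q=turing")).json();
// -> { ids: [...], numFound: <n>, lines: [...] }

// Solr-backed search; only q, fq, start, rows, hl and hl.fl are forwarded.
const solrHits = await (await fetch("/search?q=some+query&start=0&rows=10")).json();
```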
\ No newline at end of file diff --git a/src/server/ApiManagers/UploadManager.ts b/src/server/ApiManagers/UploadManager.ts new file mode 100644 index 000000000..74f45ae62 --- /dev/null +++ b/src/server/ApiManagers/UploadManager.ts @@ -0,0 +1,222 @@ +import ApiManager, { Registration } from "./ApiManager"; +import { Method, _success } from "../RouteManager"; +import * as formidable from 'formidable'; +import v4 = require('uuid/v4'); +const AdmZip = require('adm-zip'); +import { extname, basename, dirname } from 'path'; +import { createReadStream, createWriteStream, unlink, readFileSync } from "fs"; +import { publicDirectory, filesDirectory } from ".."; +import { Database } from "../database"; +import { DashUploadUtils, SizeSuffix } from "../DashUploadUtils"; +import * as sharp from 'sharp'; +import { AcceptibleMedia } from "../SharedMediaTypes"; +import { normalize } from "path"; +const imageDataUri = require('image-data-uri'); + +export enum Directory { + parsed_files = "parsed_files", + images = "images", + videos = "videos", + pdfs = "pdfs", + text = "text", + pdf_thumbnails = "pdf_thumbnails" +} + +export function serverPathToFile(directory: Directory, filename: string) { + return normalize(`${filesDirectory}/${directory}/${filename}`); +} + +export function pathToDirectory(directory: Directory) { + return normalize(`${filesDirectory}/${directory}`); +} + +export function clientPathToFile(directory: Directory, filename: string) { + return `/files/${directory}/${filename}`; +} + +export default class UploadManager extends ApiManager { + + protected initialize(register: Registration): void { + + register({ + method: Method.POST, + subscription: "/upload", + secureHandler: async ({ req, res }) => { + const form = new formidable.IncomingForm(); + form.uploadDir = pathToDirectory(Directory.parsed_files); + form.keepExtensions = true; + return new Promise<void>(resolve => { + form.parse(req, async (_err, _fields, files) => { + const results: any[] = []; + for (const key in files) { + const result = await DashUploadUtils.upload(files[key]); + result && results.push(result); + } + _success(res, results); + resolve(); + }); + }); + } + }); + + register({ + method: Method.POST, + subscription: "/uploadDoc", + secureHandler: ({ req, res }) => { + const form = new formidable.IncomingForm(); + form.keepExtensions = true; + // let path = req.body.path; + const ids: { [id: string]: string } = {}; + let remap = true; + const getId = (id: string): string => { + if (!remap) return id; + if (id.endsWith("Proto")) return id; + if (id in ids) { + return ids[id]; + } else { + return ids[id] = v4(); + } + }; + const mapFn = (doc: any) => { + if (doc.id) { + doc.id = getId(doc.id); + } + for (const key in doc.fields) { + if (!doc.fields.hasOwnProperty(key)) { + continue; + } + const field = doc.fields[key]; + if (field === undefined || field === null) { + continue; + } + + if (field.__type === "proxy" || field.__type === "prefetch_proxy") { + field.fieldId = getId(field.fieldId); + } else if (field.__type === "script" || field.__type === "computed") { + if (field.captures) { + field.captures.fieldId = getId(field.captures.fieldId); + } + } else if (field.__type === "list") { + mapFn(field); + } else if (typeof field === "string") { + const re = /("(?:dataD|d)ocumentId"\s*:\s*")([\w\-]*)"/g; + doc.fields[key] = (field as any).replace(re, (match: any, p1: string, p2: string) => { + return `${p1}${getId(p2)}"`; + }); + } else if (field.__type === "RichTextField") { + const re = /("href"\s*:\s*")(.*?)"/g; + 
field.Data = field.Data.replace(re, (match: any, p1: string, p2: string) => { + return `${p1}${getId(p2)}"`; + }); + } + } + }; + return new Promise<void>(resolve => { + form.parse(req, async (_err, fields, files) => { + remap = fields.remap !== "false"; + let id: string = ""; + try { + for (const name in files) { + const path_2 = files[name].path; + const zip = new AdmZip(path_2); + zip.getEntries().forEach((entry: any) => { + if (!entry.entryName.startsWith("files/")) return; + let directory = dirname(entry.entryName) + "/"; + const extension = extname(entry.entryName); + const base = basename(entry.entryName).split(".")[0]; + try { + zip.extractEntryTo(entry.entryName, publicDirectory, true, false); + directory = "/" + directory; + + createReadStream(publicDirectory + directory + base + extension).pipe(createWriteStream(publicDirectory + directory + base + "_o" + extension)); + createReadStream(publicDirectory + directory + base + extension).pipe(createWriteStream(publicDirectory + directory + base + "_s" + extension)); + createReadStream(publicDirectory + directory + base + extension).pipe(createWriteStream(publicDirectory + directory + base + "_m" + extension)); + createReadStream(publicDirectory + directory + base + extension).pipe(createWriteStream(publicDirectory + directory + base + "_l" + extension)); + } catch (e) { + console.log(e); + } + }); + const json = zip.getEntry("doc.json"); + let docs: any; + try { + const data = JSON.parse(json.getData().toString("utf8")); + docs = data.docs; + id = data.id; + docs = Object.keys(docs).map(key => docs[key]); + docs.forEach(mapFn); + await Promise.all(docs.map((doc: any) => new Promise(res => Database.Instance.replace(doc.id, doc, (err, r) => { + err && console.log(err); + res(); + }, true, "newDocuments")))); + } catch (e) { console.log(e); } + unlink(path_2, () => { }); + } + if (id) { + res.send(JSON.stringify(getId(id))); + } else { + res.send(JSON.stringify("error")); + } + } catch (e) { console.log(e); } + resolve(); + }); + }); + } + }); + + register({ + method: Method.POST, + subscription: "/inspectImage", + secureHandler: async ({ req, res }) => { + const { source } = req.body; + if (typeof source === "string") { + const { serverAccessPaths } = await DashUploadUtils.UploadImage(source); + return res.send(await DashUploadUtils.InspectImage(serverAccessPaths[SizeSuffix.Original])); + } + res.send({}); + } + }); + + register({ + method: Method.POST, + subscription: "/uploadURI", + secureHandler: ({ req, res }) => { + const uri = req.body.uri; + const filename = req.body.name; + if (!uri || !filename) { + res.status(401).send("incorrect parameters specified"); + return; + } + return imageDataUri.outputFile(uri, serverPathToFile(Directory.images, filename)).then((savedName: string) => { + const ext = extname(savedName).toLowerCase(); + const { pngs, jpgs } = AcceptibleMedia; + const resizers = [ + { resizer: sharp().resize(100, undefined, { withoutEnlargement: true }), suffix: "_s" }, + { resizer: sharp().resize(400, undefined, { withoutEnlargement: true }), suffix: "_m" }, + { resizer: sharp().resize(900, undefined, { withoutEnlargement: true }), suffix: "_l" }, + ]; + let isImage = false; + if (pngs.includes(ext)) { + resizers.forEach(element => { + element.resizer = element.resizer.png(); + }); + isImage = true; + } else if (jpgs.includes(ext)) { + resizers.forEach(element => { + element.resizer = element.resizer.jpeg(); + }); + isImage = true; + } + if (isImage) { + resizers.forEach(resizer => { + const path = 
serverPathToFile(Directory.images, filename + resizer.suffix + ext); + createReadStream(savedName).pipe(resizer.resizer).pipe(createWriteStream(path)); + }); + } + res.send(clientPathToFile(Directory.images, filename + ext)); + }); + } + }); + + } + +}
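The three path helpers defined at the top of this file are used throughout the server. A quick sketch of what they resolve to (the file name is hypothetical):

```typescript
import { Directory, serverPathToFile, clientPathToFile, pathToDirectory } from "./UploadManager";

// The same logical file, addressed two ways:
serverPathToFile(Directory.images, "upload_abc_m.png"); // <filesDirectory>/images/upload_abc_m.png, for fs reads/writes
clientPathToFile(Directory.images, "upload_abc_m.png"); // /files/images/upload_abc_m.png, the URL handed to the browser
pathToDirectory(Directory.parsed_files);                // <filesDirectory>/parsed_files, formidable's temporary upload target
```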
\ No newline at end of file diff --git a/src/server/ApiManagers/UserManager.ts b/src/server/ApiManagers/UserManager.ts new file mode 100644 index 000000000..f2ef22961 --- /dev/null +++ b/src/server/ApiManagers/UserManager.ts @@ -0,0 +1,71 @@ +import ApiManager, { Registration } from "./ApiManager"; +import { Method } from "../RouteManager"; +import { Database } from "../database"; +import { msToTime } from "../ActionUtilities"; + +export const timeMap: { [id: string]: number } = {}; +interface ActivityUnit { + user: string; + duration: number; +} + +export default class UserManager extends ApiManager { + + protected initialize(register: Registration): void { + + register({ + method: Method.GET, + subscription: "/getUsers", + secureHandler: async ({ res }) => { + const cursor = await Database.Instance.query({}, { email: 1, userDocumentId: 1 }, "users"); + const results = await cursor.toArray(); + res.send(results.map(user => ({ email: user.email, userDocumentId: user.userDocumentId }))); + } + }); + + register({ + method: Method.GET, + subscription: "/getUserDocumentId", + secureHandler: ({ res, user }) => res.send(user.userDocumentId) + }); + + register({ + method: Method.GET, + subscription: "/getCurrentUser", + secureHandler: ({ res, user }) => res.send(JSON.stringify(user)), + publicHandler: ({ res }) => res.send(JSON.stringify({ id: "__guest__", email: "" })) + }); + + register({ + method: Method.GET, + subscription: "/activity", + secureHandler: ({ res }) => { + const now = Date.now(); + + const activeTimes: ActivityUnit[] = []; + const inactiveTimes: ActivityUnit[] = []; + + for (const user in timeMap) { + const time = timeMap[user]; + const duration = now - time; + const target = (duration / 1000) < (60 * 5) ? activeTimes : inactiveTimes; + target.push({ user, duration }); + } + + const process = (target: { user: string, duration: number }[]) => { + const comparator = (first: ActivityUnit, second: ActivityUnit) => first.duration - second.duration; + const sorted = target.sort(comparator); + return sorted.map(({ user, duration }) => `${user} (${msToTime(duration)})`); + }; + + res.render("user_activity.pug", { + title: "User Activity", + active: process(activeTimes), + inactive: process(inactiveTimes) + }); + } + }); + + } + +}
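The `/activity` route assumes `timeMap` is refreshed elsewhere (not in this diff) whenever a user acts; users whose last action is under five minutes old are reported as active, with durations formatted by `msToTime`. A small sketch, with a made-up user:

```typescript
import { timeMap } from "./UserManager";
import { msToTime } from "../ActionUtilities";

// Hypothetical heartbeat: record that a user just did something.
timeMap["alice@brown.edu"] = Date.now();

// msToTime renders elapsed milliseconds as HH:MM:SS.<tenths of a second>:
msToTime(83456); // "00:01:23.4"
```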
\ No newline at end of file diff --git a/src/server/ApiManagers/UtilManager.ts b/src/server/ApiManagers/UtilManager.ts new file mode 100644 index 000000000..a0d0d0f4b --- /dev/null +++ b/src/server/ApiManagers/UtilManager.ts @@ -0,0 +1,75 @@ +import ApiManager, { Registration } from "./ApiManager"; +import { Method } from "../RouteManager"; +import { exec } from 'child_process'; +import { command_line } from "../ActionUtilities"; +import RouteSubscriber from "../RouteSubscriber"; +import { red } from "colors"; + +export default class UtilManager extends ApiManager { + + protected initialize(register: Registration): void { + + register({ + method: Method.GET, + subscription: new RouteSubscriber("environment").add("key"), + secureHandler: ({ req, res }) => { + const { key } = req.params; + const value = process.env[key]; + if (!value) { + console.log(red(`process.env.${key} is not defined.`)); + } + return res.send(value); + } + }); + + register({ + method: Method.GET, + subscription: "/pull", + secureHandler: async ({ res }) => { + return new Promise<void>(resolve => { + exec('"C:\\Program Files\\Git\\git-bash.exe" -c "git pull"', err => { + if (err) { + res.send(err.message); + return; + } + res.redirect("/"); + resolve(); + }); + }); + } + }); + + register({ + method: Method.GET, + subscription: "/buxton", + secureHandler: async ({ res }) => { + const cwd = './src/scraping/buxton'; + + const onResolved = (stdout: string) => { console.log(stdout); res.redirect("/"); }; + const onRejected = (err: any) => { console.error(err.message); res.send(err); }; + const tryPython3 = () => command_line('python3 scraper.py', cwd).then(onResolved, onRejected); + + return command_line('python scraper.py', cwd).then(onResolved, tryPython3); + }, + }); + + register({ + method: Method.GET, + subscription: "/version", + secureHandler: ({ res }) => { + return new Promise<void>(resolve => { + exec('"C:\\Program Files\\Git\\bin\\git.exe" rev-parse HEAD', (err, stdout) => { + if (err) { + res.send(err.message); + return; + } + res.send(stdout); + }); + resolve(); + }); + } + }); + + } + +}
\ No newline at end of file diff --git a/src/server/DashSession.ts b/src/server/DashSession.ts new file mode 100644 index 000000000..83ce7caaf --- /dev/null +++ b/src/server/DashSession.ts @@ -0,0 +1,62 @@ +import { Session } from "./Session/session"; +import { Email } from "./ActionUtilities"; +import { red, yellow } from "colors"; +import { SolrManager } from "./ApiManagers/SearchManager"; +import { execSync } from "child_process"; +import { Utils } from "../Utils"; +import { WebSocket } from "./Websocket/Websocket"; +import { MessageStore } from "./Message"; +import { launchServer } from "."; + +/** +* If we're the monitor (master) thread, we should launch the monitor logic for the session. +* Otherwise, we must be on a worker thread that was spawned *by* the monitor (master) thread, and thus +* our job should be to run the server. +*/ +export class DashSessionAgent extends Session.AppliedSessionAgent { + + private readonly notificationRecipients = ["samuel_wilkins@brown.edu"]; + private readonly signature = "-Dash Server Session Manager"; + + protected async launchMonitor() { + const monitor = Session.Monitor.Create({ + key: async key => { + // this sends a pseudorandomly generated guid to the configuration's recipients, allowing them alone + // to kill the server via the /kill/:key route + const content = `The key for this session (started @ ${new Date().toUTCString()}) is ${key}.\n\n${this.signature}`; + const failures = await Email.dispatchAll(this.notificationRecipients, "Server Termination Key", content); + if (failures) { + failures.map(({ recipient, error: { message } }) => monitor.log(red(`dispatch failure @ ${recipient} (${yellow(message)})`))); + return false; + } + return true; + }, + crash: async ({ name, message, stack }) => { + const body = [ + "You, as a Dash Administrator, are being notified of a server crash event. Here's what we know:", + `name:\n${name}`, + `message:\n${message}`, + `stack:\n${stack}`, + "The server is already restarting itself, but if you're concerned, use the Remote Desktop Connection to monitor progress.", + ].join("\n\n"); + const content = `${body}\n\n${this.signature}`; + const failures = await Email.dispatchAll(this.notificationRecipients, "Dash Web Server Crash", content); + if (failures) { + failures.map(({ recipient, error: { message } }) => monitor.log(red(`dispatch failure @ ${recipient} (${yellow(message)})`))); + return false; + } + return true; + } + }); + monitor.addReplCommand("pull", [], () => execSync("git pull", { stdio: ["ignore", "inherit", "inherit"] })); + monitor.addReplCommand("solr", [/start|stop/], args => SolrManager.SetRunning(args[0] === "start")); + return monitor; + } + + protected async launchServerWorker() { + const worker = Session.ServerWorker.Create(launchServer); // server initialization delegated to worker + worker.addExitHandler(() => Utils.Emit(WebSocket._socket, MessageStore.ConnectionTerminated, "Manual")); + return worker; + } + +}
\ No newline at end of file diff --git a/src/server/DashUploadUtils.ts b/src/server/DashUploadUtils.ts index 46d897339..d9d985ca5 100644 --- a/src/server/DashUploadUtils.ts +++ b/src/server/DashUploadUtils.ts @@ -5,41 +5,105 @@ import * as sharp from 'sharp'; import request = require('request-promise'); import { ExifData, ExifImage } from 'exif'; import { Opt } from '../new_fields/Doc'; +import { AcceptibleMedia } from './SharedMediaTypes'; +import { filesDirectory } from '.'; +import { File } from 'formidable'; +import { basename } from "path"; +import { createIfNotExists } from './ActionUtilities'; +import { ParsedPDF } from "../server/PdfTypes"; +const parse = require('pdf-parse'); +import { Directory, serverPathToFile, clientPathToFile } from './ApiManagers/UploadManager'; +import { red } from 'colors'; -const uploadDirectory = path.join(__dirname, './public/files/'); +export enum SizeSuffix { + Small = "_s", + Medium = "_m", + Large = "_l", + Original = "_o" +} + +export function InjectSize(filename: string, size: SizeSuffix) { + const extension = path.extname(filename).toLowerCase(); + return filename.substring(0, filename.length - extension.length) + size + extension; +} export namespace DashUploadUtils { export interface Size { width: number; - suffix: string; + suffix: SizeSuffix; + } + + export interface ImageFileResponse { + name: string; + path: string; + type: string; + exif: Opt<DashUploadUtils.EnrichedExifData>; } export const Sizes: { [size: string]: Size } = { - SMALL: { width: 100, suffix: "_s" }, - MEDIUM: { width: 400, suffix: "_m" }, - LARGE: { width: 900, suffix: "_l" }, + SMALL: { width: 100, suffix: SizeSuffix.Small }, + MEDIUM: { width: 400, suffix: SizeSuffix.Medium }, + LARGE: { width: 900, suffix: SizeSuffix.Large }, }; - const gifs = [".gif"]; - const pngs = [".png"]; - const jpgs = [".jpg", ".jpeg"]; - export const imageFormats = [...pngs, ...jpgs, ...gifs]; - const videoFormats = [".mov", ".mp4"]; + export function validateExtension(url: string) { + return AcceptibleMedia.imageFormats.includes(path.extname(url).toLowerCase()); + } const size = "content-length"; const type = "content-type"; - export interface UploadInformation { - mediaPaths: string[]; - fileNames: { [key: string]: string }; + export interface ImageUploadInformation { + clientAccessPath: string; + serverAccessPaths: { [key: string]: string }; exifData: EnrichedExifData; contentSize?: number; contentType?: string; } + const { imageFormats, videoFormats, applicationFormats } = AcceptibleMedia; + + export async function upload(file: File): Promise<any> { + const { type, path, name } = file; + const types = type.split("/"); + + const category = types[0]; + const format = `.${types[1]}`; + + switch (category) { + case "image": + if (imageFormats.includes(format)) { + const results = await UploadImage(path, basename(path), format); + return { ...results, name, type }; + } + case "video": + if (videoFormats.includes(format)) { + return MoveParsedFile(path, Directory.videos); + } + case "application": + if (applicationFormats.includes(format)) { + return UploadPdf(path); + } + } + + console.log(red(`Ignoring unsupported file (${name}) with upload type (${type}).`)); + return { clientAccessPath: undefined }; + } + + async function UploadPdf(absolutePath: string) { + const dataBuffer = fs.readFileSync(absolutePath); + const result: ParsedPDF = await parse(dataBuffer); + const parsedName = basename(absolutePath); + await new Promise<void>((resolve, reject) => { + const textFilename = 
`${parsedName.substring(0, parsedName.length - 4)}.txt`; + const writeStream = fs.createWriteStream(serverPathToFile(Directory.text, textFilename)); + writeStream.write(result.text, error => error ? reject(error) : resolve()); + }); + return MoveParsedFile(absolutePath, Directory.pdfs); + } + const generate = (prefix: string, url: string) => `${prefix}upload_${Utils.GenerateGuid()}${sanitizeExtension(url)}`; - const sanitize = (filename: string) => filename.replace(/\s+/g, "_"); const sanitizeExtension = (source: string) => { let extension = path.extname(source); extension = extension.toLowerCase(); @@ -58,15 +122,15 @@ export namespace DashUploadUtils { * @param {string} prefix is a string prepended to the generated image name in the * event that @param filename is not specified * - * @returns {UploadInformation} This method returns + * @returns {ImageUploadInformation} This method returns * 1) the paths to the uploaded images (plural due to resizing) * 2) the file name of each of the resized images * 3) the size of the image, in bytes (4432130) * 4) the content type of the image, i.e. image/(jpeg | png | ...) */ - export const UploadImage = async (source: string, filename?: string, prefix: string = ""): Promise<UploadInformation> => { + export const UploadImage = async (source: string, filename?: string, format?: string, prefix: string = ""): Promise<ImageUploadInformation> => { const metadata = await InspectImage(source); - return UploadInspectedImage(metadata, filename, prefix); + return UploadInspectedImage(metadata, filename, format, prefix); }; export interface InspectionResults { @@ -83,6 +147,11 @@ export namespace DashUploadUtils { error?: string; } + export async function buildFileDirectories() { + const pending = Object.keys(Directory).map(sub => createIfNotExists(`${filesDirectory}/${sub}`)); + return Promise.all(pending); + } + /** * Based on the url's classification as local or remote, gleans * as much information as possible about the specified image @@ -102,65 +171,63 @@ export namespace DashUploadUtils { if (isLocal) { return results; } - const metadata = (await new Promise<any>((resolve, reject) => { - request.head(source, async (error, res) => { - if (error) { - return reject(error); - } - resolve(res); - }); - })).headers; + const { headers } = (await new Promise<any>((resolve, reject) => { + request.head(source, (error, res) => error ? reject(error) : resolve(res)); + })); return { - contentSize: parseInt(metadata[size]), - contentType: metadata[type], + contentSize: parseInt(headers[size]), + contentType: headers[type], ...results }; }; - export const UploadInspectedImage = async (metadata: InspectionResults, filename?: string, prefix = ""): Promise<UploadInformation> => { + export async function MoveParsedFile(absolutePath: string, destination: Directory): Promise<{ clientAccessPath: Opt<string> }> { + return new Promise<{ clientAccessPath: Opt<string> }>(resolve => { + const filename = basename(absolutePath); + const destinationPath = serverPathToFile(destination, filename); + fs.rename(absolutePath, destinationPath, error => { + resolve({ clientAccessPath: error ? undefined : clientPathToFile(destination, filename) }); + }); + }); + } + + export const UploadInspectedImage = async (metadata: InspectionResults, filename?: string, format?: string, prefix = ""): Promise<ImageUploadInformation> => { const { isLocal, stream, normalizedUrl, contentSize, contentType, exifData } = metadata; - const resolved = filename ? 
sanitize(filename) : generate(prefix, normalizedUrl); - const extension = sanitizeExtension(normalizedUrl || resolved); - let information: UploadInformation = { - mediaPaths: [], - fileNames: { clean: resolved }, + const resolved = filename || generate(prefix, normalizedUrl); + const extension = format || sanitizeExtension(normalizedUrl || resolved); + const information: ImageUploadInformation = { + clientAccessPath: clientPathToFile(Directory.images, resolved), + serverAccessPaths: {}, exifData, contentSize, contentType, }; - return new Promise<UploadInformation>(async (resolve, reject) => { + const { pngs, jpgs } = AcceptibleMedia; + return new Promise<ImageUploadInformation>(async (resolve, reject) => { const resizers = [ - { resizer: sharp().rotate(), suffix: "_o" }, + { resizer: sharp().rotate(), suffix: SizeSuffix.Original }, ...Object.values(Sizes).map(size => ({ resizer: sharp().resize(size.width, undefined, { withoutEnlargement: true }).rotate(), suffix: size.suffix })) ]; - let nonVisual = false; if (pngs.includes(extension)) { resizers.forEach(element => element.resizer = element.resizer.png()); } else if (jpgs.includes(extension)) { resizers.forEach(element => element.resizer = element.resizer.jpeg()); - } else if (![...imageFormats, ...videoFormats].includes(extension.toLowerCase())) { - nonVisual = true; } - if (imageFormats.includes(extension)) { - for (let resizer of resizers) { - const suffix = resizer.suffix; - let mediaPath: string; - await new Promise<void>(resolve => { - const filename = resolved.substring(0, resolved.length - extension.length) + suffix + extension; - information.mediaPaths.push(mediaPath = uploadDirectory + filename); - information.fileNames[suffix] = filename; - stream(normalizedUrl).pipe(resizer.resizer).pipe(fs.createWriteStream(mediaPath)) - .on('close', resolve) - .on('error', reject); - }); - } - } - if (!isLocal || nonVisual) { + for (const { resizer, suffix } of resizers) { await new Promise<void>(resolve => { - stream(normalizedUrl).pipe(fs.createWriteStream(uploadDirectory + resolved)).on('close', resolve); + const filename = InjectSize(resolved, suffix); + information.serverAccessPaths[suffix] = serverPathToFile(Directory.images, filename); + stream(normalizedUrl).pipe(resizer).pipe(fs.createWriteStream(serverPathToFile(Directory.images, filename))) + .on('close', resolve) + .on('error', reject); + }); + } + if (isLocal) { + await new Promise<boolean>(resolve => { + fs.unlink(normalizedUrl, error => resolve(error === null)); }); } resolve(information); @@ -188,13 +255,4 @@ export namespace DashUploadUtils { }); }; - export const createIfNotExists = async (path: string) => { - if (await new Promise<boolean>(resolve => fs.exists(path, resolve))) { - return true; - } - return new Promise<boolean>(resolve => fs.mkdir(path, error => resolve(error === null))); - }; - - export const Destroy = (mediaPath: string) => new Promise<boolean>(resolve => fs.unlink(mediaPath, error => resolve(error === null))); - }
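Two details worth calling out from the rewritten upload path: size suffixes are injected immediately before the (lower-cased) extension, and URLs are screened by extension before an image upload is attempted. Small examples (the file names and URL are hypothetical):

```typescript
import { InjectSize, SizeSuffix, DashUploadUtils } from "./DashUploadUtils";

InjectSize("upload_abc.png", SizeSuffix.Medium);  // "upload_abc_m.png"
InjectSize("photo.JPEG", SizeSuffix.Original);    // "photo_o.jpeg" (the extension comes back lower-cased)

// True only if the URL's extension appears in AcceptibleMedia.imageFormats:
DashUploadUtils.validateExtension("https://example.com/cat.jpg");
```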
\ No newline at end of file diff --git a/src/server/GarbageCollector.ts b/src/server/GarbageCollector.ts index 09b52eadf..5729c3ee5 100644 --- a/src/server/GarbageCollector.ts +++ b/src/server/GarbageCollector.ts @@ -100,7 +100,7 @@ async function GarbageCollect(full: boolean = true) { if (!full) { await Database.Instance.updateMany({ _id: { $nin: notToDelete } }, { $set: { "deleted": true } }); await Database.Instance.updateMany({ _id: { $in: notToDelete } }, { $unset: { "deleted": true } }); - console.log(await Search.Instance.updateDocuments( + console.log(await Search.updateDocuments( notToDelete.map<any>(id => ({ id, deleted: { set: null } })) @@ -122,7 +122,7 @@ async function GarbageCollect(full: boolean = true) { // const result = await Database.Instance.delete({ _id: { $in: toDelete } }, "newDocuments"); console.log(`${deleted} documents deleted`); - await Search.Instance.deleteDocuments(toDelete); + await Search.deleteDocuments(toDelete); console.log("Cleared search documents"); const folder = "./src/server/public/files/"; diff --git a/src/server/Message.ts b/src/server/Message.ts index aaee143e8..621abfd1e 100644 --- a/src/server/Message.ts +++ b/src/server/Message.ts @@ -50,6 +50,7 @@ export namespace MessageStore { export const GetFields = new Message<string[]>("Get Fields"); // send string[] of 'id' get Transferable[] back export const GetDocument = new Message<string>("Get Document"); export const DeleteAll = new Message<any>("Delete All"); + export const ConnectionTerminated = new Message<string>("Connection Terminated"); export const GetRefField = new Message<string>("Get Ref Field"); export const GetRefFields = new Message<string[]>("Get Ref Fields"); diff --git a/src/server/ProcessFactory.ts b/src/server/ProcessFactory.ts new file mode 100644 index 000000000..acb8b3a99 --- /dev/null +++ b/src/server/ProcessFactory.ts @@ -0,0 +1,44 @@ +import { existsSync, mkdirSync } from "fs"; +import { pathFromRoot, fileDescriptorFromStream } from './ActionUtilities'; +import rimraf = require("rimraf"); +import { ChildProcess, spawn, StdioOptions } from "child_process"; +import { Stream } from "stream"; + +export namespace ProcessFactory { + + export type Sink = "pipe" | "ipc" | "ignore" | "inherit" | Stream | number | null | undefined; + + export async function createWorker(command: string, args?: readonly string[], stdio?: StdioOptions | "logfile", detached = true): Promise<ChildProcess> { + if (stdio === "logfile") { + const log_fd = await Logger.create(command, args); + stdio = ["ignore", log_fd, log_fd]; + } + const child = spawn(command, args, { detached, stdio }); + child.unref(); + return child; + } + +} + +export namespace Logger { + + const logPath = pathFromRoot("./logs"); + + export async function initialize() { + if (existsSync(logPath)) { + if (!process.env.SPAWNED) { + await new Promise<any>(resolve => rimraf(logPath, resolve)); + } + } + mkdirSync(logPath); + } + + export async function create(command: string, args?: readonly string[]): Promise<number> { + return fileDescriptorFromStream(generate_log_path(command, args)); + } + + function generate_log_path(command: string, args?: readonly string[]) { + return pathFromRoot(`./logs/${command}-${args?.length}-${new Date().toUTCString()}.log`); + } + +}
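The "logfile" branch of ProcessFactory.createWorker reduces to a small pattern: open a write stream for the log, wait for its file descriptor, then hand that descriptor to spawn so the detached child's stdout and stderr land in the file even after the parent exits. A minimal sketch with hypothetical names:

```typescript
import { createWriteStream } from "fs";
import { spawn, ChildProcess } from "child_process";

// Resolves with the numeric file descriptor once the log file is open,
// so it can be placed directly into spawn()'s stdio array.
function openLogFile(logPath: string): Promise<number> {
    return new Promise<number>(resolve => createWriteStream(logPath).on("open", resolve));
}

// Spawns a detached worker whose stdout / stderr are redirected to the log file.
// unref() lets the parent exit without waiting on the child.
async function spawnLoggedWorker(command: string, args: string[], logPath: string): Promise<ChildProcess> {
    const log_fd = await openLogFile(logPath);
    const child = spawn(command, args, { detached: true, stdio: ["ignore", log_fd, log_fd] });
    child.unref();
    return child;
}

// hypothetical usage: spawnLoggedWorker("npm", ["run", "start-release"], "./logs/server.log");
```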
\ No newline at end of file diff --git a/src/server/RouteManager.ts b/src/server/RouteManager.ts new file mode 100644 index 000000000..25259bd88 --- /dev/null +++ b/src/server/RouteManager.ts @@ -0,0 +1,196 @@ +import RouteSubscriber from "./RouteSubscriber"; +import { DashUserModel } from "./authentication/models/user_model"; +import * as express from 'express'; +import { cyan, red, green } from 'colors'; + +export enum Method { + GET, + POST +} + +export interface CoreArguments { + req: express.Request; + res: express.Response; + isRelease: boolean; +} + +export type SecureHandler = (core: CoreArguments & { user: DashUserModel }) => any | Promise<any>; +export type PublicHandler = (core: CoreArguments) => any | Promise<any>; +export type ErrorHandler = (core: CoreArguments & { error: any }) => any | Promise<any>; + +export interface RouteInitializer { + method: Method; + subscription: string | RouteSubscriber | (string | RouteSubscriber)[]; + secureHandler: SecureHandler; + publicHandler?: PublicHandler; + errorHandler?: ErrorHandler; +} + +const registered = new Map<string, Set<Method>>(); + +enum RegistrationError { + Malformed, + Duplicate +} + +export default class RouteManager { + private server: express.Express; + private _isRelease: boolean; + private failedRegistrations: { route: string, reason: RegistrationError }[] = []; + + public get isRelease() { + return this._isRelease; + } + + constructor(server: express.Express, isRelease: boolean) { + this.server = server; + this._isRelease = isRelease; + } + + logRegistrationOutcome = () => { + if (this.failedRegistrations.length) { + let duplicateCount = 0; + let malformedCount = 0; + this.failedRegistrations.forEach(({ reason, route }) => { + let error: string; + if (reason === RegistrationError.Duplicate) { + error = `duplicate registration error: ${route} is already registered `; + duplicateCount++; + } else { + error = `malformed route error: ${route} is invalid`; + malformedCount++; + } + console.log(red(error)); + }); + console.log(); + if (duplicateCount) { + console.log('please remove all duplicate routes before continuing'); + } + if (malformedCount) { + console.log(`please ensure all routes adhere to ^\/$|^\/[A-Za-z]+(\/\:[A-Za-z]+)*$`); + } + process.exit(1); + } else { + console.log(green("all server routes have been successfully registered:")); + Array.from(registered.keys()).sort().forEach(route => console.log(cyan(route))); + console.log(); + } + } + + /** + * + * @param initializer + */ + addSupervisedRoute = (initializer: RouteInitializer): void => { + const { method, subscription, secureHandler: onValidation, publicHandler: onUnauthenticated, errorHandler: onError } = initializer; + const isRelease = this._isRelease; + const supervised = async (req: express.Request, res: express.Response) => { + const { user, originalUrl: target } = req; + const core = { req, res, isRelease }; + const tryExecute = async (toExecute: (args: any) => any | Promise<any>, args: any) => { + try { + await toExecute(args); + } catch (e) { + console.log(red(target), user && ("email" in user) ? 
"<user logged out>" : undefined); + if (onError) { + onError({ ...core, error: e }); + } else { + _error(res, `The server encountered an internal error when serving ${target}.`, e); + } + } + }; + if (user) { + await tryExecute(onValidation, { ...core, user }); + } else { + req.session!.target = target; + if (onUnauthenticated) { + await tryExecute(onUnauthenticated, core); + if (!res.headersSent) { + res.redirect("/login"); + } + } else { + res.redirect("/login"); + } + } + setTimeout(() => { + if (!res.headersSent) { + console.log(red(`Initiating fallback for ${target}. Please remove dangling promise from route handler`)); + const warning = `request to ${target} fell through - this is a fallback response`; + res.send({ warning }); + } + }, 1000); + }; + const subscribe = (subscriber: RouteSubscriber | string) => { + let route: string; + if (typeof subscriber === "string") { + route = subscriber; + } else { + route = subscriber.build; + } + if (!/^\/$|^\/[A-Za-z]+(\/\:[A-Za-z]+)*$/g.test(route)) { + this.failedRegistrations.push({ + reason: RegistrationError.Malformed, + route + }); + } else { + const existing = registered.get(route); + if (existing) { + if (existing.has(method)) { + this.failedRegistrations.push({ + reason: RegistrationError.Duplicate, + route + }); + return; + } + } else { + const specific = new Set<Method>(); + specific.add(method); + registered.set(route, specific); + } + switch (method) { + case Method.GET: + this.server.get(route, supervised); + break; + case Method.POST: + this.server.post(route, supervised); + break; + } + } + }; + if (Array.isArray(subscription)) { + subscription.forEach(subscribe); + } else { + subscribe(subscription); + } + } + +} + +export const STATUS = { + OK: 200, + BAD_REQUEST: 400, + EXECUTION_ERROR: 500, + PERMISSION_DENIED: 403 +}; + +export function _error(res: express.Response, message: string, error?: any) { + console.error(message); + res.statusMessage = message; + res.status(STATUS.EXECUTION_ERROR).send(error); +} + +export function _success(res: express.Response, body: any) { + res.status(STATUS.OK).send(body); +} + +export function _invalid(res: express.Response, message: string) { + res.statusMessage = message; + res.status(STATUS.BAD_REQUEST).send(); +} + +export function _permission_denied(res: express.Response, message?: string) { + if (message) { + res.statusMessage = message; + } + res.status(STATUS.BAD_REQUEST).send("Permission Denied!"); +} diff --git a/src/server/RouteStore.ts b/src/server/RouteStore.ts deleted file mode 100644 index 7426ffb39..000000000 --- a/src/server/RouteStore.ts +++ /dev/null @@ -1,43 +0,0 @@ -// PREPEND ALL ROUTES WITH FORWARD SLASHES! 
- -export enum RouteStore { - // GENERAL - root = "/", - home = "/home", - corsProxy = "/corsProxy", - delete = "/delete", - deleteAll = "/deleteAll", - - // UPLOAD AND STATIC FILE SERVING - public = "/public", - upload = "/upload", - dataUriToImage = "/uploadURI", - images = "/images", - inspectImage = "/inspectImage", - imageHierarchyExport = "/imageHierarchyExport", - - // USER AND WORKSPACES - getCurrUser = "/getCurrentUser", - getUsers = "/getUsers", - getUserDocumentId = "/getUserDocumentId", - updateCursor = "/updateCursor", - - openDocumentWithId = "/doc/:docId", - - // AUTHENTICATION - signup = "/signup", - login = "/login", - logout = "/logout", - forgot = "/forgotpassword", - reset = "/reset/:token", - - // APIS - cognitiveServices = "/cognitiveservices", - googleDocs = "/googleDocs", - readGoogleAccessToken = "/readGoogleAccessToken", - writeGoogleAccessToken = "/writeGoogleAccessToken", - googlePhotosMediaUpload = "/googlePhotosMediaUpload", - googlePhotosMediaDownload = "/googlePhotosMediaDownload", - googleDocsGet = "/googleDocsGet" - -}
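A sketch of how a caller might register a route against the RouteManager defined above. The route string, handler bodies, and the user.email field are illustrative assumptions; the manager itself supplies the authentication check, the /login redirect for unauthenticated visitors, and the one-second fallback response for dangling handlers:

```typescript
import * as express from "express";
import RouteManager, { Method, _success } from "./RouteManager";

const app = express();
const router = new RouteManager(app, /* isRelease */ false);

router.addSupervisedRoute({
    method: Method.GET,
    // must satisfy ^\/$|^\/[A-Za-z]+(\/\:[A-Za-z]+)*$ (a RouteSubscriber could be used instead)
    subscription: "/profile/:userId",
    // runs only when req.user has been populated by the authentication middleware
    secureHandler: ({ req, res, user }) => _success(res, { requested: req.params.userId, viewer: user.email }),
    // optional fallback for unauthenticated visitors; omit it to redirect straight to /login
    publicHandler: ({ req, res }) => _success(res, { requested: req.params.userId, viewer: null }),
});

router.logRegistrationOutcome();
app.listen(3000);
```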
\ No newline at end of file diff --git a/src/server/RouteSubscriber.ts b/src/server/RouteSubscriber.ts index e49be8af5..a1cf7c1c4 100644 --- a/src/server/RouteSubscriber.ts +++ b/src/server/RouteSubscriber.ts @@ -3,7 +3,7 @@ export default class RouteSubscriber { private requestParameters: string[] = []; constructor(root: string) { - this._root = root; + this._root = `/${root}`; } add(...parameters: string[]) { diff --git a/src/server/Search.ts b/src/server/Search.ts index 723dc101b..2b59c14b1 100644 --- a/src/server/Search.ts +++ b/src/server/Search.ts @@ -1,14 +1,12 @@ import * as rp from 'request-promise'; -import { Database } from './database'; -import { thisExpression } from 'babel-types'; -export class Search { - public static Instance = new Search(); - private url = 'http://localhost:8983/solr/'; +const pathTo = (relative: string) => `http://localhost:8983/solr/dash/${relative}`; - public async updateDocument(document: any) { +export namespace Search { + + export async function updateDocument(document: any) { try { - const res = await rp.post(this.url + "dash/update", { + const res = await rp.post(pathTo("update"), { headers: { 'content-type': 'application/json' }, body: JSON.stringify([document]) }); @@ -18,9 +16,9 @@ export class Search { } } - public async updateDocuments(documents: any[]) { + export async function updateDocuments(documents: any[]) { try { - const res = await rp.post(this.url + "dash/update", { + const res = await rp.post(pathTo("update"), { headers: { 'content-type': 'application/json' }, body: JSON.stringify(documents) }); @@ -30,9 +28,9 @@ export class Search { } } - public async search(query: any) { + export async function search(query: any) { try { - const searchResults = JSON.parse(await rp.get(this.url + "dash/select", { + const searchResults = JSON.parse(await rp.get(pathTo("select"), { qs: query })); const { docs, numFound } = searchResults.response; @@ -43,9 +41,9 @@ export class Search { } } - public async clear() { + export async function clear() { try { - return await rp.post(this.url + "dash/update", { + return rp.post(pathTo("update"), { body: { delete: { query: "*:*" @@ -56,7 +54,7 @@ export class Search { } catch { } } - public deleteDocuments(docs: string[]) { + export async function deleteDocuments(docs: string[]) { const promises: rp.RequestPromise[] = []; const nToDelete = 1000; let index = 0; @@ -64,7 +62,7 @@ export class Search { const count = Math.min(docs.length - index, nToDelete); const deleteIds = docs.slice(index, index + count); index += count; - promises.push(rp.post(this.url + "dash/update", { + promises.push(rp.post(pathTo("update"), { body: { delete: { query: deleteIds.map(id => `id:"${id}"`).join(" ") diff --git a/src/server/Session/session.ts b/src/server/Session/session.ts new file mode 100644 index 000000000..06a076ae4 --- /dev/null +++ b/src/server/Session/session.ts @@ -0,0 +1,592 @@ +import { red, cyan, green, yellow, magenta, blue, white } from "colors"; +import { on, fork, setupMaster, Worker, isMaster, isWorker } from "cluster"; +import { get } from "request-promise"; +import { Utils } from "../../Utils"; +import Repl, { ReplAction } from "../repl"; +import { readFileSync } from "fs"; +import { validate, ValidationError } from "jsonschema"; +import { configurationSchema } from "./session_config_schema"; + +/** + * This namespace relies on NodeJS's cluster module, which allows a parent (master) process to share + * code with its children (workers). 
A simple `isMaster` flag indicates who is trying to access + * the code, and thus determines the functionality that actually gets invoked (checked by the caller, not internally). + * + * Think of the master thread as a factory, and the workers as the helpers that actually run the server. + * + * So, when we run `npm start`, given the appropriate check, initializeMaster() is called in the parent process + * This will spawn off its own child process (by default, mirrors the execution path of its parent), + * in which initializeWorker() is invoked. + */ +export namespace Session { + + export abstract class AppliedSessionAgent { + + // the following two methods allow the developer to create a custom + // session and use the built in customization options for each thread + protected abstract async launchMonitor(): Promise<Session.Monitor>; + protected abstract async launchServerWorker(): Promise<Session.ServerWorker>; + + private launched = false; + + public killSession = (reason: string, graceful = true, errorCode = 0) => { + const target = isMaster ? this.sessionMonitor : this.serverWorker; + target.killSession(reason, graceful, errorCode); + } + + private sessionMonitorRef: Session.Monitor | undefined; + public get sessionMonitor(): Session.Monitor { + if (!isMaster) { + this.serverWorker.sendMonitorAction("kill", { + graceful: false, + reason: "Cannot access the session monitor directly from the server worker thread.", + errorCode: 1 + }); + throw new Error(); + } + return this.sessionMonitorRef!; + } + + private serverWorkerRef: Session.ServerWorker | undefined; + public get serverWorker(): Session.ServerWorker { + if (isMaster) { + throw new Error("Cannot access the server worker directly from the session monitor thread"); + } + return this.serverWorkerRef!; + } + + public async launch(): Promise<void> { + if (!this.launched) { + this.launched = true; + if (isMaster) { + this.sessionMonitorRef = await this.launchMonitor(); + } else { + this.serverWorkerRef = await this.launchServerWorker(); + } + } else { + throw new Error("Cannot launch a session thread more than once per process."); + } + } + + } + + interface Configuration { + showServerOutput: boolean; + masterIdentifier: string; + workerIdentifier: string; + ports: { [description: string]: number }; + pollingRoute: string; + pollingIntervalSeconds: number; + pollingFailureTolerance: number; + [key: string]: any; + } + + const defaultConfiguration: Configuration = { + showServerOutput: false, + masterIdentifier: yellow("__monitor__:"), + workerIdentifier: magenta("__server__:"), + ports: { server: 3000 }, + pollingRoute: "/", + pollingIntervalSeconds: 30, + pollingFailureTolerance: 0 + }; + + export type ExitHandler = (reason: Error | null) => void | Promise<void>; + + export namespace Monitor { + + export interface NotifierHooks { + key?: (key: string) => (boolean | Promise<boolean>); + crash?: (error: Error) => (boolean | Promise<boolean>); + } + + export interface Action { + message: string; + args: any; + } + + export type ServerMessageHandler = (action: Action) => void | Promise<void>; + + } + + /** + * Validates and reads the configuration file, accordingly builds a child process factory + * and spawns off an initial process that will respawn as predecessors die. 
+ */ + export class Monitor { + + private static count = 0; + private exitHandlers: ExitHandler[] = []; + private readonly notifiers: Monitor.NotifierHooks | undefined; + private readonly configuration: Configuration; + private onMessage: { [message: string]: Monitor.ServerMessageHandler[] | undefined } = {}; + private activeWorker: Worker | undefined; + private key: string | undefined; + private repl: Repl; + + public static Create(notifiers?: Monitor.NotifierHooks) { + if (isWorker) { + process.send?.({ + action: { + message: "kill", + args: { + reason: "cannot create a monitor on the worker process.", + graceful: false, + errorCode: 1 + } + } + }); + process.exit(1); + } else if (++Monitor.count > 1) { + console.error(red("cannot create more than one monitor.")); + process.exit(1); + } else { + return new Monitor(notifiers); + } + } + + /** + * Kill this session and its active child + * server process, either gracefully (may wait + * indefinitely, but at least allows active networking + * requests to complete) or immediately. + */ + public killSession = async (reason: string, graceful = true, errorCode = 0) => { + this.log(cyan(`exiting session ${graceful ? "clean" : "immediate"}ly`)); + this.log(`reason: ${(red(reason))}`); + await this.executeExitHandlers(null); + this.tryKillActiveWorker(graceful); + process.exit(errorCode); + } + + /** + * Execute the list of functions registered to be called + * whenever the process exits. + */ + public addExitHandler = (handler: ExitHandler) => this.exitHandlers.push(handler); + + /** + * Extend the default repl by adding in custom commands + * that can invoke application logic external to this module + */ + public addReplCommand = (basename: string, argPatterns: (RegExp | string)[], action: ReplAction) => { + this.repl.registerCommand(basename, argPatterns, action); + } + + /** + * Add a listener at this message. When the monitor process + * receives a message, it will invoke all registered functions. + */ + public addServerMessageListener = (message: string, handler: Monitor.ServerMessageHandler) => { + const handlers = this.onMessage[message]; + if (handlers) { + handlers.push(handler); + } else { + this.onMessage[message] = [handler]; + } + } + + /** + * Unregister a given listener at this message. + */ + public removeServerMessageListener = (message: string, handler: Monitor.ServerMessageHandler) => { + const handlers = this.onMessage[message]; + if (handlers) { + const index = handlers.indexOf(handler); + if (index > -1) { + handlers.splice(index, 1); + } + } + } + + /** + * Unregister all listeners at this message. 
+ */ + public clearServerMessageListeners = (message: string) => this.onMessage[message] = undefined; + + private constructor(notifiers?: Monitor.NotifierHooks) { + this.notifiers = notifiers; + + console.log(this.timestamp(), cyan("initializing session...")); + + this.configuration = this.loadAndValidateConfiguration(); + this.initializeSessionKey(); + // determines whether or not we see the compilation / initialization / runtime output of each child server process + setupMaster({ silent: !this.configuration.showServerOutput }); + + // handle exceptions in the master thread - there shouldn't be many of these + // the IPC (inter process communication) channel closed exception can't seem + // to be caught in a try catch, and is inconsequential, so it is ignored + process.on("uncaughtException", ({ message, stack }): void => { + if (message !== "Channel closed") { + this.log(red(message)); + if (stack) { + this.log(`uncaught exception\n${red(stack)}`); + } + } + }); + + // a helpful cluster event called on the master thread each time a child process exits + on("exit", ({ process: { pid } }, code, signal) => { + const prompt = `server worker with process id ${pid} has exited with code ${code}${signal === null ? "" : `, having encountered signal ${signal}`}.`; + this.log(cyan(prompt)); + // to make this a robust, continuous session, every time a child process dies, we immediately spawn a new one + this.spawn(); + }); + + this.repl = this.initializeRepl(); + this.spawn(); + } + + + /** + * Generates a blue UTC string associated with the time + * of invocation. + */ + private timestamp = () => blue(`[${new Date().toUTCString()}]`); + + /** + * A formatted, identified and timestamped log in color + */ + public log = (...optionalParams: any[]) => { + console.log(this.timestamp(), this.configuration.masterIdentifier, ...optionalParams); + } + + /** + * If the caller has indicated an interest + * in being notified of this feature, creates + * a GUID for this session that can, for example, + * be used as authentication for killing the server + * (checked externally). + */ + private initializeSessionKey = async (): Promise<void> => { + if (this.notifiers?.key) { + this.key = Utils.GenerateGuid(); + const success = await this.notifiers.key(this.key); + const statement = success ? green("distributed session key to recipients") : red("distribution of session key failed"); + this.log(statement); + } + } + + /** + * Builds the repl that allows the following commands to be typed into stdin of the master thread. 
+ */ + private initializeRepl = (): Repl => { + const repl = new Repl({ identifier: () => `${this.timestamp()} ${this.configuration.masterIdentifier}` }); + const boolean = /true|false/; + const number = /\d+/; + const letters = /[a-zA-Z]+/; + repl.registerCommand("exit", [/clean|force/], args => this.killSession("manual exit requested by repl", args[0] === "clean", 0)); + repl.registerCommand("restart", [/clean|force/], args => this.tryKillActiveWorker(args[0] === "clean")); + repl.registerCommand("set", [letters, "port", number, boolean], args => this.setPort(args[0], Number(args[2]), args[3] === "true")); + repl.registerCommand("set", [/polling/, number, boolean], args => { + const newPollingIntervalSeconds = Math.floor(Number(args[2])); + if (newPollingIntervalSeconds < 0) { + this.log(red("the polling interval must be a non-negative integer")); + } else { + if (newPollingIntervalSeconds !== this.configuration.pollingIntervalSeconds) { + this.configuration.pollingIntervalSeconds = newPollingIntervalSeconds; + if (args[3] === "true") { + this.activeWorker?.send({ newPollingIntervalSeconds }); + } + } + } + }); + return repl; + } + + /** + * Reads in configuration .json file only once, in the master thread + * and pass down any variables the pertinent to the child processes as environment variables. + */ + private loadAndValidateConfiguration = (): Configuration => { + try { + console.log(this.timestamp(), cyan("validating configuration...")); + const configuration: Configuration = JSON.parse(readFileSync('./session.config.json', 'utf8')); + const options = { + throwError: true, + allowUnknownAttributes: false + }; + // ensure all necessary and no excess information is specified by the configuration file + validate(configuration, configurationSchema, options); + let formatMaster = true; + let formatWorker = true; + Object.keys(defaultConfiguration).forEach(property => { + if (!configuration[property]) { + if (property === "masterIdentifier") { + formatMaster = false; + } else if (property === "workerIdentifier") { + formatWorker = false; + } + configuration[property] = defaultConfiguration[property]; + } + }); + if (formatMaster) { + configuration.masterIdentifier = yellow(configuration.masterIdentifier + ":"); + } + if (formatWorker) { + configuration.workerIdentifier = magenta(configuration.workerIdentifier + ":"); + } + return configuration; + } catch (error) { + if (error instanceof ValidationError) { + console.log(red("\nSession configuration failed.")); + console.log("The given session.config.json configuration file is invalid."); + console.log(`${error.instance}: ${error.stack}`); + process.exit(0); + } else if (error.code === "ENOENT" && error.path === "./session.config.json") { + console.log(cyan("Loading default session parameters...")); + console.log("Consider including a session.config.json configuration file in your project root for customization."); + return defaultConfiguration; + } else { + console.log(red("\nSession configuration failed.")); + console.log("The following unknown error occurred during configuration."); + console.log(error.stack); + process.exit(0); + } + } + } + + + private executeExitHandlers = async (reason: Error | null) => Promise.all(this.exitHandlers.map(handler => handler(reason))); + + /** + * Attempts to kill the active worker gracefully, unless otherwise specified. 
+ */ + private tryKillActiveWorker = (graceful = true): boolean => { + if (!this.activeWorker?.isDead()) { + if (graceful) { + this.activeWorker?.send({ manualExit: true }); + } else { + this.activeWorker?.process.kill(); + } + return true; + } + return false; + } + + /** + * Allows the caller to set the port at which the target (be it the server, + * the websocket, some other custom port) is listening. If an immediate restart + * is specified, this monitor will kill the active child and re-launch the server + * at the port. Otherwise, the updated port won't be used until / unless the child + * dies on its own and triggers a restart. + */ + private setPort = (port: "server" | "socket" | string, value: number, immediateRestart: boolean): void => { + if (value > 1023 && value < 65536) { + this.configuration.ports[port] = value; + if (immediateRestart) { + this.tryKillActiveWorker(); + } + } else { + this.log(red(`${port} is an invalid port number`)); + } + } + + /** + * Kills the current active worker and proceeds to spawn a new worker, + * feeding in configuration information as environment variables. + */ + private spawn = (): void => { + const { + pollingRoute, + pollingFailureTolerance, + pollingIntervalSeconds, + ports + } = this.configuration; + this.tryKillActiveWorker(); + this.activeWorker = fork({ + pollingRoute, + pollingFailureTolerance, + serverPort: ports.server, + socketPort: ports.socket, + pollingIntervalSeconds, + session_key: this.key + }); + this.log(cyan(`spawned new server worker with process id ${this.activeWorker.process.pid}`)); + // an IPC message handler that executes actions on the master thread when prompted by the active worker + this.activeWorker.on("message", async ({ lifecycle, action }) => { + if (action) { + const { message, args } = action as Monitor.Action; + console.log(this.timestamp(), `${this.configuration.workerIdentifier} action requested (${cyan(message)})`); + switch (message) { + case "kill": + const { reason, graceful, errorCode } = args; + this.killSession(reason, graceful, errorCode); + break; + case "notify_crash": + if (this.notifiers?.crash) { + const { error } = args; + const success = await this.notifiers.crash(error); + const statement = success ? green("distributed crash notification to recipients") : red("distribution of crash notification failed"); + this.log(statement); + } + break; + case "set_port": + const { port, value, immediateRestart } = args; + this.setPort(port, value, immediateRestart); + break; + } + const handlers = this.onMessage[message]; + if (handlers) { + handlers.forEach(handler => handler({ message, args })); + } + } else if (lifecycle) { + console.log(this.timestamp(), `${this.configuration.workerIdentifier} lifecycle phase (${lifecycle})`); + } + }); + } + + } + + /** + * Effectively, each worker repairs the connection to the server by reintroducing a consistent state + * if its predecessor has died. It itself also polls the server heartbeat, and exits with a notification + * email if the server encounters an uncaught exception or if the server cannot be reached. 
+ */ + export class ServerWorker { + + private static count = 0; + private shouldServerBeResponsive = false; + private exitHandlers: ExitHandler[] = []; + private pollingFailureCount = 0; + private pollingIntervalSeconds: number; + private pollingFailureTolerance: number; + private pollTarget: string; + private serverPort: number; + + public static Create(work: Function) { + if (isMaster) { + console.error(red("cannot create a worker on the monitor process.")); + process.exit(1); + } else if (++ServerWorker.count > 1) { + process.send?.({ + action: { + message: "kill", args: { + reason: "cannot create more than one worker on a given worker process.", + graceful: false, + errorCode: 1 + } + } + }); + process.exit(1); + } else { + return new ServerWorker(work); + } + } + + /** + * Allows developers to invoke application specific logic + * by hooking into the exiting of the server process. + */ + public addExitHandler = (handler: ExitHandler) => this.exitHandlers.push(handler); + + /** + * Kill the session monitor (parent process) from this + * server worker (child process). This will also kill + * this process (child process). + */ + public killSession = (reason: string, graceful = true, errorCode = 0) => this.sendMonitorAction("kill", { reason, graceful, errorCode }); + + /** + * A convenience wrapper to tell the session monitor (parent process) + * to carry out the action with the specified message and arguments. + */ + public sendMonitorAction = (message: string, args?: any) => process.send!({ action: { message, args } }); + + private constructor(work: Function) { + this.lifecycleNotification(green(`initializing process... (${white(`${process.execPath} ${process.execArgv.join(" ")}`)})`)); + + const { pollingRoute, serverPort, pollingIntervalSeconds, pollingFailureTolerance } = process.env; + this.serverPort = Number(serverPort); + this.pollingIntervalSeconds = Number(pollingIntervalSeconds); + this.pollingFailureTolerance = Number(pollingFailureTolerance); + this.pollTarget = `http://localhost:${serverPort}${pollingRoute}`; + + this.configureProcess(); + work(); + this.pollServer(); + } + + /** + * Set up message and uncaught exception handlers for this + * server process. + */ + private configureProcess = () => { + // updates the local values of variables to the those sent from master + process.on("message", async ({ newPollingIntervalSeconds, manualExit }) => { + if (newPollingIntervalSeconds !== undefined) { + this.pollingIntervalSeconds = newPollingIntervalSeconds; + } + if (manualExit !== undefined) { + await this.executeExitHandlers(null); + process.exit(0); + } + }); + + // one reason to exit, as the process might be in an inconsistent state after such an exception + process.on('uncaughtException', this.proactiveUnplannedExit); + } + + /** + * Execute the list of functions registered to be called + * whenever the process exits. + */ + private executeExitHandlers = async (reason: Error | null) => Promise.all(this.exitHandlers.map(handler => handler(reason))); + + /** + * Notify master thread (which will log update in the console) of initialization via IPC. + */ + public lifecycleNotification = (event: string) => process.send?.({ lifecycle: event }); + + /** + * Called whenever the process has a reason to terminate, either through an uncaught exception + * in the process (potentially inconsistent state) or the server cannot be reached. 
+ */ + private proactiveUnplannedExit = async (error: Error): Promise<void> => { + this.shouldServerBeResponsive = false; + // communicates via IPC to the master thread that it should dispatch a crash notification email + this.sendMonitorAction("notify_crash", { error }); + await this.executeExitHandlers(error); + // notify master thread (which will log update in the console) of crash event via IPC + this.lifecycleNotification(red(`crash event detected @ ${new Date().toUTCString()}`)); + this.lifecycleNotification(red(error.message)); + process.exit(1); + } + + /** + * This monitors the health of the server by submitting a get request to whatever port / route specified + * by the configuration every n seconds, where n is also given by the configuration. + */ + private pollServer = async (): Promise<void> => { + await new Promise<void>(resolve => { + setTimeout(async () => { + try { + await get(this.pollTarget); + if (!this.shouldServerBeResponsive) { + // notify monitor thread that the server is up and running + this.lifecycleNotification(green(`listening on ${this.serverPort}...`)); + } + this.shouldServerBeResponsive = true; + resolve(); + } catch (error) { + // if we expect the server to be unavailable, i.e. during compilation, + // the listening variable is false, activeExit will return early and the child + // process will continue + if (this.shouldServerBeResponsive) { + if (++this.pollingFailureCount > this.pollingFailureTolerance) { + this.proactiveUnplannedExit(error); + } else { + this.lifecycleNotification(yellow(`the server has encountered ${this.pollingFailureCount} of ${this.pollingFailureTolerance} tolerable failures`)); + } + } + } + }, 1000 * this.pollingIntervalSeconds); + }); + // controlled, asynchronous infinite recursion achieves a persistent poll that does not submit a new request until the previous has completed + this.pollServer(); + } + + } + +}
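Putting the two halves of the Session namespace together: an application subclasses AppliedSessionAgent to say what the monitor and the server worker should each do, and launch() picks the correct branch based on cluster.isMaster. A minimal sketch, where startServer is a hypothetical bootstrap function for the actual express/websocket server:

```typescript
import { Session } from "./Session/session";

// hypothetical application entry point that builds the server and starts listening
declare function startServer(): Promise<void>;

class DashSessionAgent extends Session.AppliedSessionAgent {

    // master process: create the monitor that spawns, respawns, and polls the worker
    protected async launchMonitor(): Promise<Session.Monitor> {
        const monitor = Session.Monitor.Create();
        monitor.addReplCommand("pulse", [], () => monitor.log("session is alive"));
        return monitor;
    }

    // worker process: hand the real server bootstrap to the ServerWorker, which runs it
    // and then begins polling its own heartbeat route
    protected async launchServerWorker(): Promise<Session.ServerWorker> {
        return Session.ServerWorker.Create(startServer);
    }

}

// initializes exactly one of the two threads, depending on which process we're in
new DashSessionAgent().launch();
```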
\ No newline at end of file diff --git a/src/server/Session/session_config_schema.ts b/src/server/Session/session_config_schema.ts new file mode 100644 index 000000000..5a85a45e3 --- /dev/null +++ b/src/server/Session/session_config_schema.ts @@ -0,0 +1,39 @@ +import { Schema } from "jsonschema"; + +export const configurationSchema: Schema = { + id: "/configuration", + type: "object", + properties: { + ports: { + type: "object", + properties: { + server: { type: "number", minimum: 1024, maximum: 65535 }, + socket: { type: "number", minimum: 1024, maximum: 65535 } + }, + required: ["server"], + additionalProperties: true + }, + pollingRoute: { + type: "string", + pattern: /\/[a-zA-Z]*/g + }, + masterIdentifier: { + type: "string", + minLength: 1 + }, + workerIdentifier: { + type: "string", + minLength: 1 + }, + showServerOutput: { type: "boolean" }, + pollingIntervalSeconds: { + type: "number", + minimum: 1, + maximum: 86400 + }, + pollingFailureTolerance: { + type: "number", + minimum: 0, + } + } +};
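A sketch of a session.config.json payload that this schema accepts, validated the same way the monitor's loadAndValidateConfiguration does above; the concrete values are illustrative:

```typescript
import { validate } from "jsonschema";
import { configurationSchema } from "./Session/session_config_schema";

// both ports sit in [1024, 65535], the polling route matches /[a-zA-Z]*,
// and the polling interval falls inside the one-second-to-one-day window
const sampleConfiguration = {
    showServerOutput: true,
    masterIdentifier: "__monitor__",
    workerIdentifier: "__server__",
    ports: { server: 1050, socket: 4321 },
    pollingRoute: "/serverHeartbeat",
    pollingIntervalSeconds: 30,
    pollingFailureTolerance: 1
};

const report = validate(sampleConfiguration, configurationSchema, { throwError: false, allowUnknownAttributes: false });
console.log(report.valid ? "configuration accepted" : report.errors.map(error => error.stack));
```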
\ No newline at end of file diff --git a/src/server/SharedMediaTypes.ts b/src/server/SharedMediaTypes.ts new file mode 100644 index 000000000..8d0f441f0 --- /dev/null +++ b/src/server/SharedMediaTypes.ts @@ -0,0 +1,8 @@ +export namespace AcceptibleMedia { + export const gifs = [".gif"]; + export const pngs = [".png"]; + export const jpgs = [".jpg", ".jpeg"]; + export const imageFormats = [...pngs, ...jpgs, ...gifs]; + export const videoFormats = [".mov", ".mp4"]; + export const applicationFormats = [".pdf"]; +}
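A small illustration of how these extension lists might be consulted when deciding how to handle an uploaded file; the classify helper itself is hypothetical:

```typescript
import * as path from "path";
import { AcceptibleMedia } from "./SharedMediaTypes";

type MediaCategory = "image" | "video" | "application" | "unsupported";

// Bucket a file by its lower-cased extension, mirroring the checks
// DashUploadUtils performs before resizing or moving an upload.
function classify(filename: string): MediaCategory {
    const extension = path.extname(filename).toLowerCase();
    if (AcceptibleMedia.imageFormats.includes(extension)) return "image";
    if (AcceptibleMedia.videoFormats.includes(extension)) return "video";
    if (AcceptibleMedia.applicationFormats.includes(extension)) return "application";
    return "unsupported";
}

// classify("photo.JPG") === "image"; classify("notes.pdf") === "application"
```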
\ No newline at end of file diff --git a/src/server/Websocket/Websocket.ts b/src/server/Websocket/Websocket.ts new file mode 100644 index 000000000..578147d60 --- /dev/null +++ b/src/server/Websocket/Websocket.ts @@ -0,0 +1,228 @@ +import { Utils } from "../../Utils"; +import { MessageStore, Transferable, Types, Diff, YoutubeQueryInput, YoutubeQueryTypes } from "../Message"; +import { Client } from "../Client"; +import { Socket } from "socket.io"; +import { Database } from "../database"; +import { Search } from "../Search"; +import * as io from 'socket.io'; +import YoutubeApi from "../apis/youtube/youtubeApiSample"; +import { GoogleCredentialsLoader } from "../credentials/CredentialsLoader"; +import { logPort } from "../ActionUtilities"; +import { timeMap } from "../ApiManagers/UserManager"; +import { green } from "colors"; + +export namespace WebSocket { + + export let _socket: Socket; + const clients: { [key: string]: Client } = {}; + export const socketMap = new Map<SocketIO.Socket, string>(); + export let disconnect: Function; + + export async function start(isRelease: boolean) { + await preliminaryFunctions(); + initialize(isRelease); + } + + async function preliminaryFunctions() { + } + + function initialize(isRelease: boolean) { + const endpoint = io(); + endpoint.on("connection", function (socket: Socket) { + _socket = socket; + + socket.use((_packet, next) => { + const userEmail = socketMap.get(socket); + if (userEmail) { + timeMap[userEmail] = Date.now(); + } + next(); + }); + + Utils.Emit(socket, MessageStore.Foo, "handshooken"); + + Utils.AddServerHandler(socket, MessageStore.Bar, guid => barReceived(socket, guid)); + Utils.AddServerHandler(socket, MessageStore.SetField, (args) => setField(socket, args)); + Utils.AddServerHandlerCallback(socket, MessageStore.GetField, getField); + Utils.AddServerHandlerCallback(socket, MessageStore.GetFields, getFields); + if (isRelease) { + Utils.AddServerHandler(socket, MessageStore.DeleteAll, deleteFields); + } + + Utils.AddServerHandler(socket, MessageStore.CreateField, CreateField); + Utils.AddServerHandlerCallback(socket, MessageStore.YoutubeApiQuery, HandleYoutubeQuery); + Utils.AddServerHandler(socket, MessageStore.UpdateField, diff => UpdateField(socket, diff)); + Utils.AddServerHandler(socket, MessageStore.DeleteField, id => DeleteField(socket, id)); + Utils.AddServerHandler(socket, MessageStore.DeleteFields, ids => DeleteFields(socket, ids)); + Utils.AddServerHandlerCallback(socket, MessageStore.GetRefField, GetRefField); + Utils.AddServerHandlerCallback(socket, MessageStore.GetRefFields, GetRefFields); + + disconnect = () => { + socket.broadcast.emit("connection_terminated", Date.now()); + socket.disconnect(true); + }; + }); + + const socketPort = isRelease ? 
Number(process.env.socketPort) : 4321; + endpoint.listen(socketPort); + logPort("websocket", socketPort); + } + + function HandleYoutubeQuery([query, callback]: [YoutubeQueryInput, (result?: any[]) => void]) { + const { ProjectCredentials } = GoogleCredentialsLoader; + switch (query.type) { + case YoutubeQueryTypes.Channels: + YoutubeApi.authorizedGetChannel(ProjectCredentials); + break; + case YoutubeQueryTypes.SearchVideo: + YoutubeApi.authorizedGetVideos(ProjectCredentials, query.userInput, callback); + case YoutubeQueryTypes.VideoDetails: + YoutubeApi.authorizedGetVideoDetails(ProjectCredentials, query.videoIds, callback); + } + } + + export async function deleteFields() { + await Database.Instance.deleteAll(); + await Search.clear(); + await Database.Instance.deleteAll('newDocuments'); + } + + export async function deleteAll() { + await Database.Instance.deleteAll(); + await Database.Instance.deleteAll('newDocuments'); + await Database.Instance.deleteAll('sessions'); + await Database.Instance.deleteAll('users'); + await Search.clear(); + } + + function barReceived(socket: SocketIO.Socket, userEmail: string) { + clients[userEmail] = new Client(userEmail.toString()); + console.log(green(`user ${userEmail} has connected to the web socket`)); + socketMap.set(socket, userEmail); + } + + function getField([id, callback]: [string, (result?: Transferable) => void]) { + Database.Instance.getDocument(id, (result?: Transferable) => + callback(result ? result : undefined)); + } + + function getFields([ids, callback]: [string[], (result: Transferable[]) => void]) { + Database.Instance.getDocuments(ids, callback); + } + + function setField(socket: Socket, newValue: Transferable) { + Database.Instance.update(newValue.id, newValue, () => + socket.broadcast.emit(MessageStore.SetField.Message, newValue)); + if (newValue.type === Types.Text) { + Search.updateDocument({ id: newValue.id, data: (newValue as any).data }); + console.log("set field"); + console.log("checking in"); + } + } + + function GetRefField([id, callback]: [string, (result?: Transferable) => void]) { + Database.Instance.getDocument(id, callback, "newDocuments"); + } + + function GetRefFields([ids, callback]: [string[], (result?: Transferable[]) => void]) { + Database.Instance.getDocuments(ids, callback, "newDocuments"); + } + + const suffixMap: { [type: string]: (string | [string, string | ((json: any) => any)]) } = { + "number": "_n", + "string": "_t", + "boolean": "_b", + "image": ["_t", "url"], + "video": ["_t", "url"], + "pdf": ["_t", "url"], + "audio": ["_t", "url"], + "web": ["_t", "url"], + "RichTextField": ["_t", value => value.Text], + "date": ["_d", value => new Date(value.date).toISOString()], + "proxy": ["_i", "fieldId"], + "list": ["_l", list => { + const results = []; + for (const value of list.fields) { + const term = ToSearchTerm(value); + if (term) { + results.push(term.value); + } + } + return results.length ? results : null; + }] + }; + + function ToSearchTerm(val: any): { suffix: string, value: any } | undefined { + if (val === null || val === undefined) { + return; + } + const type = val.__type || typeof val; + let suffix = suffixMap[type]; + if (!suffix) { + return; + } + + if (Array.isArray(suffix)) { + const accessor = suffix[1]; + if (typeof accessor === "function") { + val = accessor(val); + } else { + val = val[accessor]; + } + suffix = suffix[0]; + } + + return { suffix, value: val }; + } + + function getSuffix(value: string | [string, any]): string { + return typeof value === "string" ? 
value : value[0]; + } + + function UpdateField(socket: Socket, diff: Diff) { + Database.Instance.update(diff.id, diff.diff, + () => socket.broadcast.emit(MessageStore.UpdateField.Message, diff), false, "newDocuments"); + const docfield = diff.diff.$set || diff.diff.$unset; + if (!docfield) { + return; + } + const update: any = { id: diff.id }; + let dynfield = false; + for (let key in docfield) { + if (!key.startsWith("fields.")) continue; + dynfield = true; + const val = docfield[key]; + key = key.substring(7); + Object.values(suffixMap).forEach(suf => update[key + getSuffix(suf)] = { set: null }); + const term = ToSearchTerm(val); + if (term !== undefined) { + const { suffix, value } = term; + update[key + suffix] = { set: value }; + } + } + if (dynfield) { + Search.updateDocument(update); + } + } + + function DeleteField(socket: Socket, id: string) { + Database.Instance.delete({ _id: id }, "newDocuments").then(() => { + socket.broadcast.emit(MessageStore.DeleteField.Message, id); + }); + + Search.deleteDocuments([id]); + } + + function DeleteFields(socket: Socket, ids: string[]) { + Database.Instance.delete({ _id: { $in: ids } }, "newDocuments").then(() => { + socket.broadcast.emit(MessageStore.DeleteFields.Message, ids); + }); + Search.deleteDocuments(ids); + } + + function CreateField(newValue: any) { + Database.Instance.insert(newValue, "newDocuments"); + } + +} + diff --git a/src/server/apis/google/GoogleApiServerUtils.ts b/src/server/apis/google/GoogleApiServerUtils.ts index 5714c9928..329107a71 100644 --- a/src/server/apis/google/GoogleApiServerUtils.ts +++ b/src/server/apis/google/GoogleApiServerUtils.ts @@ -1,142 +1,282 @@ import { google } from "googleapis"; -import { createInterface } from "readline"; -import { readFile, writeFile } from "fs"; -import { OAuth2Client, Credentials } from "google-auth-library"; +import { OAuth2Client, Credentials, OAuth2ClientOptions } from "google-auth-library"; import { Opt } from "../../../new_fields/Doc"; -import { GlobalOptions } from "googleapis-common"; import { GaxiosResponse } from "gaxios"; import request = require('request-promise'); import * as qs from 'query-string'; -import Photos = require('googlephotos'); import { Database } from "../../database"; +import { GoogleCredentialsLoader } from "../../credentials/CredentialsLoader"; + /** - * Server side authentication for Google Api queries. + * Scopes give Google users fine granularity of control + * over the information they make accessible via the API. + * This is the somewhat overkill list of what Dash requests + * from the user. */ -export namespace GoogleApiServerUtils { +const scope = [ + 'documents.readonly', + 'documents', + 'presentations', + 'presentations.readonly', + 'drive', + 'drive.file', + 'photoslibrary', + 'photoslibrary.appendonly', + 'photoslibrary.sharing', + 'userinfo.profile' +].map(relative => `https://www.googleapis.com/auth/${relative}`); - // If modifying these scopes, delete token.json. - const prefix = 'https://www.googleapis.com/auth/'; - const SCOPES = [ - 'documents.readonly', - 'documents', - 'presentations', - 'presentations.readonly', - 'drive', - 'drive.file', - 'photoslibrary', - 'photoslibrary.appendonly', - 'photoslibrary.sharing', - 'userinfo.profile' - ]; - - export const parseBuffer = (data: Buffer) => JSON.parse(data.toString()); +/** + * This namespace manages server side authentication for Google API queries, either + * from the standard v1 APIs or the Google Photos REST API. 
+ */ +export namespace GoogleApiServerUtils { + /** + * As we expand out to more Google APIs that are accessible from + * the 'googleapis' module imported above, this enum will record + * the list and provide a unified string representation of each API. + */ export enum Service { Documents = "Documents", Slides = "Slides" } - export interface CredentialInformation { - credentialsPath: string; - userId: string; + /** + * Global credentials read once from a JSON file + * before the server is started that + * allow us to build OAuth2 clients with Dash's + * application specific credentials. + */ + let oAuthOptions: OAuth2ClientOptions; + + /** + * This is a global authorization client that is never + * passed around, and whose credentials are never set. + * Its job is purely to generate new authentication urls + * (users will follow to get to Google's permissions GUI) + * and to use the codes returned from that process to generate the + * initial credentials. + */ + let worker: OAuth2Client; + + /** + * This function is called once before the server is started, + * reading in Dash's project-specific credentials (client secret + * and client id) for later repeated access. It also sets up the + * global, intentionally unauthenticated worker OAuth2 client instance. + */ + export function processProjectCredentials(): void { + const { client_secret, client_id, redirect_uris } = GoogleCredentialsLoader.ProjectCredentials; + // initialize the global authorization client + oAuthOptions = { + clientId: client_id, + clientSecret: client_secret, + redirectUri: redirect_uris[0] + }; + worker = generateClient(); } + /** + * A briefer format for the response from a 'googleapis' API request + */ export type ApiResponse = Promise<GaxiosResponse>; + + /** + * A generic form for a handler that executes some request on the endpoint + */ export type ApiRouter = (endpoint: Endpoint, parameters: any) => ApiResponse; + + /** + * A generic form for the asynchronous function that actually submits the + * request to the API and returns the corresporing response. Helpful when + * making an extensible endpoint definition. + */ export type ApiHandler = (parameters: any, methodOptions?: any) => ApiResponse; + + /** + * A literal union type indicating the valid actions for these 'googleapis' + * requestions + */ export type Action = "create" | "retrieve" | "update"; - export type Endpoint = { get: ApiHandler, create: ApiHandler, batchUpdate: ApiHandler }; - export type EndpointParameters = GlobalOptions & { version: "v1" }; - - export const GetEndpoint = (sector: string, paths: CredentialInformation) => { - return new Promise<Opt<Endpoint>>(resolve => { - RetrieveCredentials(paths).then(authentication => { - let routed: Opt<Endpoint>; - let parameters: EndpointParameters = { auth: authentication.client, version: "v1" }; - switch (sector) { - case Service.Documents: - routed = google.docs(parameters).documents; - break; - case Service.Slides: - routed = google.slides(parameters).presentations; - break; - } - resolve(routed); - }); - }); - }; - - export const RetrieveAccessToken = (information: CredentialInformation) => { - return new Promise<string>((resolve, reject) => { - RetrieveCredentials(information).then( - credentials => resolve(credentials.token.access_token!), - error => reject(`Error: unable to authenticate Google Photos API request.\n${error}`) - ); + /** + * An interface defining any entity on which one can invoke + * anuy of the following handlers. 
All 'googleapis' wrappers + * such as google.docs().documents and google.slides().presentations + * satisfy this interface. + */ + export interface Endpoint { + get: ApiHandler; + create: ApiHandler; + batchUpdate: ApiHandler; + } + + /** + * Maps the Dash user id of a given user to their single + * associated OAuth2 client, mitigating the creation + * of needless duplicate clients that would arise from + * making one new client instance per request. + */ + const authenticationClients = new Map<String, OAuth2Client>(); + + /** + * This function receives the target sector ("which G-Suite app's API am I interested in?") + * and the id of the Dash user making the request to the API. With this information, it generates + * an authenticated OAuth2 client and passes it into the relevant 'googleapis' wrapper. + * @param sector the particular desired G-Suite 'googleapis' API (docs, slides, etc.) + * @param userId the id of the Dash user making the request to the API + * @returns the relevant 'googleapis' wrapper, if any + */ + export async function GetEndpoint(sector: string, userId: string): Promise<Opt<Endpoint>> { + return new Promise(async resolve => { + const auth = await retrieveOAuthClient(userId); + if (!auth) { + return resolve(); + } + let routed: Opt<Endpoint>; + const parameters: any = { auth, version: "v1" }; + switch (sector) { + case Service.Documents: + routed = google.docs(parameters).documents; + break; + case Service.Slides: + routed = google.slides(parameters).presentations; + break; + } + resolve(routed); }); - }; + } - const RetrieveOAuthClient = async (information: CredentialInformation) => { - return new Promise<OAuth2Client>((resolve, reject) => { - readFile(information.credentialsPath, async (err, credentials) => { - if (err) { - reject(err); - return console.log('Error loading client secret file:', err); - } - const { client_secret, client_id, redirect_uris } = parseBuffer(credentials).installed; - resolve(new google.auth.OAuth2(client_id, client_secret, redirect_uris[0])); - }); + /** + * Returns the lengthy string or access token that can be passed into + * the headers of an API request or into the constructor of the Photos + * client API wrapper. + * @param userId the Dash user id of the user requesting his/her associated + * access_token + * @returns the current access_token associated with the requesting + * Dash user. The access_token is valid for only an hour, and + * is then refreshed. + */ + export async function retrieveAccessToken(userId: string): Promise<string> { + return new Promise(async resolve => { + const { credentials } = await retrieveCredentials(userId); + if (!credentials) { + return resolve(); + } + resolve(credentials.access_token!); }); - }; + } - export const GenerateAuthenticationUrl = async (information: CredentialInformation) => { - const client = await RetrieveOAuthClient(information); - return client.generateAuthUrl({ - access_type: 'offline', - scope: SCOPES.map(relative => prefix + relative), + /** + * Manipulates a mapping such that, in the limit, each Dash user has + * an associated authenticated OAuth2 client at their disposal. This + * function ensures that the client's credentials always remain up to date + * @param userId the Dash user id of the user requesting account integration + * @returns returns an initialized OAuth2 client instance, likely to be passed into Google's + * npm-installed API wrappers that use authenticated client instances rather than access codes for + * security. 
+ */ + export async function retrieveOAuthClient(userId: string): Promise<OAuth2Client> { + return new Promise(async resolve => { + const { credentials, refreshed } = await retrieveCredentials(userId); + if (!credentials) { + return resolve(); + } + let client = authenticationClients.get(userId); + if (!client) { + authenticationClients.set(userId, client = generateClient(credentials)); + } else if (refreshed) { + client.setCredentials(credentials); + } + resolve(client); }); - }; + } + + /** + * Creates a new OAuth2Client instance, and if provided, sets + * the specific credentials on the client + * @param credentials if you have access to the credentials that you'll eventually set on + * the client, just pass them in at initialization + * @returns the newly created, potentially certified, OAuth2 client instance + */ + function generateClient(credentials?: Credentials): OAuth2Client { + const client = new google.auth.OAuth2(oAuthOptions); + credentials && client.setCredentials(credentials); + return client; + } + + /** + * Calls on the worker (which does not have and does not need + * any credentials) to produce a url to which the user can + * navigate to give Dash the necessary Google permissions. + * @returns the newly generated url to the authentication landing page + */ + export function generateAuthenticationUrl(): string { + return worker.generateAuthUrl({ scope, access_type: 'offline' }); + } + /** + * This is what we return to the server in processNewUser(), after the + * worker OAuth2Client has used the user-pasted authentication code + * to retrieve an access token and an info token. The avatar is the + * URL to the Google-hosted mono-color, single white letter profile 'image'. + */ export interface GoogleAuthenticationResult { access_token: string; avatar: string; name: string; } - export const ProcessClientSideCode = async (information: CredentialInformation, authenticationCode: string): Promise<GoogleAuthenticationResult> => { - const oAuth2Client = await RetrieveOAuthClient(information); - return new Promise<GoogleAuthenticationResult>((resolve, reject) => { - oAuth2Client.getToken(authenticationCode, async (err, token) => { - if (err || !token) { + + /** + * This method receives the authentication code that the + * user pasted into the overlay in the client side and uses the worker + * and the authentication code to fetch the full set of credentials that + * we'll store in the database for each user. This is called once per + * new account integration. + * @param userId the Dash user id of the user requesting account integration, used to associate the new credentials + * with a Dash user in the googleAuthentication table of the database. + * @param authenticationCode the Google-provided authentication code that the user copied + * from Google's permissions UI and pasted into the overlay. + * + * EXAMPLE CODE: 4/sgF2A5uGg4xASHf7VQDnLtdqo3mUlfQqLSce_HYz5qf1nFtHj9YTeGs + * + * @returns the information necessary to authenticate a client side google photos request + * and display basic user information in the overlay on successful authentication. + * This can be expanded as needed by adding properties to the interface GoogleAuthenticationResult. 
+ */ + export async function processNewUser(userId: string, authenticationCode: string): Promise<GoogleAuthenticationResult> { + const credentials = await new Promise<Credentials>((resolve, reject) => { + worker.getToken(authenticationCode, async (err, credentials) => { + if (err || !credentials) { reject(err); - return console.error('Error retrieving access token', err); + return; } - oAuth2Client.setCredentials(token); - const enriched = injectUserInfo(token); - await Database.Auxiliary.GoogleAuthenticationToken.Write(information.userId, enriched); - const { given_name, picture } = enriched.userInfo; - resolve({ - access_token: enriched.access_token!, - avatar: picture, - name: given_name - }); + resolve(credentials); }); }); - }; + const enriched = injectUserInfo(credentials); + await Database.Auxiliary.GoogleAuthenticationToken.Write(userId, enriched); + const { given_name, picture } = enriched.userInfo; + return { + access_token: enriched.access_token!, + avatar: picture, + name: given_name + }; + } /** - * It's pretty cool: the credentials id_token is split into thirds by periods. - * The middle third contains a base64-encoded JSON string with all the - * user info contained in the interface below. So, we isolate that middle third, - * base64 decode with atob and parse the JSON. - * @param credentials the client credentials returned from OAuth after the user - * has executed the authentication routine + * This type represents the union of the full set of OAuth2 credentials + * and all of a Google user's publically available information. This is the strucure + * of the JSON object we ultimately store in the googleAuthentication table of the database. */ - const injectUserInfo = (credentials: Credentials): EnrichedCredentials => { - const userInfo = JSON.parse(atob(credentials.id_token!.split(".")[1])); - return { ...credentials, userInfo }; - }; - export type EnrichedCredentials = Credentials & { userInfo: UserInfo }; + + /** + * This interface defines all of the information we + * receive from parsing the base64 encoded info-token + * for a Google user. + */ export interface UserInfo { at_hash: string; aud: string; @@ -152,70 +292,73 @@ export namespace GoogleApiServerUtils { sub: string; } - export const RetrieveCredentials = (information: CredentialInformation) => { - return new Promise<TokenResult>((resolve, reject) => { - readFile(information.credentialsPath, async (err, credentials) => { - if (err) { - reject(err); - return console.log('Error loading client secret file:', err); - } - authorize(parseBuffer(credentials), information.userId).then(resolve, reject); - }); - }); - }; - - export const RetrievePhotosEndpoint = (paths: CredentialInformation) => { - return new Promise<any>((resolve, reject) => { - RetrieveAccessToken(paths).then( - token => resolve(new Photos(token)), - reject - ); - }); - }; - - type TokenResult = { token: Credentials, client: OAuth2Client }; - /** - * Create an OAuth2 client with the given credentials, and returns the promise resolving to the authenticated client - * @param {Object} credentials The authorization client credentials. 
- */ - export function authorize(credentials: any, userId: string): Promise<TokenResult> { - const { client_secret, client_id, redirect_uris } = credentials.installed; - const oAuth2Client = new google.auth.OAuth2(client_id, client_secret, redirect_uris[0]); - return new Promise<TokenResult>((resolve, reject) => { - // Attempting to authorize user (${userId}) - Database.Auxiliary.GoogleAuthenticationToken.Fetch(userId).then(token => { - if (token!.expiry_date! < new Date().getTime()) { - // Token has expired, so submitting a request for a refreshed access token - return refreshToken(token!, client_id, client_secret, oAuth2Client, userId).then(resolve, reject); - } - // Authentication successful! - oAuth2Client.setCredentials(token!); - resolve({ token: token!, client: oAuth2Client }); - }); - }); + /** + * It's pretty cool: the credentials id_token is split into thirds by periods. + * The middle third contains a base64-encoded JSON string with all the + * user info contained in the interface below. So, we isolate that middle third, + * base64 decode with atob and parse the JSON. + * @param credentials the client credentials returned from OAuth after the user + * has executed the authentication routine + * @returns the full set of credentials in the structure in which they'll be stored + * in the database. + */ + function injectUserInfo(credentials: Credentials): EnrichedCredentials { + const userInfo: UserInfo = JSON.parse(atob(credentials.id_token!.split(".")[1])); + return { ...credentials, userInfo }; } - const refreshEndpoint = "https://oauth2.googleapis.com/token"; - const refreshToken = (credentials: Credentials, client_id: string, client_secret: string, oAuth2Client: OAuth2Client, userId: string) => { - return new Promise<TokenResult>(resolve => { - let headerParameters = { headers: { 'Content-Type': 'application/x-www-form-urlencoded' } }; - let queryParameters = { - refreshToken: credentials.refresh_token, - client_id, - client_secret, - grant_type: "refresh_token" - }; - let url = `${refreshEndpoint}?${qs.stringify(queryParameters)}`; - request.post(url, headerParameters).then(async response => { - let { access_token, expires_in } = JSON.parse(response); - const expiry_date = new Date().getTime() + (expires_in * 1000); - await Database.Auxiliary.GoogleAuthenticationToken.Update(userId, access_token, expiry_date); - credentials.access_token = access_token; - credentials.expiry_date = expiry_date; - oAuth2Client.setCredentials(credentials); - resolve({ token: credentials, client: oAuth2Client }); - }); + /** + * Looks in the database for any credentials object with the given user id, + * and returns them. If the credentials are found but expired, the function will + * automatically refresh the credentials and then resolve with the updated values. + * @param userId the id of the Dash user requesting his/her credentials. Eventually, each user might + * be associated with multiple different sets of Google credentials. + * @returns the credentials, or undefined if the user has no stored associated credentials, + * and a flag indicating whether or not they were refreshed during retrieval + */ + async function retrieveCredentials(userId: string): Promise<{ credentials: Opt<Credentials>, refreshed: boolean }> { + let credentials: Opt<Credentials> = await Database.Auxiliary.GoogleAuthenticationToken.Fetch(userId); + const refreshed = false; + if (!credentials) { + return { credentials: undefined, refreshed }; + } + // check for token expiry + if (credentials.expiry_date! 
<= new Date().getTime()) { + credentials = await refreshAccessToken(credentials, userId); + return { credentials, refreshed: true }; + } + return { credentials, refreshed }; + } + + /** + * This function submits a request to OAuth with the local refresh token + * to revalidate the credentials for a given Google user associated with + * the Dash user id passed in. In addition to returning the credentials, it + * writes the diff to the database. + * @param credentials the expired credentials to be refreshed + * @param userId the id of the Dash user implicitly requesting that + * his/her credentials be refreshed + * @returns the updated credentials + */ + async function refreshAccessToken(credentials: Credentials, userId: string): Promise<Credentials> { + const headerParameters = { headers: { 'Content-Type': 'application/x-www-form-urlencoded' } }; + const { client_id, client_secret } = GoogleCredentialsLoader.ProjectCredentials; + const url = `https://oauth2.googleapis.com/token?${qs.stringify({ + refreshToken: credentials.refresh_token, + client_id, + client_secret, + grant_type: "refresh_token" + })}`; + const { access_token, expires_in } = await new Promise<any>(async resolve => { + const response = await request.post(url, headerParameters); + resolve(JSON.parse(response)); }); - }; + // expires_in is in seconds, but we're building the new expiry date in milliseconds + const expiry_date = new Date().getTime() + (expires_in * 1000); + await Database.Auxiliary.GoogleAuthenticationToken.Update(userId, access_token, expiry_date); + // update the relevant properties + credentials.access_token = access_token; + credentials.expiry_date = expiry_date; + return credentials; + } }
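For orientation, here is a rough usage sketch of the refactored credential flow above, written as a standalone helper that is not part of this commit (the function name and its wiring are invented for illustration; only generateAuthenticationUrl, processNewUser, and retrieveOAuthClient come from the GoogleApiServerUtils namespace):

import { GoogleApiServerUtils } from "./apis/google/GoogleApiServerUtils";

// Hypothetical end-to-end integration for a single Dash user.
async function exampleGoogleIntegration(userId: string, pastedCode: string) {
    // 1) the client surfaces this url so the user can grant Dash access and copy back the resulting code
    const consentUrl = GoogleApiServerUtils.generateAuthenticationUrl();
    console.log(`Ask the user to visit ${consentUrl} and paste the code into the overlay.`);
    // 2) exchange the pasted code for credentials, persist them, and surface basic profile info
    const { name, avatar } = await GoogleApiServerUtils.processNewUser(userId, pastedCode);
    // 3) on any later request, an authorized (and, if expired, refreshed) client is rebuilt from the database
    const client = await GoogleApiServerUtils.retrieveOAuthClient(userId);
    return { name, avatar, client };
}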
\ No newline at end of file diff --git a/src/server/apis/google/GooglePhotosUploadUtils.ts b/src/server/apis/google/GooglePhotosUploadUtils.ts index 36256822c..8ae63caa3 100644 --- a/src/server/apis/google/GooglePhotosUploadUtils.ts +++ b/src/server/apis/google/GooglePhotosUploadUtils.ts @@ -1,74 +1,137 @@ import request = require('request-promise'); -import { GoogleApiServerUtils } from './GoogleApiServerUtils'; import * as path from 'path'; -import { MediaItemCreationResult, NewMediaItemResult } from './SharedTypes'; -import { NewMediaItem } from "../../index"; +import { NewMediaItemResult } from './SharedTypes'; import { BatchedArray, TimeUnit } from 'array-batcher'; import { DashUploadUtils } from '../../DashUploadUtils'; +/** + * This namespace encompasses the logic + * necessary to upload images to Google's server, + * and then initialize / create those images in the Photos + * API given the upload tokens returned from the initial + * uploading process. + * + * https://developers.google.com/photos/library/reference/rest/v1/mediaItems/batchCreate + */ export namespace GooglePhotosUploadUtils { - export interface Paths { - uploadDirectory: string; - credentialsPath: string; - tokenPath: string; - } - - export interface MediaInput { + /** + * Specifies the structure of the object + * necessary to upload bytes to Google's servers. + * The url is streamed to access the image's bytes, + * and the description is what appears in Google Photos' + * description field. + */ + export interface UploadSource { url: string; description: string; } - const prepend = (extension: string) => `https://photoslibrary.googleapis.com/v1/${extension}`; - const headers = (type: string) => ({ - 'Content-Type': `application/${type}`, - 'Authorization': Bearer, - }); + /** + * This is the format needed to pass + * into the BatchCreate API request + * to take a reference to raw uploaded bytes + * and actually create an image in Google Photos. + * + * So, to instantiate this interface you must have already dispatched an upload + * and received an upload token. + */ + export interface NewMediaItem { + description: string; + simpleMediaItem: { + uploadToken: string; + }; + } - let Bearer: string; + /** + * A utility function to streamline making + * calls to the API's url - accentuates + * the relative path in the caller. + * @param extension the desired + * subset of the API + */ + function prepend(extension: string): string { + return `https://photoslibrary.googleapis.com/v1/${extension}`; + } - export const initialize = async (information: GoogleApiServerUtils.CredentialInformation) => { - const token = await GoogleApiServerUtils.RetrieveAccessToken(information); - Bearer = `Bearer ${token}`; - }; + /** + * Factors out the creation of the API request's + * authentication elements stored in the header. + * @param type the contents of the request + * @param token the user-specific Google access token + */ + function headers(type: string, token: string) { + return { + 'Content-Type': `application/${type}`, + 'Authorization': `Bearer ${token}`, + }; + } - export const DispatchGooglePhotosUpload = async (url: string) => { - if (!DashUploadUtils.imageFormats.includes(path.extname(url))) { + /** + * This is the first step in the remote image creation process. + * Here we upload the raw bytes of the image to Google's servers by + * setting authentication and other required header properties and including + * the raw bytes to the image, to be uploaded, in the body of the request. 
+ * @param bearerToken the user-specific Google access token, specifies the account associated + * with the eventual image creation + * @param url the url of the image to upload + * @param filename an optional name associated with the uploaded image - if not specified + * defaults to the filename (basename) in the url + */ + export const DispatchGooglePhotosUpload = async (bearerToken: string, url: string, filename?: string): Promise<any> => { + // check if the url points to a non-image or an unsupported format + if (!DashUploadUtils.validateExtension(url)) { return undefined; } - const body = await request(url, { encoding: null }); const parameters = { method: 'POST', + uri: prepend('uploads'), headers: { - ...headers('octet-stream'), - 'X-Goog-Upload-File-Name': path.basename(url), + ...headers('octet-stream', bearerToken), + 'X-Goog-Upload-File-Name': filename || path.basename(url), 'X-Goog-Upload-Protocol': 'raw' }, - uri: prepend('uploads'), - body + body: await request(url, { encoding: null }) // the unencoded binary image data, resolved as a buffer rather than a stream }; - return new Promise<any>((resolve, reject) => request(parameters, (error, _response, body) => { + return new Promise((resolve, reject) => request(parameters, (error, _response, body) => { if (error) { - console.log(error); + // on rejection, the server logs the error and the offending image return reject(error); } resolve(body); })); }; - export const CreateMediaItems = async (newMediaItems: NewMediaItem[], album?: { id: string }): Promise<NewMediaItemResult[]> => { + /** + * This is the second step in the remote image creation process: having uploaded + * the raw bytes of the image and received / stored pointers (upload tokens) to those + * bytes, we can now instruct the API to finalize the creation of those images by + * submitting a batch create request with the list of upload tokens and the description + * to be associated with each resulting new image. + * @param bearerToken the user-specific Google access token, specifies the account associated + * with the eventual image creation + * @param newMediaItems a list of objects containing a description and, effectively, the + * pointer to the uploaded bytes + * @param album if included, will add all of the newly created remote images to the album + * with the specified id + */ + export const CreateMediaItems = async (bearerToken: string, newMediaItems: NewMediaItem[], album?: { id: string }): Promise<NewMediaItemResult[]> => { + // it's important to note that the API can't handle more than 50 items in each request and + // seems to need at least some latency between requests (spamming it synchronously has led to the server returning errors)...
const batched = BatchedArray.from(newMediaItems, { batchSize: 50 }); + // ...so we execute them in delayed batches and await the entire execution return batched.batchedMapPatientInterval( { magnitude: 100, unit: TimeUnit.Milliseconds }, - async (batch, collector) => { + async (batch: NewMediaItem[], collector: any): Promise<any> => { const parameters = { method: 'POST', - headers: headers('json'), + headers: headers('json', bearerToken), uri: prepend('mediaItems:batchCreate'), body: { newMediaItems: batch } as any, json: true }; + // register the target album, if provided album && (parameters.body.albumId = album.id); collector.push(...(await new Promise<NewMediaItemResult[]>((resolve, reject) => { request(parameters, (error, _response, body) => { diff --git a/src/server/authentication/config/passport.ts b/src/server/authentication/config/passport.ts index 8915a4abf..286209b20 100644 --- a/src/server/authentication/config/passport.ts +++ b/src/server/authentication/config/passport.ts @@ -1,9 +1,6 @@ import * as passport from 'passport'; import * as passportLocal from 'passport-local'; -import _ from "lodash"; import { default as User } from '../models/user_model'; -import { Request, Response, NextFunction } from "express"; -import { RouteStore } from '../../RouteStore'; const LocalStrategy = passportLocal.Strategy; @@ -29,21 +26,4 @@ passport.use(new LocalStrategy({ usernameField: 'email', passReqToCallback: true return done(undefined, user); }); }); -})); - -export let isAuthenticated = (req: Request, res: Response, next: NextFunction) => { - if (req.isAuthenticated()) { - return next(); - } - return res.redirect(RouteStore.login); -}; - -export let isAuthorized = (req: Request, res: Response, next: NextFunction) => { - const provider = req.path.split("/").slice(-1)[0]; - - if (_.find((req.user as any).tokens, { kind: provider })) { - next(); - } else { - res.redirect(`/auth/${provider}`); - } -};
\ No newline at end of file +}));
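Tying together the two GooglePhotosUploadUtils steps documented a little further up, a minimal sketch of one remote image creation might look as follows (the bearer token, image url, and description are placeholders; only DispatchGooglePhotosUpload, CreateMediaItems, and the NewMediaItem shape come from the namespace itself):

import { GooglePhotosUploadUtils } from "./apis/google/GooglePhotosUploadUtils";

// Hypothetical two-step upload of a single image for an already-authenticated user.
async function exampleRemoteImageCreation(bearerToken: string, imageUrl: string) {
    // step 1: push the raw bytes to Google's servers and receive an upload token in return
    const uploadToken = await GooglePhotosUploadUtils.DispatchGooglePhotosUpload(bearerToken, imageUrl);
    if (!uploadToken) {
        return undefined; // unsupported extension, so nothing was uploaded
    }
    // step 2: batch create the actual Google Photos media item from that upload token
    const newMediaItems = [{ description: "uploaded from Dash", simpleMediaItem: { uploadToken } }];
    return GooglePhotosUploadUtils.CreateMediaItems(bearerToken, newMediaItems);
}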
\ No newline at end of file diff --git a/src/server/authentication/controllers/user_controller.ts b/src/server/authentication/controllers/user_controller.ts index f5c6e1610..f0086d4ea 100644 --- a/src/server/authentication/controllers/user_controller.ts +++ b/src/server/authentication/controllers/user_controller.ts @@ -3,17 +3,11 @@ import { Request, Response, NextFunction } from "express"; import * as passport from "passport"; import { IVerifyOptions } from "passport-local"; import "../config/passport"; -import * as request from "express-validator"; import flash = require("express-flash"); -import * as session from "express-session"; -import * as pug from 'pug'; import * as async from 'async'; import * as nodemailer from 'nodemailer'; import c = require("crypto"); -import { RouteStore } from "../../RouteStore"; import { Utils } from "../../../Utils"; -import { Schema } from "mongoose"; -import { Opt } from "../../../new_fields/Doc"; import { MailOptions } from "nodemailer/lib/stream-transport"; /** @@ -23,8 +17,7 @@ import { MailOptions } from "nodemailer/lib/stream-transport"; */ export let getSignup = (req: Request, res: Response) => { if (req.user) { - let user = req.user; - return res.redirect(RouteStore.home); + return res.redirect("/home"); } res.render("signup.pug", { title: "Sign Up", @@ -45,7 +38,7 @@ export let postSignup = (req: Request, res: Response, next: NextFunction) => { const errors = req.validationErrors(); if (errors) { - return res.redirect(RouteStore.signup); + return res.redirect("/signup"); } const email = req.body.email as String; @@ -62,7 +55,7 @@ export let postSignup = (req: Request, res: Response, next: NextFunction) => { User.findOne({ email }, (err, existingUser) => { if (err) { return next(err); } if (existingUser) { - return res.redirect(RouteStore.login); + return res.redirect("/login"); } user.save((err: any) => { if (err) { return next(err); } @@ -75,13 +68,13 @@ export let postSignup = (req: Request, res: Response, next: NextFunction) => { }; -let tryRedirectToTarget = (req: Request, res: Response) => { +const tryRedirectToTarget = (req: Request, res: Response) => { if (req.session && req.session.target) { - let target = req.session.target; + const target = req.session.target; req.session.target = undefined; res.redirect(target); } else { - res.redirect(RouteStore.home); + res.redirect("/home"); } }; @@ -93,7 +86,7 @@ let tryRedirectToTarget = (req: Request, res: Response) => { export let getLogin = (req: Request, res: Response) => { if (req.user) { req.session!.target = undefined; - return res.redirect(RouteStore.home); + return res.redirect("/home"); } res.render("login.pug", { title: "Log In", @@ -115,13 +108,13 @@ export let postLogin = (req: Request, res: Response, next: NextFunction) => { if (errors) { req.flash("errors", "Unable to login at this time. 
Please try again."); - return res.redirect(RouteStore.signup); + return res.redirect("/signup"); } passport.authenticate("local", (err: Error, user: DashUserModel, info: IVerifyOptions) => { if (err) { next(err); return; } if (!user) { - return res.redirect(RouteStore.signup); + return res.redirect("/signup"); } req.logIn(user, (err) => { if (err) { next(err); return; } @@ -141,7 +134,7 @@ export let getLogout = (req: Request, res: Response) => { if (sess) { sess.destroy((err) => { if (err) { console.log(err); } }); } - res.redirect(RouteStore.login); + res.redirect("/login"); }; export let getForgot = function (req: Request, res: Response) { @@ -155,7 +148,6 @@ export let postForgot = function (req: Request, res: Response, next: NextFunctio const email = req.body.email; async.waterfall([ function (done: any) { - let token: string; c.randomBytes(20, function (err: any, buffer: Buffer) { if (err) { done(null); @@ -168,7 +160,7 @@ export let postForgot = function (req: Request, res: Response, next: NextFunctio User.findOne({ email }, function (err, user: DashUserModel) { if (!user) { // NO ACCOUNT WITH SUBMITTED EMAIL - res.redirect(RouteStore.forgot); + res.redirect("/forgotPassword"); return; } user.passwordResetToken = token; @@ -192,7 +184,7 @@ export let postForgot = function (req: Request, res: Response, next: NextFunctio subject: 'Dash Password Reset', text: 'You are receiving this because you (or someone else) have requested the reset of the password for your account.\n\n' + 'Please click on the following link, or paste this into your browser to complete the process:\n\n' + - 'http://' + req.headers.host + '/reset/' + token + '\n\n' + + 'http://' + req.headers.host + '/resetPassword/' + token + '\n\n' + 'If you did not request this, please ignore this email and your password will remain unchanged.\n' } as MailOptions; smtpTransport.sendMail(mailOptions, function (err: Error | null) { @@ -202,14 +194,14 @@ export let postForgot = function (req: Request, res: Response, next: NextFunctio } ], function (err) { if (err) return next(err); - res.redirect(RouteStore.forgot); + res.redirect("/forgotPassword"); }); }; export let getReset = function (req: Request, res: Response) { User.findOne({ passwordResetToken: req.params.token, passwordResetExpires: { $gt: Date.now() } }, function (err, user: DashUserModel) { if (!user || err) { - return res.redirect(RouteStore.forgot); + return res.redirect("/forgotPassword"); } res.render("reset.pug", { title: "Reset Password", @@ -239,7 +231,7 @@ export let postReset = function (req: Request, res: Response) { user.save(function (err) { if (err) { - res.redirect(RouteStore.login); + res.redirect("/login"); return; } req.logIn(user, function (err) { @@ -271,6 +263,6 @@ export let postReset = function (req: Request, res: Response) { }); } ], function (err) { - res.redirect(RouteStore.login); + res.redirect("/login"); }); };
\ No newline at end of file diff --git a/src/server/authentication/models/current_user_utils.ts b/src/server/authentication/models/current_user_utils.ts index 5b9bba47d..220c37e2b 100644 --- a/src/server/authentication/models/current_user_utils.ts +++ b/src/server/authentication/models/current_user_utils.ts @@ -1,4 +1,4 @@ -import { action, computed, observable, reaction, runInAction } from "mobx"; +import { action, computed, observable, reaction } from "mobx"; import * as rp from 'request-promise'; import { DocServer } from "../../../client/DocServer"; import { Docs } from "../../../client/documents/Documents"; @@ -9,12 +9,11 @@ import { Doc, DocListCast } from "../../../new_fields/Doc"; import { List } from "../../../new_fields/List"; import { listSpec } from "../../../new_fields/Schema"; import { ScriptField, ComputedField } from "../../../new_fields/ScriptField"; -import { Cast, PromiseValue } from "../../../new_fields/Types"; +import { Cast, PromiseValue, StrCast } from "../../../new_fields/Types"; import { Utils } from "../../../Utils"; -import { RouteStore } from "../../RouteStore"; -import { InkingControl } from "../../../client/views/InkingControl"; -import { DragManager } from "../../../client/util/DragManager"; import { nullAudio } from "../../../new_fields/URLField"; +import { DragManager } from "../../../client/util/DragManager"; +import { InkingControl } from "../../../client/views/InkingControl"; export class CurrentUserUtils { private static curr_id: string; @@ -42,12 +41,13 @@ export class CurrentUserUtils { } // setup the "creator" buttons for the sidebar-- eg. the default set of draggable document creation tools - static setupCreatorButtons(doc: Doc) { - let notes = CurrentUserUtils.setupNoteTypes(doc); + static setupCreatorButtons(doc: Doc, buttons?: string[]) { + const notes = CurrentUserUtils.setupNoteTypes(doc); doc.noteTypes = Docs.Create.TreeDocument(notes, { title: "Note Types", height: 75 }); doc.activePen = doc; - let docProtoData: { title: string, icon: string, drag?: string, ignoreClick?: boolean, click?: string, ischecked?: string, activePen?: Doc, backgroundColor?: string, dragFactory?: Doc }[] = [ + const docProtoData: { title: string, icon: string, drag?: string, ignoreClick?: boolean, click?: string, ischecked?: string, activePen?: Doc, backgroundColor?: string, dragFactory?: Doc }[] = [ { title: "collection", icon: "folder", ignoreClick: true, drag: 'Docs.Create.FreeformDocument([], { nativeWidth: undefined, nativeHeight: undefined, width: 150, height: 100, title: "freeform" })' }, + { title: "preview", icon: "expand", ignoreClick: true, drag: 'Docs.Create.DocumentDocument(ComputedField.MakeFunction("selectedDocs(this,true,[_last_])?.[0]"), { width: 250, height: 250, title: "container" })' }, { title: "todo item", icon: "check", ignoreClick: true, drag: 'getCopy(this.dragFactory, true)', dragFactory: notes[notes.length - 1] }, { title: "web page", icon: "globe-asia", ignoreClick: true, drag: 'Docs.Create.WebDocument("https://en.wikipedia.org/wiki/Hedgehog", { width: 300, height: 300, title: "New Webpage" })' }, { title: "cat image", icon: "cat", ignoreClick: true, drag: 'Docs.Create.ImageDocument("https://upload.wikimedia.org/wikipedia/commons/thumb/3/3a/Cat03.jpg/1200px-Cat03.jpg", { width: 200, title: "an image of a cat" })' }, @@ -61,7 +61,7 @@ export class CurrentUserUtils { { title: "use scrubber", icon: "eraser", click: 'activateScrubber(this.activePen.pen = sameDocs(this.activePen.pen, this) ? 
undefined : this);', ischecked: `sameDocs(this.activePen.pen, this)`, backgroundColor: "green", activePen: doc }, { title: "use drag", icon: "mouse-pointer", click: 'deactivateInk();this.activePen.pen = this;', ischecked: `sameDocs(this.activePen.pen, this)`, backgroundColor: "white", activePen: doc }, ]; - return docProtoData.map(data => Docs.Create.FontIconDocument({ + return docProtoData.filter(d => !buttons || !buttons.includes(d.title)).map(data => Docs.Create.FontIconDocument({ nativeWidth: 100, nativeHeight: 100, width: 100, height: 100, dropAction: data.click ? "copy" : undefined, title: data.title, icon: data.icon, ignoreClick: data.ignoreClick, onDragStart: data.drag ? ScriptField.MakeFunction(data.drag) : undefined, onClick: data.click ? ScriptField.MakeScript(data.click) : undefined, ischecked: data.ischecked ? ComputedField.MakeFunction(data.ischecked) : undefined, activePen: data.activePen, @@ -69,6 +69,27 @@ export class CurrentUserUtils { })); } + static async updateCreatorButtons(doc: Doc) { + const toolsBtn = await Cast(doc.ToolsBtn, Doc); + if (toolsBtn) { + const stackingDoc = await Cast(toolsBtn.sourcePanel, Doc); + if (stackingDoc) { + const stackdocs = await Cast(stackingDoc.data, listSpec(Doc)); + if (stackdocs) { + const dragset = await Cast(stackdocs[0], Doc); + if (dragset) { + const dragdocs = await Cast(dragset.data, listSpec(Doc)); + if (dragdocs) { + const dragDocs = await Promise.all(dragdocs); + const newButtons = this.setupCreatorButtons(doc, dragDocs.map(d => StrCast(d.title))); + newButtons.map(nb => Doc.AddDocToList(dragset, "data", nb)); + } + } + } + } + } + } + // setup the Creator button which will display the creator panel. This panel will include the drag creators and the color picker. when clicked, this panel will be displayed in the target container (ie, sidebarContainer) static setupToolsPanel(sidebarContainer: Doc, doc: Doc) { // setup a masonry view of all he creators @@ -203,11 +224,12 @@ export class CurrentUserUtils { doc.undoBtn && reaction(() => UndoManager.undoStack.slice(), () => Doc.GetProto(doc.undoBtn as Doc).opacity = UndoManager.CanUndo() ? 1 : 0.4, { fireImmediately: true }); doc.redoBtn && reaction(() => UndoManager.redoStack.slice(), () => Doc.GetProto(doc.redoBtn as Doc).opacity = UndoManager.CanRedo() ? 1 : 0.4, { fireImmediately: true }); + this.updateCreatorButtons(doc); return doc; } - public static loadCurrentUser() { - return rp.get(Utils.prepend(RouteStore.getCurrUser)).then(response => { + public static async loadCurrentUser() { + return rp.get(Utils.prepend("/getCurrentUser")).then(response => { if (response) { const result: { id: string, email: string } = JSON.parse(response); return result; @@ -220,7 +242,7 @@ export class CurrentUserUtils { public static async loadUserDocument({ id, email }: { id: string, email: string }) { this.curr_id = id; Doc.CurrentUserEmail = email; - await rp.get(Utils.prepend(RouteStore.getUserDocumentId)).then(id => { + await rp.get(Utils.prepend("/getUserDocumentId")).then(id => { if (id && id !== "guest") { return DocServer.GetRefField(id).then(async field => Doc.SetUserDoc(await this.updateUserDocument(field instanceof Doc ? 
field : new Doc(id, true)))); @@ -279,7 +301,7 @@ export class CurrentUserUtils { if (this._northstarCatalog && CurrentUserUtils._northstarSchemas) { this._northstarCatalog.schemas!.push(schema); CurrentUserUtils._northstarSchemas.push(schemaDoc); - let schemas = Cast(CurrentUserUtils.UserDocument.DBSchemas, listSpec("string"), []); + const schemas = Cast(CurrentUserUtils.UserDocument.DBSchemas, listSpec("string"), []); schemas.push(schema.displayName!); CurrentUserUtils.UserDocument.DBSchemas = new List<string>(schemas); } diff --git a/src/server/authentication/models/user_model.ts b/src/server/authentication/models/user_model.ts index 45fbf23b1..78e39dbc1 100644 --- a/src/server/authentication/models/user_model.ts +++ b/src/server/authentication/models/user_model.ts @@ -1,20 +1,8 @@ //@ts-ignore import * as bcrypt from "bcrypt-nodejs"; //@ts-ignore -import * as mongoose from "mongoose"; -var url = 'mongodb://localhost:27017/Dash'; +import * as mongoose from 'mongoose'; -mongoose.connect(url, { useNewUrlParser: true }); - -mongoose.connection.on('connected', function () { - console.log('Stablished connection on ' + url); -}); -mongoose.connection.on('error', function (error) { - console.log('Something wrong happened: ' + error); -}); -mongoose.connection.on('disconnected', function () { - console.log('connection closed'); -}); export type DashUserModel = mongoose.Document & { email: String, password: string, @@ -85,7 +73,11 @@ userSchema.pre("save", function save(next) { }); const comparePassword: comparePasswordFunction = function (this: DashUserModel, candidatePassword, cb) { + // Choose one of the following bodies for authentication logic. + // secure bcrypt.compare(candidatePassword, this.password, cb); + // bypass password + // cb(undefined, true); }; userSchema.methods.comparePassword = comparePassword; diff --git a/src/server/credentials/CredentialsLoader.ts b/src/server/credentials/CredentialsLoader.ts new file mode 100644 index 000000000..e3f4d167b --- /dev/null +++ b/src/server/credentials/CredentialsLoader.ts @@ -0,0 +1,29 @@ +import { readFile } from "fs"; + +export namespace GoogleCredentialsLoader { + + export interface InstalledCredentials { + client_id: string; + project_id: string; + auth_uri: string; + token_uri: string; + auth_provider_x509_cert_url: string; + client_secret: string; + redirect_uris: string[]; + } + + export let ProjectCredentials: InstalledCredentials; + + export async function loadCredentials() { + ProjectCredentials = await new Promise<InstalledCredentials>(resolve => { + readFile(__dirname + '/google_project_credentials.json', function processClientSecrets(err, content) { + if (err) { + console.log('Error loading client secret file: ' + err); + return; + } + resolve(JSON.parse(content.toString()).installed); + }); + }); + } + +} diff --git a/src/server/credentials/google_docs_credentials.json b/src/server/credentials/google_project_credentials.json index 955c5a3c1..955c5a3c1 100644 --- a/src/server/credentials/google_docs_credentials.json +++ b/src/server/credentials/google_project_credentials.json diff --git a/src/server/database.ts b/src/server/database.ts index db86b472d..6e0771c11 100644 --- a/src/server/database.ts +++ b/src/server/database.ts @@ -5,19 +5,58 @@ import { Utils, emptyFunction } from '../Utils'; import { DashUploadUtils } from './DashUploadUtils'; import { Credentials } from 'google-auth-library'; import { GoogleApiServerUtils } from './apis/google/GoogleApiServerUtils'; +import * as mongoose from 'mongoose'; export namespace 
Database { + export let disconnect: Function; + const schema = 'Dash'; + const port = 27017; + export const url = `mongodb://localhost:${port}/${schema}`; + + enum ConnectionStates { + disconnected = 0, + connected = 1, + connecting = 2, + disconnecting = 3, + uninitialized = 99, + } + + export async function tryInitializeConnection() { + try { + const { connection } = mongoose; + disconnect = async () => new Promise<any>(resolve => connection.close(resolve)); + if (connection.readyState === ConnectionStates.disconnected) { + await new Promise<void>((resolve, reject) => { + connection.on('error', reject); + connection.on('connected', () => { + console.log(`mongoose established default connection at ${url}`); + resolve(); + }); + mongoose.connect(url, { useNewUrlParser: true }); + }); + } + } catch (e) { + console.error(`Mongoose FAILED to establish default connection at ${url} with the following error:`); + console.error(e); + console.log('Since a valid database connection is required to use Dash, the server process will now exit.\nPlease try again later.'); + process.exit(1); + } + } + class Database { public static DocumentsCollection = 'documents'; private MongoClient = mongodb.MongoClient; - private url = 'mongodb://localhost:27017/Dash'; private currentWrites: { [id: string]: Promise<void> } = {}; private db?: mongodb.Db; private onConnect: (() => void)[] = []; constructor() { - this.MongoClient.connect(this.url, (err, client) => { + this.MongoClient.connect(url, (_err, client) => { + if (!client) { + console.error("\nPlease start MongoDB by running 'mongod' in a terminal before continuing...\n"); + process.exit(0); + } this.db = client.db(); this.onConnect.forEach(fn => fn()); }); @@ -25,7 +64,7 @@ export namespace Database { public async update(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateWriteOpResult) => void, upsert = true, collectionName = Database.DocumentsCollection) { if (this.db) { - let collection = this.db.collection(collectionName); + const collection = this.db.collection(collectionName); const prom = this.currentWrites[id]; let newProm: Promise<void>; const run = (): Promise<void> => { @@ -50,7 +89,7 @@ export namespace Database { public replace(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateWriteOpResult) => void, upsert = true, collectionName = Database.DocumentsCollection) { if (this.db) { - let collection = this.db.collection(collectionName); + const collection = this.db.collection(collectionName); const prom = this.currentWrites[id]; let newProm: Promise<void>; const run = (): Promise<void> => { @@ -247,7 +286,7 @@ export namespace Database { }; export const QueryUploadHistory = async (contentSize: number) => { - return SanitizedSingletonQuery<DashUploadUtils.UploadInformation>({ contentSize }, AuxiliaryCollections.GooglePhotosUploadHistory); + return SanitizedSingletonQuery<DashUploadUtils.ImageUploadInformation>({ contentSize }, AuxiliaryCollections.GooglePhotosUploadHistory); }; export namespace GoogleAuthenticationToken { @@ -256,7 +295,7 @@ export namespace Database { export type StoredCredentials = Credentials & { _id: string }; - export const Fetch = async (userId: string, removeId = true) => { + export const Fetch = async (userId: string, removeId = true): Promise<Opt<StoredCredentials>> => { return SanitizedSingletonQuery<StoredCredentials>({ userId }, GoogleAuthentication, removeId); }; @@ -276,7 +315,7 @@ export namespace Database { } - export const LogUpload = async (information: 
DashUploadUtils.UploadInformation) => { + export const LogUpload = async (information: DashUploadUtils.ImageUploadInformation) => { const bundle = { _id: Utils.GenerateDeterministicGuid(String(information.contentSize!)), ...information diff --git a/src/server/downsize.ts b/src/server/downsize.ts index ed68fbecc..cd0d83812 100644 --- a/src/server/downsize.ts +++ b/src/server/downsize.ts @@ -17,7 +17,7 @@ fs.readdir(folder, async (err, files) => { // }); for (const file of files) { const filesplit = file.split("."); - let resizers = [ + const resizers = [ { resizer: sharp().resize(100, undefined, { withoutEnlargement: true }), suffix: "_s" }, { resizer: sharp().resize(400, undefined, { withoutEnlargement: true }), suffix: "_m" }, { resizer: sharp().resize(900, undefined, { withoutEnlargement: true }), suffix: "_l" }, diff --git a/src/server/index.ts b/src/server/index.ts index ddd909479..6b3dfd614 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -1,1257 +1,158 @@ require('dotenv').config(); -import * as bodyParser from 'body-parser'; -import { exec, ExecOptions } from 'child_process'; -import * as cookieParser from 'cookie-parser'; -import * as express from 'express'; -import * as session from 'express-session'; -import * as expressValidator from 'express-validator'; -import * as formidable from 'formidable'; -import * as fs from 'fs'; -import * as sharp from 'sharp'; -import * as Pdfjs from 'pdfjs-dist'; -const imageDataUri = require('image-data-uri'); +import { GoogleApiServerUtils } from "./apis/google/GoogleApiServerUtils"; import * as mobileDetect from 'mobile-detect'; -import * as passport from 'passport'; import * as path from 'path'; -import * as request from 'request'; -import * as io from 'socket.io'; -import { Socket } from 'socket.io'; -import * as webpack from 'webpack'; -import * as wdm from 'webpack-dev-middleware'; -import * as whm from 'webpack-hot-middleware'; -import { Utils } from '../Utils'; -import { getForgot, getLogin, getLogout, getReset, getSignup, postForgot, postLogin, postReset, postSignup } from './authentication/controllers/user_controller'; -import { DashUserModel } from './authentication/models/user_model'; -import { Client } from './Client'; import { Database } from './database'; -import { MessageStore, Transferable, Types, Diff, YoutubeQueryTypes as YoutubeQueryType, YoutubeQueryInput } from "./Message"; -import { RouteStore } from './RouteStore'; -import v4 = require('uuid/v4'); -const app = express(); -const config = require('../../webpack.config'); -import { createCanvas } from "canvas"; -const compiler = webpack(config); -const port = 1050; // default port to listen -const serverPort = 4321; -import expressFlash = require('express-flash'); -import flash = require('connect-flash'); -import { Search } from './Search'; -import * as Archiver from 'archiver'; -var AdmZip = require('adm-zip'); -import * as YoutubeApi from "./apis/youtube/youtubeApiSample"; -import { Response } from 'express-serve-static-core'; -import { GoogleApiServerUtils } from "./apis/google/GoogleApiServerUtils"; -const MongoStore = require('connect-mongo')(session); -const mongoose = require('mongoose'); -const probe = require("probe-image-size"); -const pdf = require('pdf-parse'); -var findInFiles = require('find-in-files'); -import { GooglePhotosUploadUtils } from './apis/google/GooglePhotosUploadUtils'; -import * as qs from 'query-string'; -import { Opt } from '../new_fields/Doc'; import { DashUploadUtils } from './DashUploadUtils'; -import { BatchedArray, TimeUnit } 
from 'array-batcher'; -import { ParsedPDF } from "./PdfTypes"; -import { reject } from 'bluebird'; -import { ExifData } from 'exif'; -import { Result } from '../client/northstar/model/idea/idea'; import RouteSubscriber from './RouteSubscriber'; - -const download = (url: string, dest: fs.PathLike) => request.get(url).pipe(fs.createWriteStream(dest)); -let youtubeApiKey: string; -YoutubeApi.readApiKey((apiKey: string) => youtubeApiKey = apiKey); - -const release = process.env.RELEASE === "true"; -if (process.env.RELEASE === "true") { - console.log("Running server in release mode"); -} else { - console.log("Running server in debug mode"); -} -console.log(process.env.PWD); -let clientUtils = fs.readFileSync("./src/client/util/ClientUtils.ts.temp", "utf8"); -clientUtils = `//AUTO-GENERATED FILE: DO NOT EDIT\n${clientUtils.replace('"mode"', String(release))}`; -fs.writeFileSync("./src/client/util/ClientUtils.ts", clientUtils, "utf8"); - -const mongoUrl = 'mongodb://localhost:27017/Dash'; -mongoose.connection.readyState === 0 && mongoose.connect(mongoUrl); -mongoose.connection.on('connected', () => console.log("connected")); - -// SESSION MANAGEMENT AND AUTHENTICATION MIDDLEWARE -// ORDER OF IMPORTS MATTERS - -app.use(cookieParser()); -app.use(session({ - secret: "64d6866242d3b5a5503c675b32c9605e4e90478e9b77bcf2bc", - resave: true, - cookie: { maxAge: 7 * 24 * 60 * 60 * 1000 }, - saveUninitialized: true, - store: new MongoStore({ url: 'mongodb://localhost:27017/Dash' }) -})); - -app.use(flash()); -app.use(expressFlash()); -app.use(bodyParser.json({ limit: "10mb" })); -app.use(bodyParser.urlencoded({ extended: true })); -app.use(expressValidator()); -app.use(passport.initialize()); -app.use(passport.session()); -app.use((req, res, next) => { - res.locals.user = req.user; - next(); -}); - -app.get("/hello", (req, res) => res.send("<p>Hello</p>")); - -enum Method { - GET, - POST -} - -export type ValidationHandler = (user: DashUserModel, req: express.Request, res: express.Response) => any | Promise<any>; -export type RejectionHandler = (req: express.Request, res: express.Response) => any | Promise<any>; -export type ErrorHandler = (req: express.Request, res: express.Response, error: any) => any | Promise<any>; - -const LoginRedirect: RejectionHandler = (_req, res) => res.redirect(RouteStore.login); - -export interface RouteInitializer { - method: Method; - subscribers: string | RouteSubscriber | (string | RouteSubscriber)[]; - onValidation: ValidationHandler; - onRejection?: RejectionHandler; - onError?: ErrorHandler; -} - -const isSharedDocAccess = (target: string) => { - const shared = qs.parse(qs.extract(target), { sort: false }).sharing === "true"; - const docAccess = target.startsWith("/doc/"); - return shared && docAccess; -}; +import initializeServer from './server_initialization'; +import RouteManager, { Method, _success, _permission_denied, _error, _invalid, PublicHandler } from './RouteManager'; +import * as qs from 'query-string'; +import UtilManager from './ApiManagers/UtilManager'; +import { SearchManager, SolrManager } from './ApiManagers/SearchManager'; +import UserManager from './ApiManagers/UserManager'; +import { WebSocket } from './Websocket/Websocket'; +import DownloadManager from './ApiManagers/DownloadManager'; +import { GoogleCredentialsLoader } from './credentials/CredentialsLoader'; +import DeleteManager from "./ApiManagers/DeleteManager"; +import PDFManager from "./ApiManagers/PDFManager"; +import UploadManager from "./ApiManagers/UploadManager"; +import { log_execution, 
Email } from "./ActionUtilities"; +import GeneralGoogleManager from "./ApiManagers/GeneralGoogleManager"; +import GooglePhotosManager from "./ApiManagers/GooglePhotosManager"; +import { Logger } from "./ProcessFactory"; +import { yellow, red } from "colors"; +import { Session } from "./Session/session"; +import { DashSessionAgent } from "./DashSession"; + +export const onWindows = process.platform === "win32"; +export let sessionAgent: Session.AppliedSessionAgent; +export const publicDirectory = path.resolve(__dirname, "public"); +export const filesDirectory = path.resolve(publicDirectory, "files"); /** - * Please invoke this function when adding a new route to Dash's server. - * It ensures that any requests leading to or containing user-sensitive information - * does not execute unless Passport authentication detects a user logged in. - * @param method whether or not the request is a GET or a POST - * @param handler the action to invoke, recieving a DashUserModel and, as expected, the Express.Request and Express.Response - * @param onRejection an optional callback invoked on return if no user is found to be logged in - * @param subscribers the forward slash prepended path names (reference and add to RouteStore.ts) that will all invoke the given @param handler + * These are the functions run before the server starts + * listening. Anything that must be complete + * before clients can access the server should be run or awaited here. */ -function addSecureRoute(initializer: RouteInitializer) { - const { method, subscribers, onValidation, onRejection, onError } = initializer; - let abstracted = async (req: express.Request, res: express.Response) => { - const { user, originalUrl: target } = req; - if (user || isSharedDocAccess(target)) { - try { - await onValidation(user as any, req, res); - } catch (e) { - if (onError) { - onError(req, res, e); - } else { - _error(res, `The server encountered an internal error handling ${target}.`, e); - } - } - } else { - req.session!.target = target; - try { - await (onRejection || LoginRedirect)(req, res); - } catch (e) { - if (onError) { - onError(req, res, e); - } else { - _error(res, `The server encountered an internal error when rejecting ${target}.`, e); - } - } - } - }; - const subscribe = (subscriber: RouteSubscriber | string) => { - let route: string; - if (typeof subscriber === "string") { - route = subscriber; - } else { - route = subscriber.build; - } - switch (method) { - case Method.GET: - app.get(route, abstracted); - break; - case Method.POST: - app.post(route, abstracted); - break; - } - }; - if (Array.isArray(subscribers)) { - subscribers.forEach(subscribe); - } else { - subscribe(subscribers); - } -} - -// STATIC FILE SERVING -app.use(express.static(__dirname + RouteStore.public)); -app.use(RouteStore.images, express.static(__dirname + RouteStore.public)); - -app.get("/pull", (req, res) => - exec('"C:\\Program Files\\Git\\git-bash.exe" -c "git pull"', (err, stdout, stderr) => { - if (err) { - res.send(err.message); - return; - } - res.redirect("/"); - })); - -app.get("/buxton", (req, res) => { - let cwd = '../scraping/buxton'; - - let onResolved = (stdout: string) => { console.log(stdout); res.redirect("/"); }; - let onRejected = (err: any) => { console.error(err.message); res.send(err); }; - let tryPython3 = () => command_line('python3 scraper.py', cwd).then(onResolved, onRejected); - - command_line('python scraper.py', cwd).then(onResolved, tryPython3); -}); - -const STATUS = { - OK: 200, - BAD_REQUEST: 400, - EXECUTION_ERROR: 500, - 
PERMISSION_DENIED: 403 -}; - -const command_line = (command: string, fromDirectory?: string) => { - return new Promise<string>((resolve, reject) => { - let options: ExecOptions = {}; - if (fromDirectory) { - options.cwd = path.join(__dirname, fromDirectory); - } - exec(command, options, (err, stdout) => err ? reject(err) : resolve(stdout)); - }); -}; - -const read_text_file = (relativePath: string) => { - let target = path.join(__dirname, relativePath); - return new Promise<string>((resolve, reject) => { - fs.readFile(target, (err, data) => err ? reject(err) : resolve(data.toString())); - }); -}; - -const write_text_file = (relativePath: string, contents: any) => { - let target = path.join(__dirname, relativePath); - return new Promise<void>((resolve, reject) => { - fs.writeFile(target, contents, (err) => err ? reject(err) : resolve()); +async function preliminaryFunctions() { + await Logger.initialize(); + await GoogleCredentialsLoader.loadCredentials(); + GoogleApiServerUtils.processProjectCredentials(); + await DashUploadUtils.buildFileDirectories(); + await log_execution({ + startMessage: "attempting to initialize mongodb connection", + endMessage: "connection outcome determined", + action: Database.tryInitializeConnection }); -}; - -app.get("/version", (req, res) => { - exec('"C:\\Program Files\\Git\\bin\\git.exe" rev-parse HEAD', (err, stdout, stderr) => { - if (err) { - res.send(err.message); - return; - } - res.send(stdout); - }); -}); - -// SEARCH -const solrURL = "http://localhost:8983/solr/#/dash"; - -// GETTERS - -app.get("/textsearch", async (req, res) => { - let q = req.query.q; - if (q === undefined) { - res.send([]); - return; - } - let results = await findInFiles.find({ 'term': q, 'flags': 'ig' }, uploadDirectory + "text", ".txt$"); - let resObj: { ids: string[], numFound: number, lines: string[] } = { ids: [], numFound: 0, lines: [] }; - for (var result in results) { - resObj.ids.push(path.basename(result, ".txt").replace(/upload_/, "")); - resObj.lines.push(results[result].line); - resObj.numFound++; - } - res.send(resObj); -}); - -app.get("/search", async (req, res) => { - const solrQuery: any = {}; - ["q", "fq", "start", "rows", "hl", "hl.fl"].forEach(key => solrQuery[key] = req.query[key]); - if (solrQuery.q === undefined) { - res.send([]); - return; - } - let results = await Search.Instance.search(solrQuery); - res.send(results); -}); - -function msToTime(duration: number) { - let milliseconds = Math.floor((duration % 1000) / 100), - seconds = Math.floor((duration / 1000) % 60), - minutes = Math.floor((duration / (1000 * 60)) % 60), - hours = Math.floor((duration / (1000 * 60 * 60)) % 24); - - let hoursS = (hours < 10) ? "0" + hours : hours; - let minutesS = (minutes < 10) ? "0" + minutes : minutes; - let secondsS = (seconds < 10) ? "0" + seconds : seconds; - - return hoursS + ":" + minutesS + ":" + secondsS + "." 
+ milliseconds; } -async function getDocs(id: string) { - const files = new Set<string>(); - const docs: { [id: string]: any } = {}; - const fn = (doc: any): string[] => { - const id = doc.id; - if (typeof id === "string" && id.endsWith("Proto")) { - //Skip protos - return []; - } - const ids: string[] = []; - for (const key in doc.fields) { - if (!doc.fields.hasOwnProperty(key)) { - continue; - } - const field = doc.fields[key]; - if (field === undefined || field === null) { - continue; - } - - if (field.__type === "proxy" || field.__type === "prefetch_proxy") { - ids.push(field.fieldId); - } else if (field.__type === "script" || field.__type === "computed") { - if (field.captures) { - ids.push(field.captures.fieldId); - } - } else if (field.__type === "list") { - ids.push(...fn(field)); - } else if (typeof field === "string") { - const re = /"(?:dataD|d)ocumentId"\s*:\s*"([\w\-]*)"/g; - let match: string[] | null; - while ((match = re.exec(field)) !== null) { - ids.push(match[1]); - } - } else if (field.__type === "RichTextField") { - const re = /"href"\s*:\s*"(.*?)"/g; - let match: string[] | null; - while ((match = re.exec(field.Data)) !== null) { - const urlString = match[1]; - const split = new URL(urlString).pathname.split("doc/"); - if (split.length > 1) { - ids.push(split[split.length - 1]); - } - } - const re2 = /"src"\s*:\s*"(.*?)"/g; - while ((match = re2.exec(field.Data)) !== null) { - const urlString = match[1]; - const pathname = new URL(urlString).pathname; - files.add(pathname); - } - } else if (["audio", "image", "video", "pdf", "web"].includes(field.__type)) { - const url = new URL(field.url); - const pathname = url.pathname; - files.add(pathname); - } - } - - if (doc.id) { - docs[doc.id] = doc; - } - return ids; - }; - await Database.Instance.visit([id], fn); - return { id, docs, files }; -} -app.get("/serializeDoc/:docId", async (req, res) => { - const { docs, files } = await getDocs(req.params.docId); - res.send({ docs, files: Array.from(files) }); -}); - -export type Hierarchy = { [id: string]: string | Hierarchy }; -export type ZipMutator = (file: Archiver.Archiver) => void | Promise<void>; - -addSecureRoute({ - method: Method.GET, - subscribers: new RouteSubscriber(RouteStore.imageHierarchyExport).add('docId'), - onValidation: async (_user, req, res) => { - const id = req.params.docId; - const hierarchy: Hierarchy = {}; - await targetedVisitorRecursive(id, hierarchy); - BuildAndDispatchZip(res, async zip => { - await hierarchyTraverserRecursive(zip, hierarchy); - }); - } -}); - -const BuildAndDispatchZip = async (res: Response, mutator: ZipMutator): Promise<void> => { - const zip = Archiver('zip'); - zip.pipe(res); - await mutator(zip); - return zip.finalize(); -}; - -const targetedVisitorRecursive = async (seedId: string, hierarchy: Hierarchy): Promise<void> => { - const local: Hierarchy = {}; - const { title, data } = await getData(seedId); - const label = `${title} (${seedId})`; - if (Array.isArray(data)) { - hierarchy[label] = local; - await Promise.all(data.map(proxy => targetedVisitorRecursive(proxy.fieldId, local))); - } else { - hierarchy[label + path.extname(data)] = data; - } -}; +/** + * Either clustered together as an API manager + * or individually referenced below, by the completion + * of this function's execution, all routes will + * be registered on the server + * @param router the instance of the route manager + * that will manage the registration of new routes + * with the server + */ +function routeSetter({ isRelease, addSupervisedRoute, 
logRegistrationOutcome }: RouteManager) { + const managers = [ + new UserManager(), + new UploadManager(), + new DownloadManager(), + new SearchManager(), + new PDFManager(), + new DeleteManager(), + new UtilManager(), + new GeneralGoogleManager(), + new GooglePhotosManager(), + ]; + + // initialize API Managers + console.log(yellow("\nregistering server routes...")); + managers.forEach(manager => manager.register(addSupervisedRoute)); + + /** + * Accessing root index redirects to home + */ + addSupervisedRoute({ + method: Method.GET, + subscription: "/", + secureHandler: ({ res }) => res.redirect("/home") + }); -const getData = async (seedId: string): Promise<{ data: string | any[], title: string }> => { - return new Promise<{ data: string | any[], title: string }>((resolve, reject) => { - Database.Instance.getDocument(seedId, async (result: any) => { - const { data, proto, title } = result.fields; - if (data) { - if (data.url) { - resolve({ data: data.url, title }); - } else if (data.fields) { - resolve({ data: data.fields, title }); - } else { - reject(); - } - } - if (proto) { - getData(proto.fieldId).then(resolve, reject); - } - }); + addSupervisedRoute({ + method: Method.GET, + subscription: "/serverHeartbeat", + secureHandler: ({ res }) => res.send(true) }); -}; -const hierarchyTraverserRecursive = async (file: Archiver.Archiver, hierarchy: Hierarchy, prefix = "Dash Export"): Promise<void> => { - for (const key of Object.keys(hierarchy)) { - const result = hierarchy[key]; - if (typeof result === "string") { - let path: string; - let matches: RegExpExecArray | null; - if ((matches = /\:1050\/files\/(upload\_[\da-z]{32}.*)/g.exec(result)) !== null) { - path = `${__dirname}/public/files/${matches[1]}`; + addSupervisedRoute({ + method: Method.GET, + subscription: new RouteSubscriber("kill").add("key"), + secureHandler: ({ req, res }) => { + if (req.params.key === process.env.session_key) { + res.send("<img src='https://media.giphy.com/media/NGIfqtcS81qi4/giphy.gif' style='width:100%;height:100%;'/>"); + setTimeout(() => { + sessionAgent.killSession("an authorized user has manually ended the server session via the /kill route", false); + }, 5000); } else { - const information = await DashUploadUtils.UploadImage(result); - path = information.mediaPaths[0]; + res.redirect("/home"); } - file.file(path, { name: key, prefix }); - } else { - await hierarchyTraverserRecursive(file, result, `${prefix}/${key}`); } - } -}; - -app.get("/downloadId/:docId", async (req, res) => { - res.set('Content-disposition', `attachment;`); - res.set('Content-Type', "application/zip"); - const { id, docs, files } = await getDocs(req.params.docId); - const docString = JSON.stringify({ id, docs }); - const zip = Archiver('zip'); - zip.pipe(res); - zip.append(docString, { name: "doc.json" }); - files.forEach(val => { - zip.file(__dirname + RouteStore.public + val, { name: val.substring(1) }); }); - zip.finalize(); -}); -app.post("/uploadDoc", (req, res) => { - let form = new formidable.IncomingForm(); - form.keepExtensions = true; - // let path = req.body.path; - const ids: { [id: string]: string } = {}; - let remap = true; - const getId = (id: string): string => { - if (!remap) return id; - if (id.endsWith("Proto")) return id; - if (id in ids) { - return ids[id]; - } else { - return ids[id] = v4(); - } + const serve: PublicHandler = ({ req, res }) => { + const detector = new mobileDetect(req.headers['user-agent'] || ""); + const filename = detector.mobile() !== null ? 
'mobile/image.html' : 'index.html'; + res.sendFile(path.join(__dirname, '../../deploy/' + filename)); }; - const mapFn = (doc: any) => { - if (doc.id) { - doc.id = getId(doc.id); - } - for (const key in doc.fields) { - if (!doc.fields.hasOwnProperty(key)) { - continue; - } - const field = doc.fields[key]; - if (field === undefined || field === null) { - continue; - } - if (field.__type === "proxy" || field.__type === "prefetch_proxy") { - field.fieldId = getId(field.fieldId); - } else if (field.__type === "script" || field.__type === "computed") { - if (field.captures) { - field.captures.fieldId = getId(field.captures.fieldId); - } - } else if (field.__type === "list") { - mapFn(field); - } else if (typeof field === "string") { - const re = /("(?:dataD|d)ocumentId"\s*:\s*")([\w\-]*)"/g; - doc.fields[key] = (field as any).replace(re, (match: any, p1: string, p2: string) => { - return `${p1}${getId(p2)}"`; - }); - } else if (field.__type === "RichTextField") { - const re = /("href"\s*:\s*")(.*?)"/g; - field.Data = field.Data.replace(re, (match: any, p1: string, p2: string) => { - return `${p1}${getId(p2)}"`; - }); + addSupervisedRoute({ + method: Method.GET, + subscription: ["/home", new RouteSubscriber("doc").add("docId")], + secureHandler: serve, + publicHandler: ({ req, ...remaining }) => { + const { originalUrl: target } = req; + const sharing = qs.parse(qs.extract(req.originalUrl), { sort: false }).sharing === "true"; + const docAccess = target.startsWith("/doc/"); + if (sharing && docAccess) { + serve({ req, ...remaining }); } } - }; - form.parse(req, async (err, fields, files) => { - remap = fields.remap !== "false"; - let id: string = ""; - try { - for (const name in files) { - const path_2 = files[name].path; - const zip = new AdmZip(path_2); - zip.getEntries().forEach((entry: any) => { - if (!entry.entryName.startsWith("files/")) return; - let dirname = path.dirname(entry.entryName) + "/"; - let extname = path.extname(entry.entryName); - let basename = path.basename(entry.entryName).split(".")[0]; - // zip.extractEntryTo(dirname + basename + "_o" + extname, __dirname + RouteStore.public, true, false); - // zip.extractEntryTo(dirname + basename + "_s" + extname, __dirname + RouteStore.public, true, false); - // zip.extractEntryTo(dirname + basename + "_m" + extname, __dirname + RouteStore.public, true, false); - // zip.extractEntryTo(dirname + basename + "_l" + extname, __dirname + RouteStore.public, true, false); - try { - zip.extractEntryTo(entry.entryName, __dirname + RouteStore.public, true, false); - dirname = "/" + dirname; - - fs.createReadStream(__dirname + RouteStore.public + dirname + basename + extname).pipe(fs.createWriteStream(__dirname + RouteStore.public + dirname + basename + "_o" + extname)); - fs.createReadStream(__dirname + RouteStore.public + dirname + basename + extname).pipe(fs.createWriteStream(__dirname + RouteStore.public + dirname + basename + "_s" + extname)); - fs.createReadStream(__dirname + RouteStore.public + dirname + basename + extname).pipe(fs.createWriteStream(__dirname + RouteStore.public + dirname + basename + "_m" + extname)); - fs.createReadStream(__dirname + RouteStore.public + dirname + basename + extname).pipe(fs.createWriteStream(__dirname + RouteStore.public + dirname + basename + "_l" + extname)); - } catch (e) { - console.log(e); - } - }); - const json = zip.getEntry("doc.json"); - let docs: any; - try { - let data = JSON.parse(json.getData().toString("utf8")); - docs = data.docs; - id = data.id; - docs = Object.keys(docs).map(key => 
docs[key]); - docs.forEach(mapFn); - await Promise.all(docs.map((doc: any) => new Promise(res => Database.Instance.replace(doc.id, doc, (err, r) => { - err && console.log(err); - res(); - }, true, "newDocuments")))); - } catch (e) { console.log(e); } - fs.unlink(path_2, () => { }); - } - if (id) { - res.send(JSON.stringify(getId(id))); - } else { - res.send(JSON.stringify("error")); - } - } catch (e) { console.log(e); } }); -}); -app.get("/whosOnline", (req, res) => { - let users: any = { active: {}, inactive: {} }; - const now = Date.now(); + logRegistrationOutcome(); - for (const user in timeMap) { - const time = timeMap[user]; - const key = ((now - time) / 1000) < (60 * 5) ? "active" : "inactive"; - users[key][user] = `Last active ${msToTime(now - time)} ago`; - } - - res.send(users); -}); -app.get("/thumbnail/:filename", (req, res) => { - let filename = req.params.filename; - let noExt = filename.substring(0, filename.length - ".png".length); - let pagenumber = parseInt(noExt.split('-')[1]); - fs.exists(uploadDirectory + filename, (exists: boolean) => { - console.log(`${uploadDirectory + filename} ${exists ? "exists" : "does not exist"}`); - if (exists) { - let input = fs.createReadStream(uploadDirectory + filename); - probe(input, (err: any, result: any) => { - if (err) { - console.log(err); - console.log(`error on ${filename}`); - return; - } - res.send({ path: "/files/" + filename, width: result.width, height: result.height }); - }); - } - else { - LoadPage(uploadDirectory + filename.substring(0, filename.length - noExt.split('-')[1].length - ".PNG".length - 1) + ".pdf", pagenumber, res); - } - }); -}); - -function LoadPage(file: string, pageNumber: number, res: Response) { - console.log(file); - Pdfjs.getDocument(file).promise - .then((pdf: Pdfjs.PDFDocumentProxy) => { - let factory = new NodeCanvasFactory(); - console.log(pageNumber); - pdf.getPage(pageNumber).then((page: Pdfjs.PDFPageProxy) => { - console.log("reading " + page); - let viewport = page.getViewport(1 as any); - let canvasAndContext = factory.create(viewport.width, viewport.height); - let renderContext = { - canvasContext: canvasAndContext.context, - viewport: viewport, - canvasFactory: factory - }; - console.log("read " + pageNumber); - - page.render(renderContext).promise - .then(() => { - console.log("saving " + pageNumber); - let stream = canvasAndContext.canvas.createPNGStream(); - let pngFile = `${file.substring(0, file.length - ".pdf".length)}-${pageNumber}.PNG`; - let out = fs.createWriteStream(pngFile); - stream.pipe(out); - out.on("finish", () => { - console.log(`Success! Saved to ${pngFile}`); - let name = path.basename(pngFile); - res.send({ path: "/files/" + name, width: viewport.width, height: viewport.height }); - }); - }, (reason: string) => { - console.error(reason + ` ${pageNumber}`); - }); - }); - }); + // initialize the web socket (bidirectional communication: if a user changes + // a field on one client, that change must be broadcast to all other clients) + WebSocket.start(isRelease); } /** - * Anyone attempting to navigate to localhost at this port will - * first have to log in. + * This function can be used in two different ways. If not in release mode, + * this is simply the logic that is invoked to start the server. In release mode, + * however, this becomes the logic invoked by a single worker thread spawned by + * the main monitor (master) thread. 
*/ -addSecureRoute({ - method: Method.GET, - subscribers: RouteStore.root, - onValidation: (_user, _req, res) => res.redirect(RouteStore.home) -}); - -addSecureRoute({ - method: Method.GET, - subscribers: RouteStore.getUsers, - onValidation: async (_user, _req, res) => { - const cursor = await Database.Instance.query({}, { email: 1, userDocumentId: 1 }, "users"); - const results = await cursor.toArray(); - res.send(results.map(user => ({ email: user.email, userDocumentId: user.userDocumentId }))); - }, -}); - -addSecureRoute({ - method: Method.GET, - subscribers: [RouteStore.home, RouteStore.openDocumentWithId], - onValidation: (_user, req, res) => { - let detector = new mobileDetect(req.headers['user-agent'] || ""); - let filename = detector.mobile() !== null ? 'mobile/image.html' : 'index.html'; - res.sendFile(path.join(__dirname, '../../deploy/' + filename)); - }, -}); - -addSecureRoute({ - method: Method.GET, - subscribers: RouteStore.getUserDocumentId, - onValidation: (user, _req, res) => res.send(user.userDocumentId), - onRejection: (_req, res) => res.send(undefined) -}); - -addSecureRoute({ - method: Method.GET, - subscribers: RouteStore.getCurrUser, - onValidation: (user, _req, res) => { res.send(JSON.stringify(user)); }, - onRejection: (_req, res) => res.send(JSON.stringify({ id: "__guest__", email: "" })) -}); - -const ServicesApiKeyMap = new Map<string, string | undefined>([ - ["face", process.env.FACE], - ["vision", process.env.VISION], - ["handwriting", process.env.HANDWRITING] -]); - -addSecureRoute({ - method: Method.GET, - subscribers: new RouteSubscriber(RouteStore.cognitiveServices).add('requestedservice'), - onValidation: (_user, req, res) => { - let service = req.params.requestedservice; - res.send(ServicesApiKeyMap.get(service)); - } -}); - -class NodeCanvasFactory { - create = (width: number, height: number) => { - var canvas = createCanvas(width, height); - var context = canvas.getContext('2d'); - return { - canvas: canvas, - context: context, - }; - } - - reset = (canvasAndContext: any, width: number, height: number) => { - canvasAndContext.canvas.width = width; - canvasAndContext.canvas.height = height; - } - - destroy = (canvasAndContext: any) => { - canvasAndContext.canvas.width = 0; - canvasAndContext.canvas.height = 0; - canvasAndContext.canvas = null; - canvasAndContext.context = null; - } -} - -const pngTypes = [".png", ".PNG"]; -const jpgTypes = [".jpg", ".JPG", ".jpeg", ".JPEG"]; -const uploadDirectory = __dirname + "/public/files/"; -const pdfDirectory = uploadDirectory + "text"; -DashUploadUtils.createIfNotExists(pdfDirectory); - -interface ImageFileResponse { - name: string; - path: string; - type: string; - exif: Opt<DashUploadUtils.EnrichedExifData>; -} - -addSecureRoute({ - method: Method.POST, - subscribers: RouteStore.upload, - onValidation: (_user, req, res) => { - let form = new formidable.IncomingForm(); - form.uploadDir = uploadDirectory; - form.keepExtensions = true; - form.parse(req, async (_err, _fields, files) => { - let results: ImageFileResponse[] = []; - for (const key in files) { - const { type, path: location, name } = files[key]; - const filename = path.basename(location); - let uploadInformation: Opt<DashUploadUtils.UploadInformation>; - if (filename.endsWith(".pdf")) { - let dataBuffer = fs.readFileSync(uploadDirectory + filename); - const result: ParsedPDF = await pdf(dataBuffer); - await new Promise<void>(resolve => { - const path = pdfDirectory + "/" + filename.substring(0, filename.length - ".pdf".length) + ".txt"; - 
fs.createWriteStream(path).write(result.text, error => { - if (!error) { - resolve(); - } else { - reject(error); - } - }); - }); - } else if (type.indexOf("audio") !== -1) { - // nothing to be done yet-- although transcribing the audio a la pdfs would make sense. - } else { - uploadInformation = await DashUploadUtils.UploadImage(uploadDirectory + filename, filename); - } - const exif = uploadInformation ? uploadInformation.exifData : undefined; - results.push({ name, type, path: `/files/${filename}`, exif }); - - } - _success(res, results); - }); - } -}); - -addSecureRoute({ - method: Method.POST, - subscribers: RouteStore.inspectImage, - onValidation: async (_user, req, res) => { - const { source } = req.body; - if (typeof source === "string") { - const uploadInformation = await DashUploadUtils.UploadImage(source); - return res.send(await DashUploadUtils.InspectImage(uploadInformation.mediaPaths[0])); - } - res.send({}); - } -}); - -addSecureRoute({ - method: Method.POST, - subscribers: RouteStore.dataUriToImage, - onValidation: (_user, req, res) => { - const uri = req.body.uri; - const filename = req.body.name; - if (!uri || !filename) { - res.status(401).send("incorrect parameters specified"); - return; - } - imageDataUri.outputFile(uri, uploadDirectory + filename).then((savedName: string) => { - const ext = path.extname(savedName); - let resizers = [ - { resizer: sharp().resize(100, undefined, { withoutEnlargement: true }), suffix: "_s" }, - { resizer: sharp().resize(400, undefined, { withoutEnlargement: true }), suffix: "_m" }, - { resizer: sharp().resize(900, undefined, { withoutEnlargement: true }), suffix: "_l" }, - ]; - let isImage = false; - if (pngTypes.includes(ext)) { - resizers.forEach(element => { - element.resizer = element.resizer.png(); - }); - isImage = true; - } else if (jpgTypes.includes(ext)) { - resizers.forEach(element => { - element.resizer = element.resizer.jpeg(); - }); - isImage = true; - } - if (isImage) { - resizers.forEach(resizer => { - fs.createReadStream(savedName).pipe(resizer.resizer).pipe(fs.createWriteStream(uploadDirectory + filename + resizer.suffix + ext)); - }); - } - res.send("/files/" + filename + ext); - }); - } -}); - -// AUTHENTICATION - -// Sign Up -app.get(RouteStore.signup, getSignup); -app.post(RouteStore.signup, postSignup); - -// Log In -app.get(RouteStore.login, getLogin); -app.post(RouteStore.login, postLogin); - -// Log Out -app.get(RouteStore.logout, getLogout); - -// FORGOT PASSWORD EMAIL HANDLING -app.get(RouteStore.forgot, getForgot); -app.post(RouteStore.forgot, postForgot); - -// RESET PASSWORD EMAIL HANDLING -app.get(RouteStore.reset, getReset); -app.post(RouteStore.reset, postReset); - -const headerCharRegex = /[^\t\x20-\x7e\x80-\xff]/; -app.use(RouteStore.corsProxy, (req, res) => { - req.pipe(request(decodeURIComponent(req.url.substring(1)))).on("response", res => { - const headers = Object.keys(res.headers); - headers.forEach(headerName => { - const header = res.headers[headerName]; - if (Array.isArray(header)) { - res.headers[headerName] = header.filter(h => !headerCharRegex.test(h)); - } else if (header) { - if (headerCharRegex.test(header as any)) { - delete res.headers[headerName]; - } - } - }); - }).pipe(res); -}); - -addSecureRoute({ - method: Method.GET, - subscribers: RouteStore.delete, - onValidation: (_user, _req, res) => { - if (release) { - return _permission_denied(res, deletionPermissionError); - } - deleteFields().then(() => res.redirect(RouteStore.home)); - } -}); - -addSecureRoute({ - method: Method.GET, - 
subscribers: RouteStore.deleteAll, - onValidation: (_user, _req, res) => { - if (release) { - return _permission_denied(res, deletionPermissionError); - } - deleteAll().then(() => res.redirect(RouteStore.home)); - } -}); - -app.use(wdm(compiler, { publicPath: config.output.publicPath })); - -app.use(whm(compiler)); - -// start the Express server -app.listen(port, () => - console.log(`server started at http://localhost:${port}`)); - -const server = io(); -interface Map { - [key: string]: Client; -} -let clients: Map = {}; - -let socketMap = new Map<SocketIO.Socket, string>(); -let timeMap: { [id: string]: number } = {}; - -server.on("connection", function (socket: Socket) { - socket.use((packet, next) => { - let id = socketMap.get(socket); - if (id) { - timeMap[id] = Date.now(); - } - next(); - }); - - Utils.Emit(socket, MessageStore.Foo, "handshooken"); - - Utils.AddServerHandler(socket, MessageStore.Bar, guid => barReceived(socket, guid)); - Utils.AddServerHandler(socket, MessageStore.SetField, (args) => setField(socket, args)); - Utils.AddServerHandlerCallback(socket, MessageStore.GetField, getField); - Utils.AddServerHandlerCallback(socket, MessageStore.GetFields, getFields); - if (!release) { - Utils.AddServerHandler(socket, MessageStore.DeleteAll, deleteFields); - } - - Utils.AddServerHandler(socket, MessageStore.CreateField, CreateField); - Utils.AddServerHandlerCallback(socket, MessageStore.YoutubeApiQuery, HandleYoutubeQuery); - Utils.AddServerHandler(socket, MessageStore.UpdateField, diff => UpdateField(socket, diff)); - Utils.AddServerHandler(socket, MessageStore.DeleteField, id => DeleteField(socket, id)); - Utils.AddServerHandler(socket, MessageStore.DeleteFields, ids => DeleteFields(socket, ids)); - Utils.AddServerHandlerCallback(socket, MessageStore.GetRefField, GetRefField); - Utils.AddServerHandlerCallback(socket, MessageStore.GetRefFields, GetRefFields); -}); - -async function deleteFields() { - await Database.Instance.deleteAll(); - await Search.Instance.clear(); - await Database.Instance.deleteAll('newDocuments'); -} - -async function deleteAll() { - await Database.Instance.deleteAll(); - await Database.Instance.deleteAll('newDocuments'); - await Database.Instance.deleteAll('sessions'); - await Database.Instance.deleteAll('users'); - await Search.Instance.clear(); -} - -function barReceived(socket: SocketIO.Socket, guid: string) { - clients[guid] = new Client(guid.toString()); - console.log(`User ${guid} has connected`); - socketMap.set(socket, guid); -} - -function getField([id, callback]: [string, (result?: Transferable) => void]) { - Database.Instance.getDocument(id, (result?: Transferable) => - callback(result ? 
result : undefined)); -} - -function getFields([ids, callback]: [string[], (result: Transferable[]) => void]) { - Database.Instance.getDocuments(ids, callback); -} - -function setField(socket: Socket, newValue: Transferable) { - Database.Instance.update(newValue.id, newValue, () => - socket.broadcast.emit(MessageStore.SetField.Message, newValue)); - if (newValue.type === Types.Text) { - Search.Instance.updateDocument({ id: newValue.id, data: (newValue as any).data }); - console.log("set field"); - console.log("checking in"); - } -} - -function GetRefField([id, callback]: [string, (result?: Transferable) => void]) { - Database.Instance.getDocument(id, callback, "newDocuments"); -} - -function GetRefFields([ids, callback]: [string[], (result?: Transferable[]) => void]) { - Database.Instance.getDocuments(ids, callback, "newDocuments"); -} - -function HandleYoutubeQuery([query, callback]: [YoutubeQueryInput, (result?: any[]) => void]) { - switch (query.type) { - case YoutubeQueryType.Channels: - YoutubeApi.authorizedGetChannel(youtubeApiKey); - break; - case YoutubeQueryType.SearchVideo: - YoutubeApi.authorizedGetVideos(youtubeApiKey, query.userInput, callback); - case YoutubeQueryType.VideoDetails: - YoutubeApi.authorizedGetVideoDetails(youtubeApiKey, query.videoIds, callback); - } -} - -const credentialsPath = path.join(__dirname, "./credentials/google_docs_credentials.json"); - -const EndpointHandlerMap = new Map<GoogleApiServerUtils.Action, GoogleApiServerUtils.ApiRouter>([ - ["create", (api, params) => api.create(params)], - ["retrieve", (api, params) => api.get(params)], - ["update", (api, params) => api.batchUpdate(params)], -]); - -app.post(RouteStore.googleDocs + "/:sector/:action", (req, res) => { - let sector: GoogleApiServerUtils.Service = req.params.sector as GoogleApiServerUtils.Service; - let action: GoogleApiServerUtils.Action = req.params.action as GoogleApiServerUtils.Action; - GoogleApiServerUtils.GetEndpoint(GoogleApiServerUtils.Service[sector], { credentialsPath, userId: req.headers.userId as string }).then(endpoint => { - let handler = EndpointHandlerMap.get(action); - if (endpoint && handler) { - let execute = handler(endpoint, req.body).then( - response => res.send(response.data), - rejection => res.send(rejection) - ); - execute.catch(exception => res.send(exception)); - return; - } - res.send(undefined); - }); -}); - -addSecureRoute({ - method: Method.GET, - subscribers: RouteStore.readGoogleAccessToken, - onValidation: async (user, _req, res) => { - const userId = user.id; - const token = await Database.Auxiliary.GoogleAuthenticationToken.Fetch(userId); - const information = { credentialsPath, userId }; - if (!token) { - return res.send(await GoogleApiServerUtils.GenerateAuthenticationUrl(information)); - } - GoogleApiServerUtils.RetrieveAccessToken(information).then(token => res.send(token)); - } -}); - -addSecureRoute({ - method: Method.POST, - subscribers: RouteStore.writeGoogleAccessToken, - onValidation: async (user, req, res) => { - const userId = user.id; - const information = { credentialsPath, userId }; - res.send(await GoogleApiServerUtils.ProcessClientSideCode(information, req.body.authenticationCode)); - } -}); - -const tokenError = "Unable to successfully upload bytes for all images!"; -const mediaError = "Unable to convert all uploaded bytes to media items!"; -const userIdError = "Unable to parse the identification of the user!"; - -export interface NewMediaItem { - description: string; - simpleMediaItem: { - uploadToken: string; - }; -} - 
-addSecureRoute({ - method: Method.POST, - subscribers: RouteStore.googlePhotosMediaUpload, - onValidation: async (user, req, res) => { - const { media } = req.body; - const userId = user.id; - if (!userId) { - return _error(res, userIdError); - } - - await GooglePhotosUploadUtils.initialize({ credentialsPath, userId }); - - let failed: number[] = []; - - const batched = BatchedArray.from<GooglePhotosUploadUtils.MediaInput>(media, { batchSize: 25 }); - const newMediaItems = await batched.batchedMapPatientInterval<NewMediaItem>( - { magnitude: 100, unit: TimeUnit.Milliseconds }, - async (batch, collector) => { - for (let index = 0; index < batch.length; index++) { - const { url, description } = batch[index]; - const uploadToken = await GooglePhotosUploadUtils.DispatchGooglePhotosUpload(url); - if (!uploadToken) { - failed.push(index); - } else { - collector.push({ - description, - simpleMediaItem: { uploadToken } - }); - } - } - } - ); - - const failedCount = failed.length; - if (failedCount) { - console.error(`Unable to upload ${failedCount} image${failedCount === 1 ? "" : "s"} to Google's servers`); - } - - GooglePhotosUploadUtils.CreateMediaItems(newMediaItems, req.body.album).then( - results => _success(res, { results, failed }), - error => _error(res, mediaError, error) - ); - } -}); - -interface MediaItem { - baseUrl: string; - filename: string; -} -const prefix = "google_photos_"; - -const downloadError = "Encountered an error while executing downloads."; -const requestError = "Unable to execute download: the body's media items were malformed."; -const deletionPermissionError = "Cannot perform specialized delete outside of the development environment!"; - -app.get("/deleteWithAux", async (_req, res) => { - if (release) { - return _permission_denied(res, deletionPermissionError); - } - await Database.Auxiliary.DeleteAll(); - res.redirect(RouteStore.delete); -}); - -app.get("/deleteWithGoogleCredentials", async (req, res) => { - if (release) { - return _permission_denied(res, deletionPermissionError); - } - await Database.Auxiliary.GoogleAuthenticationToken.DeleteAll(); - res.redirect(RouteStore.delete); -}); - -const UploadError = (count: number) => `Unable to upload ${count} images to Dash's server`; -app.post(RouteStore.googlePhotosMediaDownload, async (req, res) => { - const contents: { mediaItems: MediaItem[] } = req.body; - let failed = 0; - if (contents) { - const completed: Opt<DashUploadUtils.UploadInformation>[] = []; - for (let item of contents.mediaItems) { - const { contentSize, ...attributes } = await DashUploadUtils.InspectImage(item.baseUrl); - const found: Opt<DashUploadUtils.UploadInformation> = await Database.Auxiliary.QueryUploadHistory(contentSize!); - if (!found) { - const upload = await DashUploadUtils.UploadInspectedImage({ contentSize, ...attributes }, item.filename, prefix).catch(error => _error(res, downloadError, error)); - if (upload) { - completed.push(upload); - await Database.Auxiliary.LogUpload(upload); - } else { - failed++; - } - } else { - completed.push(found); - } - } - if (failed) { - return _error(res, UploadError(failed)); - } - return _success(res, completed); - } - _invalid(res, requestError); -}); - -const _error = (res: Response, message: string, error?: any) => { - res.statusMessage = message; - res.status(STATUS.EXECUTION_ERROR).send(error); -}; - -const _success = (res: Response, body: any) => { - res.status(STATUS.OK).send(body); -}; - -const _invalid = (res: Response, message: string) => { - res.statusMessage = message; - 
res.status(STATUS.BAD_REQUEST).send(); -}; - -const _permission_denied = (res: Response, message: string) => { - res.statusMessage = message; - res.status(STATUS.BAD_REQUEST).send("Permission Denied!"); -}; - -const suffixMap: { [type: string]: (string | [string, string | ((json: any) => any)]) } = { - "number": "_n", - "string": "_t", - "boolean": "_b", - "image": ["_t", "url"], - "video": ["_t", "url"], - "pdf": ["_t", "url"], - "audio": ["_t", "url"], - "web": ["_t", "url"], - "RichTextField": ["_t", value => value.Text], - "date": ["_d", value => new Date(value.date).toISOString()], - "proxy": ["_i", "fieldId"], - "list": ["_l", list => { - const results = []; - for (const value of list.fields) { - const term = ToSearchTerm(value); - if (term) { - results.push(term.value); - } - } - return results.length ? results : null; - }] -}; - -function ToSearchTerm(val: any): { suffix: string, value: any } | undefined { - if (val === null || val === undefined) { - return; - } - const type = val.__type || typeof val; - let suffix = suffixMap[type]; - if (!suffix) { - return; - } - - if (Array.isArray(suffix)) { - const accessor = suffix[1]; - if (typeof accessor === "function") { - val = accessor(val); - } else { - val = val[accessor]; - } - suffix = suffix[0]; - } - - return { suffix, value: val }; -} - -function getSuffix(value: string | [string, any]): string { - return typeof value === "string" ? value : value[0]; -} - -function UpdateField(socket: Socket, diff: Diff) { - Database.Instance.update(diff.id, diff.diff, - () => socket.broadcast.emit(MessageStore.UpdateField.Message, diff), false, "newDocuments"); - const docfield = diff.diff.$set; - if (!docfield) { - return; - } - const update: any = { id: diff.id }; - let dynfield = false; - for (let key in docfield) { - if (!key.startsWith("fields.")) continue; - dynfield = true; - let val = docfield[key]; - key = key.substring(7); - Object.values(suffixMap).forEach(suf => update[key + getSuffix(suf)] = { set: null }); - let term = ToSearchTerm(val); - if (term !== undefined) { - let { suffix, value } = term; - update[key + suffix] = { set: value }; - } - } - if (dynfield) { - Search.Instance.updateDocument(update); - } -} - -function DeleteField(socket: Socket, id: string) { - Database.Instance.delete({ _id: id }, "newDocuments").then(() => { - socket.broadcast.emit(MessageStore.DeleteField.Message, id); +export async function launchServer() { + await log_execution({ + startMessage: "\nstarting execution of preliminary functions", + endMessage: "completed preliminary functions\n", + action: preliminaryFunctions }); - - Search.Instance.deleteDocuments([id]); -} - -function DeleteFields(socket: Socket, ids: string[]) { - Database.Instance.delete({ _id: { $in: ids } }, "newDocuments").then(() => { - socket.broadcast.emit(MessageStore.DeleteFields.Message, ids); - }); - - Search.Instance.deleteDocuments(ids); - + await initializeServer(routeSetter); } -function CreateField(newValue: any) { - Database.Instance.insert(newValue, "newDocuments"); -} - -server.listen(serverPort); -console.log(`listening on port ${serverPort}`); - +/** + * If you're in development mode, you won't need to run a session. + * The session spawns off new server processes each time an error is encountered, and doesn't + * log the output of the server process, so it's not ideal for development. + * So, the 'else' clause is exactly what we've always run when executing npm start. 
+ */ +if (process.env.RELEASE) { + (sessionAgent = new DashSessionAgent()).launch(); +} else { + launchServer(); +}
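For orientation, every manager registered in routeSetter above plugs into the server through the same addSupervisedRoute call. A minimal sketch of what one such manager might look like follows; the class name, the import path for Method, and the handler typings are assumptions made for illustration, and only the { method, subscription, secureHandler } call shape is taken from the routes registered above.

```typescript
// Illustrative sketch only: "ExampleManager" and the import path for Method are assumptions;
// the addSupervisedRoute call mirrors the registrations in routeSetter above.
import { Method } from "./RouteManager"; // assumed location of the Method enum

export default class ExampleManager {
    public register(addSupervisedRoute: (initializer: {
        method: Method;
        subscription: string;
        secureHandler: (core: { req: any; res: any }) => any;
    }) => void) {
        addSupervisedRoute({
            method: Method.GET,
            subscription: "/exampleHeartbeat", // hypothetical route, not part of this diff
            secureHandler: ({ res }) => res.send(true)
        });
    }
}
```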
\ No newline at end of file diff --git a/src/server/public/files/.gitignore b/src/server/public/files/.gitignore deleted file mode 100644 index c96a04f00..000000000 --- a/src/server/public/files/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -* -!.gitignore
\ No newline at end of file diff --git a/src/server/remapUrl.ts b/src/server/remapUrl.ts index 5218a239a..45d2fdd33 100644 --- a/src/server/remapUrl.ts +++ b/src/server/remapUrl.ts @@ -54,7 +54,7 @@ async function update() { })); console.log("Done"); // await Promise.all(updates.map(update => { - // return limit(() => Search.Instance.updateDocument(update)); + // return limit(() => Search.updateDocument(update)); // })); cursor.close(); } diff --git a/src/server/repl.ts b/src/server/repl.ts new file mode 100644 index 000000000..c4526528e --- /dev/null +++ b/src/server/repl.ts @@ -0,0 +1,123 @@ +import { createInterface, Interface } from "readline"; +import { red, green, white } from "colors"; + +export interface Configuration { + identifier: () => string | string; + onInvalid?: (command: string, validCommand: boolean) => string | string; + onValid?: (success?: string) => string | string; + isCaseSensitive?: boolean; +} + +export type ReplAction = (parsedArgs: Array<string>) => any | Promise<any>; +export interface Registration { + argPatterns: RegExp[]; + action: ReplAction; +} + +export default class Repl { + private identifier: () => string | string; + private onInvalid: ((command: string, validCommand: boolean) => string) | string; + private onValid: ((success: string) => string) | string; + private isCaseSensitive: boolean; + private commandMap = new Map<string, Registration[]>(); + public interface: Interface; + private busy = false; + private keys: string | undefined; + + constructor({ identifier: prompt, onInvalid, onValid, isCaseSensitive }: Configuration) { + this.identifier = prompt; + this.onInvalid = onInvalid || this.usage; + this.onValid = onValid || this.success; + this.isCaseSensitive = isCaseSensitive ?? true; + this.interface = createInterface(process.stdin, process.stdout).on('line', this.considerInput); + } + + private resolvedIdentifier = () => typeof this.identifier === "string" ? this.identifier : this.identifier(); + + private usage = (command: string, validCommand: boolean) => { + if (validCommand) { + const formatted = white(command); + const patterns = green(this.commandMap.get(command)!.map(({ argPatterns }) => `${formatted} ${argPatterns.join(" ")}`).join('\n')); + return `${this.resolvedIdentifier()}\nthe given arguments do not match any registered patterns for ${formatted}\nthe list of valid argument patterns is given by:\n${patterns}`; + } else { + const resolved = this.keys; + if (resolved) { + return resolved; + } + const members: string[] = []; + const keys = this.commandMap.keys(); + let next: IteratorResult<string>; + while (!(next = keys.next()).done) { + members.push(next.value); + } + return `${this.resolvedIdentifier()} commands: { ${members.sort().join(", ")} }`; + } + } + + private success = (command: string) => `${this.resolvedIdentifier()} completed execution of ${white(command)}`; + + public registerCommand = (basename: string, argPatterns: (RegExp | string)[], action: ReplAction) => { + const existing = this.commandMap.get(basename); + const converted = argPatterns.map(input => input instanceof RegExp ? input : new RegExp(input)); + const registration = { argPatterns: converted, action }; + if (existing) { + existing.push(registration); + } else { + this.commandMap.set(basename, [registration]); + } + } + + private invalid = (command: string, validCommand: boolean) => { + console.log(red(typeof this.onInvalid === "string" ? 
this.onInvalid : this.onInvalid(command, validCommand))); + this.busy = false; + } + + private valid = (command: string) => { + console.log(green(typeof this.onValid === "string" ? this.onValid : this.onValid(command))); + this.busy = false; + } + + private considerInput = async (line: string) => { + if (this.busy) { + console.log(red("Busy")); + return; + } + this.busy = true; + line = line.trim(); + if (this.isCaseSensitive) { + line = line.toLowerCase(); + } + const [command, ...args] = line.split(/\s+/g); + if (!command) { + return this.invalid(command, false); + } + const registered = this.commandMap.get(command); + if (registered) { + const { length } = args; + const candidates = registered.filter(({ argPatterns: { length: count } }) => count === length); + for (const { argPatterns, action } of candidates) { + const parsed: string[] = []; + let matched = true; + if (length) { + for (let i = 0; i < length; i++) { + let matches: RegExpExecArray | null; + if ((matches = argPatterns[i].exec(args[i])) === null) { + matched = false; + break; + } + parsed.push(matches[0]); + } + } + if (!length || matched) { + await action(parsed); + this.valid(`${command} ${parsed.join(" ")}`); + return; + } + } + this.invalid(command, true); + } else { + this.invalid(command, false); + } + } + +}
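A brief usage sketch for the Repl class defined above; the prompt text, command name, and argument pattern are invented for illustration.

```typescript
import Repl from "./repl";

// prompt prefix printed before every status message
const repl = new Repl({ identifier: () => "dash-repl>" });

// registers "restart <target>", where <target> must match the given pattern;
// the action receives the already-matched argument strings
repl.registerCommand("restart", [/^(server|solr)$/], ([target]) => {
    console.log(`restarting ${target}...`);
});
```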
\ No newline at end of file diff --git a/src/server/server_Initialization.ts b/src/server/server_Initialization.ts new file mode 100644 index 000000000..cbe070293 --- /dev/null +++ b/src/server/server_Initialization.ts @@ -0,0 +1,155 @@ +import * as express from 'express'; +import * as expressValidator from 'express-validator'; +import * as session from 'express-session'; +import * as passport from 'passport'; +import * as bodyParser from 'body-parser'; +import * as cookieParser from 'cookie-parser'; +import expressFlash = require('express-flash'); +import flash = require('connect-flash'); +import { Database } from './database'; +import { getForgot, getLogin, getLogout, getReset, getSignup, postForgot, postLogin, postReset, postSignup } from './authentication/controllers/user_controller'; +const MongoStore = require('connect-mongo')(session); +import RouteManager from './RouteManager'; +import * as webpack from 'webpack'; +const config = require('../../webpack.config'); +const compiler = webpack(config); +import * as wdm from 'webpack-dev-middleware'; +import * as whm from 'webpack-hot-middleware'; +import * as fs from 'fs'; +import * as request from 'request'; +import RouteSubscriber from './RouteSubscriber'; +import { publicDirectory } from '.'; +import { logPort, } from './ActionUtilities'; +import { timeMap } from './ApiManagers/UserManager'; +import { blue, yellow } from 'colors'; +import * as cors from "cors"; + +/* RouteSetter is a wrapper around the server that prevents the server + from being exposed. */ +export type RouteSetter = (server: RouteManager) => void; +export let disconnect: Function; + +export default async function InitializeServer(routeSetter: RouteSetter) { + const app = buildWithMiddleware(express()); + + app.use(express.static(publicDirectory, { + setHeaders: res => res.setHeader("Access-Control-Allow-Origin", "*") + })); + app.use("/images", express.static(publicDirectory)); + const corsOptions = { + origin: function (_origin: any, callback: any) { + callback(null, true); + } + }; + app.use(cors(corsOptions)); + app.use("*", ({ user, originalUrl }, res, next) => { + if (user && !originalUrl.includes("Heartbeat")) { + const userEmail = (user as any).email; + if (userEmail) { + timeMap[userEmail] = Date.now(); + } + } + if (!user && originalUrl === "/") { + return res.redirect("/login"); + } + next(); + }); + + app.use(wdm(compiler, { publicPath: config.output.publicPath })); + app.use(whm(compiler)); + + registerAuthenticationRoutes(app); + registerCorsProxy(app); + + const isRelease = determineEnvironment(); + + routeSetter(new RouteManager(app, isRelease)); + + const serverPort = isRelease ? 
Number(process.env.serverPort) : 1050; + const server = app.listen(serverPort, () => { + logPort("server", Number(serverPort)); + console.log(); + }); + disconnect = async () => new Promise<Error>(resolve => server.close(resolve)); + + return isRelease; +} + +const week = 7 * 24 * 60 * 60 * 1000; +const secret = "64d6866242d3b5a5503c675b32c9605e4e90478e9b77bcf2bc"; + +function buildWithMiddleware(server: express.Express) { + [ + cookieParser(), + session({ + secret, + resave: true, + cookie: { maxAge: week }, + saveUninitialized: true, + store: new MongoStore({ url: Database.url }) + }), + flash(), + expressFlash(), + bodyParser.json({ limit: "10mb" }), + bodyParser.urlencoded({ extended: true }), + expressValidator(), + passport.initialize(), + passport.session(), + (req: express.Request, res: express.Response, next: express.NextFunction) => { + res.locals.user = req.user; + next(); + } + ].forEach(next => server.use(next)); + return server; +} + +/* Determine if the enviroment is dev mode or release mode. */ +function determineEnvironment() { + const isRelease = process.env.RELEASE === "true"; + + const color = isRelease ? blue : yellow; + const label = isRelease ? "release" : "development"; + console.log(`\nrunning server in ${color(label)} mode`); + + let clientUtils = fs.readFileSync("./src/client/util/ClientUtils.ts.temp", "utf8"); + clientUtils = `//AUTO-GENERATED FILE: DO NOT EDIT\n${clientUtils.replace('"mode"', String(isRelease))}`; + fs.writeFileSync("./src/client/util/ClientUtils.ts", clientUtils, "utf8"); + + return isRelease; +} + +function registerAuthenticationRoutes(server: express.Express) { + server.get("/signup", getSignup); + server.post("/signup", postSignup); + + server.get("/login", getLogin); + server.post("/login", postLogin); + + server.get("/logout", getLogout); + + server.get("/forgotPassword", getForgot); + server.post("/forgotPassword", postForgot); + + const reset = new RouteSubscriber("resetPassword").add("token").build; + server.get(reset, getReset); + server.post(reset, postReset); +} + +function registerCorsProxy(server: express.Express) { + const headerCharRegex = /[^\t\x20-\x7e\x80-\xff]/; + server.use("/corsProxy", (req, res) => { + req.pipe(request(decodeURIComponent(req.url.substring(1)))).on("response", res => { + const headers = Object.keys(res.headers); + headers.forEach(headerName => { + const header = res.headers[headerName]; + if (Array.isArray(header)) { + res.headers[headerName] = header.filter(h => !headerCharRegex.test(h)); + } else if (header) { + if (headerCharRegex.test(header as any)) { + delete res.headers[headerName]; + } + } + }); + }).pipe(res); + }); +}
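For reference, the corsProxy route registered above expects the target URL to be URI-encoded and appended directly to the proxy path; the server decodes everything after "/corsProxy/" and pipes the upstream response back. A client-side sketch, assuming the development port of 1050 used earlier in this file and an arbitrary target URL:

```typescript
// proxy a cross-origin GET through the server's /corsProxy route
async function proxiedFetch(url: string): Promise<string> {
    const response = await fetch(`http://localhost:1050/corsProxy/${encodeURIComponent(url)}`);
    return response.text();
}

proxiedFetch("https://en.wikipedia.org/wiki/Main_Page").then(console.log);
```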
\ No newline at end of file diff --git a/src/server/updateSearch.ts b/src/server/updateSearch.ts new file mode 100644 index 000000000..83094d36a --- /dev/null +++ b/src/server/updateSearch.ts @@ -0,0 +1,121 @@ +import { Database } from "./database"; +import { Search } from "./Search"; +import { log_execution } from "./ActionUtilities"; +import { cyan, green, yellow, red } from "colors"; + +const suffixMap: { [type: string]: (string | [string, string | ((json: any) => any)]) } = { + "number": "_n", + "string": "_t", + "boolean": "_b", + "image": ["_t", "url"], + "video": ["_t", "url"], + "pdf": ["_t", "url"], + "audio": ["_t", "url"], + "web": ["_t", "url"], + "date": ["_d", value => new Date(value.date).toISOString()], + "proxy": ["_i", "fieldId"], + "list": ["_l", list => { + const results = []; + for (const value of list.fields) { + const term = ToSearchTerm(value); + if (term) { + results.push(term.value); + } + } + return results.length ? results : null; + }] +}; + +function ToSearchTerm(val: any): { suffix: string, value: any } | undefined { + if (val === null || val === undefined) { + return; + } + const type = val.__type || typeof val; + let suffix = suffixMap[type]; + if (!suffix) { + return; + } + + if (Array.isArray(suffix)) { + const accessor = suffix[1]; + if (typeof accessor === "function") { + val = accessor(val); + } else { + val = val[accessor]; + } + suffix = suffix[0]; + } + + return { suffix, value: val }; +} + +async function update() { + console.log(green("Beginning update...")); + await log_execution<void>({ + startMessage: "Clearing existing Solr information...", + endMessage: "Solr information successfully cleared", + action: Search.clear, + color: cyan + }); + const cursor = await log_execution({ + startMessage: "Connecting to and querying for all documents from database...", + endMessage: ({ result, error }) => { + const success = error === null && result !== undefined; + if (!success) { + console.log(red("Unable to connect to the database.")); + process.exit(0); + } + return "Connection successful and query complete"; + }, + action: () => Database.Instance.query({}), + color: yellow + }); + const updates: any[] = []; + let numDocs = 0; + function updateDoc(doc: any) { + numDocs++; + if ((numDocs % 50) === 0) { + console.log(`Batch of 50 complete, total of ${numDocs}`); + } + if (doc.__type !== "Doc") { + return; + } + const fields = doc.fields; + if (!fields) { + return; + } + const update: any = { id: doc._id }; + let dynfield = false; + for (const key in fields) { + const value = fields[key]; + const term = ToSearchTerm(value); + if (term !== undefined) { + const { suffix, value } = term; + update[key + suffix] = value; + dynfield = true; + } + } + if (dynfield) { + updates.push(update); + } + } + await cursor?.forEach(updateDoc); + const result = await log_execution({ + startMessage: `Dispatching updates for ${updates.length} documents`, + endMessage: "Dispatched updates complete", + action: () => Search.updateDocuments(updates), + color: cyan + }); + try { + const { status } = JSON.parse(result).responseHeader; + console.log(status ? red(`Failed with status code (${status})`) : green("Success!")); + } catch { + console.log(red("Error:")); + console.log(result); + console.log("\n"); + } + await cursor?.close(); + process.exit(0); +} + +update();
\ No newline at end of file
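To make the suffix mapping in updateSearch.ts concrete, here is roughly what ToSearchTerm yields for a few field shapes; the sample values are invented, and ToSearchTerm is not exported, so this is purely illustrative.

```typescript
ToSearchTerm(42);                                  // -> { suffix: "_n", value: 42 }
ToSearchTerm("collection");                        // -> { suffix: "_t", value: "collection" }
ToSearchTerm({ __type: "proxy", fieldId: "abc" }); // -> { suffix: "_i", value: "abc" }
// so a Doc whose fields include { width: 42 } is sent to Solr as { id: <doc._id>, width_n: 42 }
```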
