Diffstat (limited to 'src/server')
31 files changed, 1041 insertions(+), 250 deletions(-)
diff --git a/src/server/ActionUtilities.ts b/src/server/ActionUtilities.ts index 4fe7374d1..60f66c878 100644 --- a/src/server/ActionUtilities.ts +++ b/src/server/ActionUtilities.ts @@ -1,46 +1,81 @@ -import * as fs from 'fs'; +import { readFile, writeFile, exists, mkdir, unlink, createWriteStream } from 'fs'; import { ExecOptions } from 'shelljs'; import { exec } from 'child_process'; import * as path from 'path'; import * as rimraf from "rimraf"; -import { yellow } from 'colors'; +import { yellow, Color } from 'colors'; +import * as nodemailer from "nodemailer"; +import { MailOptions } from "nodemailer/lib/json-transport"; +import Mail = require('nodemailer/lib/mailer'); + +const projectRoot = path.resolve(__dirname, "../../"); +export function pathFromRoot(relative?: string) { + if (!relative) { + return projectRoot; + } + return path.resolve(projectRoot, relative); +} + +export async function fileDescriptorFromStream(path: string) { + const logStream = createWriteStream(path); + return new Promise<number>(resolve => logStream.on("open", resolve)); +} export const command_line = (command: string, fromDirectory?: string) => { return new Promise<string>((resolve, reject) => { const options: ExecOptions = {}; if (fromDirectory) { - options.cwd = path.join(__dirname, fromDirectory); + options.cwd = fromDirectory ? path.resolve(projectRoot, fromDirectory) : projectRoot; } exec(command, options, (err, stdout) => err ? reject(err) : resolve(stdout)); }); }; export const read_text_file = (relativePath: string) => { - const target = path.join(__dirname, relativePath); + const target = path.resolve(__dirname, relativePath); return new Promise<string>((resolve, reject) => { - fs.readFile(target, (err, data) => err ? reject(err) : resolve(data.toString())); + readFile(target, (err, data) => err ? reject(err) : resolve(data.toString())); }); }; export const write_text_file = (relativePath: string, contents: any) => { - const target = path.join(__dirname, relativePath); + const target = path.resolve(__dirname, relativePath); return new Promise<void>((resolve, reject) => { - fs.writeFile(target, contents, (err) => err ? reject(err) : resolve()); + writeFile(target, contents, (err) => err ? reject(err) : resolve()); }); }; -export interface LogData { +export type Messager<T> = (outcome: { result: T | undefined, error: Error | null }) => string; + +export interface LogData<T> { startMessage: string; - endMessage: string; - action: () => void | Promise<void>; + // if you care about the execution informing your log, you can pass in a function that takes in the result and a potential error and decides what to write + endMessage: string | Messager<T>; + action: () => T | Promise<T>; + color?: Color; } let current = Math.ceil(Math.random() * 20); -export async function log_execution({ startMessage, endMessage, action }: LogData) { - const color = `\x1b[${31 + current++ % 6}m%s\x1b[0m`; - console.log(color, `${startMessage}...`); - await action(); - console.log(color, endMessage); +export async function log_execution<T>({ startMessage, endMessage, action, color }: LogData<T>): Promise<T | undefined> { + let result: T | undefined = undefined, error: Error | null = null; + const resolvedColor = color || `\x1b[${31 + ++current % 6}m%s\x1b[0m`; + log_helper(`${startMessage}...`, resolvedColor); + try { + result = await action(); + } catch (e) { + error = e; + } finally { + log_helper(typeof endMessage === "string" ? 
endMessage : endMessage({ result, error }), resolvedColor); + } + return result; +} + +function log_helper(content: string, color: Color | string) { + if (typeof color === "string") { + console.log(color, content); + } else { + console.log(color(content)); + } } export function logPort(listener: string, port: number) { @@ -61,10 +96,10 @@ export function msToTime(duration: number) { } export const createIfNotExists = async (path: string) => { - if (await new Promise<boolean>(resolve => fs.exists(path, resolve))) { + if (await new Promise<boolean>(resolve => exists(path, resolve))) { return true; } - return new Promise<boolean>(resolve => fs.mkdir(path, error => resolve(error === null))); + return new Promise<boolean>(resolve => mkdir(path, error => resolve(error === null))); }; export async function Prune(rootDirectory: string): Promise<boolean> { @@ -72,8 +107,54 @@ export async function Prune(rootDirectory: string): Promise<boolean> { return error === null; } -export const Destroy = (mediaPath: string) => new Promise<boolean>(resolve => fs.unlink(mediaPath, error => resolve(error === null))); +export const Destroy = (mediaPath: string) => new Promise<boolean>(resolve => unlink(mediaPath, error => resolve(error === null))); -export function addBeforeExitHandler(handler: NodeJS.BeforeExitListener) { - process.on("beforeExit", handler); -} +export namespace Email { + + const smtpTransport = nodemailer.createTransport({ + service: 'Gmail', + auth: { + user: 'brownptcdash@gmail.com', + pass: 'browngfx1' + } + }); + + export interface DispatchOptions<T extends string | string[]> { + to: T; + subject: string; + content: string; + attachments?: Mail.Attachment | Mail.Attachment[]; + } + + export interface DispatchFailure { + recipient: string; + error: Error; + } + + export async function dispatchAll({ to, subject, content, attachments }: DispatchOptions<string[]>) { + const failures: DispatchFailure[] = []; + await Promise.all(to.map(async recipient => { + let error: Error | null; + const resolved = attachments ? "length" in attachments ? attachments : [attachments] : undefined; + if ((error = await Email.dispatch({ to: recipient, subject, content, attachments: resolved })) !== null) { + failures.push({ + recipient, + error + }); + } + })); + return failures.length ? failures : undefined; + } + + export async function dispatch({ to, subject, content, attachments }: DispatchOptions<string>): Promise<Error | null> { + const mailOptions = { + to, + from: 'brownptcdash@gmail.com', + subject, + text: `Hello ${to.split("@")[0]},\n\n${content}`, + attachments + } as MailOptions; + return new Promise<Error | null>(resolve => smtpTransport.sendMail(mailOptions, resolve)); + } + +}
\ No newline at end of file diff --git a/src/server/ApiManagers/DeleteManager.ts b/src/server/ApiManagers/DeleteManager.ts index 1fdc7cc36..be452c0ff 100644 --- a/src/server/ApiManagers/DeleteManager.ts +++ b/src/server/ApiManagers/DeleteManager.ts @@ -1,5 +1,5 @@ import ApiManager, { Registration } from "./ApiManager"; -import { Method, _permission_denied, OnUnauthenticated } from "../RouteManager"; +import { Method, _permission_denied, PublicHandler } from "../RouteManager"; import { WebSocket } from "../Websocket/Websocket"; import { Database } from "../database"; @@ -10,7 +10,7 @@ export default class DeleteManager extends ApiManager { register({ method: Method.GET, subscription: "/delete", - onValidation: async ({ res, isRelease }) => { + secureHandler: async ({ res, isRelease }) => { if (isRelease) { return _permission_denied(res, deletionPermissionError); } @@ -22,7 +22,7 @@ export default class DeleteManager extends ApiManager { register({ method: Method.GET, subscription: "/deleteAll", - onValidation: async ({ res, isRelease }) => { + secureHandler: async ({ res, isRelease }) => { if (isRelease) { return _permission_denied(res, deletionPermissionError); } @@ -31,7 +31,7 @@ export default class DeleteManager extends ApiManager { } }); - const hi: OnUnauthenticated = async ({ res, isRelease }) => { + const hi: PublicHandler = async ({ res, isRelease }) => { if (isRelease) { return _permission_denied(res, deletionPermissionError); } @@ -50,7 +50,7 @@ export default class DeleteManager extends ApiManager { register({ method: Method.GET, subscription: "/deleteWithAux", - onValidation: async ({ res, isRelease }) => { + secureHandler: async ({ res, isRelease }) => { if (isRelease) { return _permission_denied(res, deletionPermissionError); } @@ -62,7 +62,7 @@ export default class DeleteManager extends ApiManager { register({ method: Method.GET, subscription: "/deleteWithGoogleCredentials", - onValidation: async ({ res, isRelease }) => { + secureHandler: async ({ res, isRelease }) => { if (isRelease) { return _permission_denied(res, deletionPermissionError); } diff --git a/src/server/ApiManagers/DiagnosticManager.ts b/src/server/ApiManagers/DiagnosticManager.ts deleted file mode 100644 index 104985481..000000000 --- a/src/server/ApiManagers/DiagnosticManager.ts +++ /dev/null @@ -1,30 +0,0 @@ -import ApiManager, { Registration } from "./ApiManager"; -import { Method } from "../RouteManager"; -import request = require('request-promise'); - -export default class DiagnosticManager extends ApiManager { - - protected initialize(register: Registration): void { - - register({ - method: Method.GET, - subscription: "/serverHeartbeat", - onValidation: ({ res }) => res.send(true) - }); - - register({ - method: Method.GET, - subscription: "/solrHeartbeat", - onValidation: async ({ res }) => { - try { - await request("http://localhost:8983"); - res.send({ running: true }); - } catch (e) { - res.send({ running: false }); - } - } - }); - - } - -}
\ No newline at end of file diff --git a/src/server/ApiManagers/DownloadManager.ts b/src/server/ApiManagers/DownloadManager.ts index 2280739fc..1bb84f374 100644 --- a/src/server/ApiManagers/DownloadManager.ts +++ b/src/server/ApiManagers/DownloadManager.ts @@ -7,6 +7,7 @@ import { Database } from "../database"; import * as path from "path"; import { DashUploadUtils, SizeSuffix } from "../DashUploadUtils"; import { publicDirectory } from ".."; +import { serverPathToFile, Directory } from "./UploadManager"; export type Hierarchy = { [id: string]: string | Hierarchy }; export type ZipMutator = (file: Archiver.Archiver) => void | Promise<void>; @@ -32,7 +33,7 @@ export default class DownloadManager extends ApiManager { register({ method: Method.GET, subscription: new RouteSubscriber("imageHierarchyExport").add('docId'), - onValidation: async ({ req, res }) => { + secureHandler: async ({ req, res }) => { const id = req.params.docId; const hierarchy: Hierarchy = {}; await buildHierarchyRecursive(id, hierarchy); @@ -43,7 +44,7 @@ export default class DownloadManager extends ApiManager { register({ method: Method.GET, subscription: new RouteSubscriber("downloadId").add("docId"), - onValidation: async ({ req, res }) => { + secureHandler: async ({ req, res }) => { return BuildAndDispatchZip(res, async zip => { const { id, docs, files } = await getDocs(req.params.docId); const docString = JSON.stringify({ id, docs }); @@ -58,7 +59,7 @@ export default class DownloadManager extends ApiManager { register({ method: Method.GET, subscription: new RouteSubscriber("serializeDoc").add("docId"), - onValidation: async ({ req, res }) => { + secureHandler: async ({ req, res }) => { const { docs, files } = await getDocs(req.params.docId); res.send({ docs, files: Array.from(files) }); } @@ -245,9 +246,9 @@ async function writeHierarchyRecursive(file: Archiver.Archiver, hierarchy: Hiera if (typeof result === "string") { let path: string; let matches: RegExpExecArray | null; - if ((matches = /\:1050\/files\/(upload\_[\da-z]{32}.*)/g.exec(result)) !== null) { + if ((matches = /\:1050\/files\/images\/(upload\_[\da-z]{32}.*)/g.exec(result)) !== null) { // image already exists on our server - path = `${__dirname}/public/files/${matches[1]}`; + path = serverPathToFile(Directory.images, matches[1]); } else { // the image doesn't already exist on our server (may have been dragged // and dropped in the browser and thus hosted remotely) so we upload it diff --git a/src/server/ApiManagers/GeneralGoogleManager.ts b/src/server/ApiManagers/GeneralGoogleManager.ts index 3617779d5..a5240edbc 100644 --- a/src/server/ApiManagers/GeneralGoogleManager.ts +++ b/src/server/ApiManagers/GeneralGoogleManager.ts @@ -19,7 +19,7 @@ export default class GeneralGoogleManager extends ApiManager { register({ method: Method.GET, subscription: "/readGoogleAccessToken", - onValidation: async ({ user, res }) => { + secureHandler: async ({ user, res }) => { const token = await GoogleApiServerUtils.retrieveAccessToken(user.id); if (!token) { return res.send(GoogleApiServerUtils.generateAuthenticationUrl()); @@ -31,7 +31,7 @@ export default class GeneralGoogleManager extends ApiManager { register({ method: Method.POST, subscription: "/writeGoogleAccessToken", - onValidation: async ({ user, req, res }) => { + secureHandler: async ({ user, req, res }) => { res.send(await GoogleApiServerUtils.processNewUser(user.id, req.body.authenticationCode)); } }); @@ -39,7 +39,7 @@ export default class GeneralGoogleManager extends ApiManager { register({ method: 
Method.POST, subscription: new RouteSubscriber("googleDocs").add("sector", "action"), - onValidation: async ({ req, res, user }) => { + secureHandler: async ({ req, res, user }) => { const sector: GoogleApiServerUtils.Service = req.params.sector as GoogleApiServerUtils.Service; const action: GoogleApiServerUtils.Action = req.params.action as GoogleApiServerUtils.Action; const endpoint = await GoogleApiServerUtils.GetEndpoint(GoogleApiServerUtils.Service[sector], user.id); diff --git a/src/server/ApiManagers/GooglePhotosManager.ts b/src/server/ApiManagers/GooglePhotosManager.ts index e2539f120..107542ce2 100644 --- a/src/server/ApiManagers/GooglePhotosManager.ts +++ b/src/server/ApiManagers/GooglePhotosManager.ts @@ -41,7 +41,7 @@ export default class GooglePhotosManager extends ApiManager { register({ method: Method.POST, subscription: "/googlePhotosMediaUpload", - onValidation: async ({ user, req, res }) => { + secureHandler: async ({ user, req, res }) => { const { media } = req.body; const token = await GoogleApiServerUtils.retrieveAccessToken(user.id); if (!token) { @@ -82,7 +82,7 @@ export default class GooglePhotosManager extends ApiManager { register({ method: Method.POST, subscription: "/googlePhotosMediaDownload", - onValidation: async ({ req, res }) => { + secureHandler: async ({ req, res }) => { const contents: { mediaItems: MediaItem[] } = req.body; let failed = 0; if (contents) { diff --git a/src/server/ApiManagers/PDFManager.ts b/src/server/ApiManagers/PDFManager.ts index a190ab0cb..0136b758e 100644 --- a/src/server/ApiManagers/PDFManager.ts +++ b/src/server/ApiManagers/PDFManager.ts @@ -1,7 +1,7 @@ import ApiManager, { Registration } from "./ApiManager"; import { Method } from "../RouteManager"; import RouteSubscriber from "../RouteSubscriber"; -import { exists, createReadStream, createWriteStream } from "fs"; +import { existsSync, createReadStream, createWriteStream } from "fs"; import * as Pdfjs from 'pdfjs-dist'; import { createCanvas } from "canvas"; const imageSize = require("probe-image-size"); @@ -17,42 +17,37 @@ export default class PDFManager extends ApiManager { register({ method: Method.GET, subscription: new RouteSubscriber("thumbnail").add("filename"), - onValidation: ({ req, res }) => getOrCreateThumbnail(req.params.filename, res) + secureHandler: ({ req, res }) => getOrCreateThumbnail(req.params.filename, res) }); } } -function getOrCreateThumbnail(thumbnailName: string, res: express.Response) { +async function getOrCreateThumbnail(thumbnailName: string, res: express.Response): Promise<void> { const noExtension = thumbnailName.substring(0, thumbnailName.length - ".png".length); const pageString = noExtension.split('-')[1]; const pageNumber = parseInt(pageString); - return new Promise<void>(resolve => { + return new Promise<void>(async resolve => { const path = serverPathToFile(Directory.pdf_thumbnails, thumbnailName); - exists(path, (exists: boolean) => { - if (exists) { - const existingThumbnail = createReadStream(path); - imageSize(existingThumbnail, (err: any, { width, height }: any) => { - if (err) { - console.log(red(`In PDF thumbnail response, unable to determine dimensions of ${thumbnailName}:`)); - console.log(err); - return; - } - res.send({ - path: clientPathToFile(Directory.pdf_thumbnails, thumbnailName), - width, - height - }); - }); - } else { - const offset = thumbnailName.length - pageString.length - 5; - const name = thumbnailName.substring(0, offset) + ".pdf"; - const path = serverPathToFile(Directory.pdfs, name); - CreateThumbnail(path, 
pageNumber, res); + if (existsSync(path)) { + const existingThumbnail = createReadStream(path); + const { err, viewport } = await new Promise<any>(resolve => { + imageSize(existingThumbnail, (err: any, viewport: any) => resolve({ err, viewport })); + }); + if (err) { + console.log(red(`In PDF thumbnail response, unable to determine dimensions of ${thumbnailName}:`)); + console.log(err); + return; } - resolve(); - }); + dispatchThumbnail(res, viewport, thumbnailName); + } else { + const offset = thumbnailName.length - pageString.length - 5; + const name = thumbnailName.substring(0, offset) + ".pdf"; + const path = serverPathToFile(Directory.pdfs, name); + await CreateThumbnail(path, pageNumber, res); + } + resolve(); }); } @@ -70,19 +65,28 @@ async function CreateThumbnail(file: string, pageNumber: number, res: express.Re await page.render(renderContext).promise; const pngStream = canvas.createPNGStream(); const filenames = path.basename(file).split("."); - const pngFile = serverPathToFile(Directory.pdf_thumbnails, `${filenames[0]}-${pageNumber}.png`); + const thumbnailName = `${filenames[0]}-${pageNumber}.png`; + const pngFile = serverPathToFile(Directory.pdf_thumbnails, thumbnailName); const out = createWriteStream(pngFile); pngStream.pipe(out); - out.on("finish", () => { - res.send({ - path: pngFile, - width: viewport.width, - height: viewport.height + return new Promise<void>((resolve, reject) => { + out.on("finish", () => { + dispatchThumbnail(res, viewport, thumbnailName); + resolve(); + }); + out.on("error", error => { + console.log(red(`In PDF thumbnail creation, encountered the following error when piping ${pngFile}:`)); + console.log(error); + reject(); }); }); - out.on("error", error => { - console.log(red(`In PDF thumbnail creation, encountered the following error when piping ${pngFile}:`)); - console.log(error); +} + +function dispatchThumbnail(res: express.Response, { width, height }: Pdfjs.PDFPageViewport, thumbnailName: string) { + res.send({ + path: clientPathToFile(Directory.pdf_thumbnails, thumbnailName), + width, + height }); } diff --git a/src/server/ApiManagers/SearchManager.ts b/src/server/ApiManagers/SearchManager.ts index 7afecbb18..4ce12f9f3 100644 --- a/src/server/ApiManagers/SearchManager.ts +++ b/src/server/ApiManagers/SearchManager.ts @@ -4,15 +4,34 @@ import { Search } from "../Search"; const findInFiles = require('find-in-files'); import * as path from 'path'; import { pathToDirectory, Directory } from "./UploadManager"; +import { red, cyan, yellow } from "colors"; +import RouteSubscriber from "../RouteSubscriber"; +import { exec } from "child_process"; +import { onWindows } from ".."; +import { get } from "request-promise"; -export default class SearchManager extends ApiManager { +export class SearchManager extends ApiManager { protected initialize(register: Registration): void { register({ method: Method.GET, + subscription: new RouteSubscriber("solr").add("action"), + secureHandler: async ({ req, res }) => { + const { action } = req.params; + if (["start", "stop"].includes(action)) { + const status = req.params.action === "start"; + const success = await SolrManager.SetRunning(status); + console.log(success ? `Successfully ${status ? "started" : "stopped"} Solr!` : `Uh oh! Check the console for the error that occurred while ${status ? 
"starting" : "stopping"} Solr`); + } + res.redirect("/home"); + } + }); + + register({ + method: Method.GET, subscription: "/textsearch", - onValidation: async ({ req, res }) => { + secureHandler: async ({ req, res }) => { const q = req.query.q; if (q === undefined) { res.send([]); @@ -32,18 +51,43 @@ export default class SearchManager extends ApiManager { register({ method: Method.GET, subscription: "/search", - onValidation: async ({ req, res }) => { + secureHandler: async ({ req, res }) => { const solrQuery: any = {}; ["q", "fq", "start", "rows", "hl", "hl.fl"].forEach(key => solrQuery[key] = req.query[key]); if (solrQuery.q === undefined) { res.send([]); return; } - const results = await Search.Instance.search(solrQuery); + const results = await Search.search(solrQuery); res.send(results); } }); } +} + +export namespace SolrManager { + + const command = onWindows ? "solr.cmd" : "solr"; + + export async function SetRunning(status: boolean): Promise<boolean> { + const args = status ? "start" : "stop -p 8983"; + console.log(`solr management: trying to ${args}`); + exec(`${command} ${args}`, { cwd: "./solr-8.3.1/bin" }, (error, stdout, stderr) => { + if (error) { + console.log(red(`solr management error: unable to ${args} server`)); + console.log(red(error.message)); + } + console.log(cyan(stdout)); + console.log(yellow(stderr)); + }); + try { + await get("http://localhost:8983"); + return true; + } catch { + return false; + } + } + }
\ No newline at end of file
diff --git a/src/server/ApiManagers/SessionManager.ts b/src/server/ApiManagers/SessionManager.ts
new file mode 100644
index 000000000..a99aa05e0
--- /dev/null
+++ b/src/server/ApiManagers/SessionManager.ts
@@ -0,0 +1,59 @@
+import ApiManager, { Registration } from "./ApiManager";
+import { Method, _permission_denied, AuthorizedCore, SecureHandler } from "../RouteManager";
+import RouteSubscriber from "../RouteSubscriber";
+import { sessionAgent } from "..";
+import { DashSessionAgent } from "../DashSession/DashSessionAgent";
+
+const permissionError = "You are not authorized!";
+
+export default class SessionManager extends ApiManager {
+
+    private secureSubscriber = (root: string, ...params: string[]) => new RouteSubscriber(root).add("sessionKey", ...params);
+
+    private authorizedAction = (handler: SecureHandler) => {
+        return (core: AuthorizedCore) => {
+            const { req, res, isRelease } = core;
+            const { sessionKey } = req.params;
+            if (!isRelease) {
+                return res.send("This can be run only on the release server.");
+            }
+            if (sessionKey !== process.env.session_key) {
+                return _permission_denied(res, permissionError);
+            }
+            return handler(core);
+        };
+    }
+
+    protected initialize(register: Registration): void {
+
+        register({
+            method: Method.GET,
+            subscription: this.secureSubscriber("debug", "to?"),
+            secureHandler: this.authorizedAction(async ({ req: { params }, res }) => {
+                const to = params.to || DashSessionAgent.notificationRecipient;
+                const { error } = await sessionAgent.serverWorker.emit("debug", { to });
+                res.send(error ? error.message : `Your request was successful: the server captured and compressed (but did not save) a new back up. It was sent to ${to}.`);
+            })
+        });
+
+        register({
+            method: Method.GET,
+            subscription: this.secureSubscriber("backup"),
+            secureHandler: this.authorizedAction(async ({ res }) => {
+                const { error } = await sessionAgent.serverWorker.emit("backup");
+                res.send(error ? error.message : "Your request was successful: the server successfully created a new back up.");
+            })
+        });
+
+        register({
+            method: Method.GET,
+            subscription: this.secureSubscriber("kill"),
+            secureHandler: this.authorizedAction(({ res }) => {
+                res.send("Your request was successful: the server and its session have been killed.");
+                sessionAgent.killSession("an authorized user has manually ended the server session via the /kill route");
+            })
+        });
+
+    }
+
+}
\ No newline at end of file diff --git a/src/server/ApiManagers/UploadManager.ts b/src/server/ApiManagers/UploadManager.ts index da1f83b75..74f45ae62 100644 --- a/src/server/ApiManagers/UploadManager.ts +++ b/src/server/ApiManagers/UploadManager.ts @@ -41,7 +41,7 @@ export default class UploadManager extends ApiManager { register({ method: Method.POST, subscription: "/upload", - onValidation: async ({ req, res }) => { + secureHandler: async ({ req, res }) => { const form = new formidable.IncomingForm(); form.uploadDir = pathToDirectory(Directory.parsed_files); form.keepExtensions = true; @@ -62,7 +62,7 @@ export default class UploadManager extends ApiManager { register({ method: Method.POST, subscription: "/uploadDoc", - onValidation: ({ req, res }) => { + secureHandler: ({ req, res }) => { const form = new formidable.IncomingForm(); form.keepExtensions = true; // let path = req.body.path; @@ -166,7 +166,7 @@ export default class UploadManager extends ApiManager { register({ method: Method.POST, subscription: "/inspectImage", - onValidation: async ({ req, res }) => { + secureHandler: async ({ req, res }) => { const { source } = req.body; if (typeof source === "string") { const { serverAccessPaths } = await DashUploadUtils.UploadImage(source); @@ -179,7 +179,7 @@ export default class UploadManager extends ApiManager { register({ method: Method.POST, subscription: "/uploadURI", - onValidation: ({ req, res }) => { + secureHandler: ({ req, res }) => { const uri = req.body.uri; const filename = req.body.name; if (!uri || !filename) { diff --git a/src/server/ApiManagers/UserManager.ts b/src/server/ApiManagers/UserManager.ts index 4556e01ea..36d48e366 100644 --- a/src/server/ApiManagers/UserManager.ts +++ b/src/server/ApiManagers/UserManager.ts @@ -18,7 +18,7 @@ export default class UserManager extends ApiManager { register({ method: Method.GET, subscription: "/getUsers", - onValidation: async ({ res }) => { + secureHandler: async ({ res }) => { const cursor = await Database.Instance.query({}, { email: 1, userDocumentId: 1 }, "users"); const results = await cursor.toArray(); res.send(results.map(user => ({ email: user.email, userDocumentId: user.userDocumentId }))); @@ -28,14 +28,14 @@ export default class UserManager extends ApiManager { register({ method: Method.GET, subscription: "/getUserDocumentId", - onValidation: ({ res, user }) => res.send(user.userDocumentId) + secureHandler: ({ res, user }) => res.send(user.userDocumentId) }); register({ method: Method.GET, subscription: "/getCurrentUser", - onValidation: ({ res, user }) => res.send(JSON.stringify(user)), - onUnauthenticated: ({ res }) => res.send(JSON.stringify({ id: "__guest__", email: "" })) + secureHandler: ({ res, user }) => res.send(JSON.stringify(user)), + publicHandler: ({ res }) => res.send(JSON.stringify({ id: "__guest__", email: "" })) }); register({ @@ -94,7 +94,7 @@ export default class UserManager extends ApiManager { register({ method: Method.GET, subscription: "/activity", - onValidation: ({ res }) => { + secureHandler: ({ res }) => { const now = Date.now(); const activeTimes: ActivityUnit[] = []; diff --git a/src/server/ApiManagers/UtilManager.ts b/src/server/ApiManagers/UtilManager.ts index 601a7d0d0..a0d0d0f4b 100644 --- a/src/server/ApiManagers/UtilManager.ts +++ b/src/server/ApiManagers/UtilManager.ts @@ -3,6 +3,7 @@ import { Method } from "../RouteManager"; import { exec } from 'child_process'; import { command_line } from "../ActionUtilities"; import RouteSubscriber from "../RouteSubscriber"; +import { red } from 
"colors"; export default class UtilManager extends ApiManager { @@ -11,13 +12,20 @@ export default class UtilManager extends ApiManager { register({ method: Method.GET, subscription: new RouteSubscriber("environment").add("key"), - onValidation: ({ req, res }) => res.send(process.env[req.params.key]) + secureHandler: ({ req, res }) => { + const { key } = req.params; + const value = process.env[key]; + if (!value) { + console.log(red(`process.env.${key} is not defined.`)); + } + return res.send(value); + } }); register({ method: Method.GET, subscription: "/pull", - onValidation: async ({ res }) => { + secureHandler: async ({ res }) => { return new Promise<void>(resolve => { exec('"C:\\Program Files\\Git\\git-bash.exe" -c "git pull"', err => { if (err) { @@ -34,8 +42,8 @@ export default class UtilManager extends ApiManager { register({ method: Method.GET, subscription: "/buxton", - onValidation: async ({ res }) => { - const cwd = '../scraping/buxton'; + secureHandler: async ({ res }) => { + const cwd = './src/scraping/buxton'; const onResolved = (stdout: string) => { console.log(stdout); res.redirect("/"); }; const onRejected = (err: any) => { console.error(err.message); res.send(err); }; @@ -48,7 +56,7 @@ export default class UtilManager extends ApiManager { register({ method: Method.GET, subscription: "/version", - onValidation: ({ res }) => { + secureHandler: ({ res }) => { return new Promise<void>(resolve => { exec('"C:\\Program Files\\Git\\bin\\git.exe" rev-parse HEAD', (err, stdout) => { if (err) { diff --git a/src/server/DashSession/DashSessionAgent.ts b/src/server/DashSession/DashSessionAgent.ts new file mode 100644 index 000000000..c55e01243 --- /dev/null +++ b/src/server/DashSession/DashSessionAgent.ts @@ -0,0 +1,223 @@ +import { Email, pathFromRoot } from "../ActionUtilities"; +import { red, yellow, green, cyan } from "colors"; +import { get } from "request-promise"; +import { Utils } from "../../Utils"; +import { WebSocket } from "../Websocket/Websocket"; +import { MessageStore } from "../Message"; +import { launchServer, onWindows } from ".."; +import { readdirSync, statSync, createWriteStream, readFileSync, unlinkSync } from "fs"; +import * as Archiver from "archiver"; +import { resolve } from "path"; +import { AppliedSessionAgent, MessageHandler, ExitHandler, Monitor, ServerWorker } from "resilient-server-session"; +import rimraf = require("rimraf"); + +/** + * If we're the monitor (master) thread, we should launch the monitor logic for the session. + * Otherwise, we must be on a worker thread that was spawned *by* the monitor (master) thread, and thus + * our job should be to run the server. + */ +export class DashSessionAgent extends AppliedSessionAgent { + + private readonly signature = "-Dash Server Session Manager"; + private readonly releaseDesktop = pathFromRoot("../../Desktop"); + + /** + * The core method invoked when the single master thread is initialized. + * Installs event hooks, repl commands and additional IPC listeners. 
+ */ + protected async initializeMonitor(monitor: Monitor, sessionKey: string): Promise<void> { + await this.dispatchSessionPassword(sessionKey); + monitor.addReplCommand("pull", [], () => monitor.exec("git pull")); + monitor.addReplCommand("solr", [/start|stop|index/], this.executeSolrCommand); + monitor.addReplCommand("backup", [], this.backup); + monitor.addReplCommand("debug", [/\S+\@\S+/], async ([to]) => this.dispatchZippedDebugBackup(to)); + monitor.on("backup", this.backup); + monitor.on("debug", async ({ to }) => this.dispatchZippedDebugBackup(to)); + monitor.coreHooks.onCrashDetected(this.dispatchCrashReport); + } + + /** + * The core method invoked when a server worker thread is initialized. + * Installs logic to be executed when the server worker dies. + */ + protected async initializeServerWorker(): Promise<ServerWorker> { + const worker = ServerWorker.Create(launchServer); // server initialization delegated to worker + worker.addExitHandler(this.notifyClient); + return worker; + } + + /** + * Prepares the body of the email with instructions on restoring the transmitted remote database backup locally. + */ + private _remoteDebugInstructions: string | undefined; + private generateDebugInstructions = (zipName: string, target: string): string => { + if (!this._remoteDebugInstructions) { + this._remoteDebugInstructions = readFileSync(resolve(__dirname, "./templates/remote_debug_instructions.txt"), { encoding: "utf8" }); + } + return this._remoteDebugInstructions + .replace(/__zipname__/, zipName) + .replace(/__target__/, target) + .replace(/__signature__/, this.signature); + } + + /** + * Prepares the body of the email with information regarding a crash event. + */ + private _crashInstructions: string | undefined; + private generateCrashInstructions({ name, message, stack }: Error): string { + if (!this._crashInstructions) { + this._crashInstructions = readFileSync(resolve(__dirname, "./templates/crash_instructions.txt"), { encoding: "utf8" }); + } + return this._crashInstructions + .replace(/__name__/, name || "[no error name found]") + .replace(/__message__/, message || "[no error message found]") + .replace(/__stack__/, stack || "[no error stack found]") + .replace(/__signature__/, this.signature); + } + + /** + * This sends a pseudorandomly generated guid to the configuration's recipients, allowing them alone + * to kill the server via the /kill/:key route. + */ + private dispatchSessionPassword = async (sessionKey: string): Promise<void> => { + const { mainLog } = this.sessionMonitor; + const { notificationRecipient } = DashSessionAgent; + mainLog(green("dispatching session key...")); + const error = await Email.dispatch({ + to: notificationRecipient, + subject: "Dash Release Session Admin Authentication Key", + content: [ + `Here's the key for this session (started @ ${new Date().toUTCString()}):`, + sessionKey, + this.signature + ].join("\n\n") + }); + if (error) { + this.sessionMonitor.mainLog(red(`dispatch failure @ ${notificationRecipient} (${yellow(error.message)})`)); + mainLog(red("distribution of session key experienced errors")); + } else { + mainLog(green("successfully distributed session key to recipients")); + } + } + + /** + * This sends an email with the generated crash report. 
+ */ + private dispatchCrashReport: MessageHandler<{ error: Error }> = async ({ error: crashCause }) => { + const { mainLog } = this.sessionMonitor; + const { notificationRecipient } = DashSessionAgent; + const error = await Email.dispatch({ + to: notificationRecipient, + subject: "Dash Web Server Crash", + content: this.generateCrashInstructions(crashCause) + }); + if (error) { + this.sessionMonitor.mainLog(red(`dispatch failure @ ${notificationRecipient} ${yellow(`(${error.message})`)}`)); + mainLog(red("distribution of crash notification experienced errors")); + } else { + mainLog(green("successfully distributed crash notification to recipients")); + } + } + + /** + * Logic for interfacing with Solr. Either starts it, + * stops it, or rebuilds its indicies. + */ + private executeSolrCommand = async (args: string[]): Promise<void> => { + const { exec, mainLog } = this.sessionMonitor; + const action = args[0]; + if (action === "index") { + exec("npx ts-node ./updateSearch.ts", { cwd: pathFromRoot("./src/server") }); + } else { + const command = `${onWindows ? "solr.cmd" : "solr"} ${args[0] === "start" ? "start" : "stop -p 8983"}`; + await exec(command, { cwd: "./solr-8.3.1/bin" }); + try { + await get("http://localhost:8983"); + mainLog(green("successfully connected to 8983 after running solr initialization")); + } catch { + mainLog(red("unable to connect at 8983 after running solr initialization")); + } + } + } + + /** + * Broadcast to all clients that their connection + * is no longer valid, and explain why / what to expect. + */ + private notifyClient: ExitHandler = reason => { + const { _socket } = WebSocket; + if (_socket) { + const message = typeof reason === "boolean" ? (reason ? "exit" : "temporary") : "crash"; + Utils.Emit(_socket, MessageStore.ConnectionTerminated, message); + } + } + + /** + * Performs a backup of the database, saved to the desktop subdirectory. + * This should work as is only on our specific release server. + */ + private backup = async (): Promise<void> => this.sessionMonitor.exec("backup.bat", { cwd: this.releaseDesktop }); + + /** + * Compress either a brand new backup or the most recent backup and send it + * as an attachment to an email, dispatched to the requested recipient. 
+ * @param mode specifies whether or not to make a new backup before exporting + * @param to the recipient of the email + */ + private async dispatchZippedDebugBackup(to: string): Promise<void> { + const { mainLog } = this.sessionMonitor; + try { + // if desired, complete an immediate backup to send + await this.backup(); + mainLog("backup complete"); + + const backupsDirectory = `${this.releaseDesktop}/backups`; + + // sort all backups by their modified time, and choose the most recent one + const target = readdirSync(backupsDirectory).map(filename => ({ + modifiedTime: statSync(`${backupsDirectory}/${filename}`).mtimeMs, + filename + })).sort((a, b) => b.modifiedTime - a.modifiedTime)[0].filename; + mainLog(`targeting ${target}...`); + + // create a zip file and to it, write the contents of the backup directory + const zipName = `${target}.zip`; + const zipPath = `${this.releaseDesktop}/${zipName}`; + const targetPath = `${backupsDirectory}/${target}`; + const output = createWriteStream(zipPath); + const zip = Archiver('zip'); + zip.pipe(output); + zip.directory(`${targetPath}/Dash`, false); + await zip.finalize(); + mainLog(`zip finalized with size ${statSync(zipPath).size} bytes, saved to ${zipPath}`); + + // dispatch the email to the recipient, containing the finalized zip file + const error = await Email.dispatch({ + to, + subject: `Remote debug: compressed backup of ${target}...`, + content: this.generateDebugInstructions(zipName, target), + attachments: [{ filename: zipName, path: zipPath }] + }); + + // since this is intended to be a zero-footprint operation, clean up + // by unlinking both the backup generated earlier in the function and the compressed zip file. + // to generate a persistent backup, just run backup. + unlinkSync(zipPath); + rimraf.sync(targetPath); + + // indicate success or failure + mainLog(`${error === null ? green("successfully dispatched") : red("failed to dispatch")} ${zipName} to ${cyan(to)}`); + error && mainLog(red(error.message)); + } catch (error) { + mainLog(red("unable to dispatch zipped backup...")); + mainLog(red(error.message)); + } + } + +} + +export namespace DashSessionAgent { + + export const notificationRecipient = "brownptcdash@gmail.com"; + +}
\ No newline at end of file
diff --git a/src/server/DashSession/templates/crash_instructions.txt b/src/server/DashSession/templates/crash_instructions.txt
new file mode 100644
index 000000000..65417919d
--- /dev/null
+++ b/src/server/DashSession/templates/crash_instructions.txt
@@ -0,0 +1,14 @@
+You, as a Dash Administrator, are being notified of a server crash event. Here's what we know:
+
+name:
+__name__
+
+message:
+__message__
+
+stack:
+__stack__
+
+The server is already restarting itself, but if you're concerned, use the Remote Desktop Connection to monitor progress.
+
+__signature__
\ No newline at end of file
diff --git a/src/server/DashSession/templates/remote_debug_instructions.txt b/src/server/DashSession/templates/remote_debug_instructions.txt
new file mode 100644
index 000000000..c279c460a
--- /dev/null
+++ b/src/server/DashSession/templates/remote_debug_instructions.txt
@@ -0,0 +1,16 @@
+Instructions:
+
+Download this attachment, open your downloads folder and find this file (__zipname__).
+Right click on the zip file and select 'Extract to __target__\'.
+Open up the command line, and remember that you can get the path to any file or directory by literally dragging it from the file system and dropping it onto the terminal.
+Unless it's in your path, you'll want to navigate to the MongoDB bin directory, given for Windows:
+
+cd '/c/Program Files/MongoDB/Server/[your version, i.e. 4.0, goes here]/bin'
+
+Then run the following command (if you're in the bin folder, make that ./mongorestore ...):
+
+mongorestore --gzip [/path/to/directory/you/just/unzipped] --db Dash
+
+Assuming everything runs well, this will mirror your local database with that of the server. Now, just start the server locally and debug.
+
+__signature__
\ No newline at end of file diff --git a/src/server/GarbageCollector.ts b/src/server/GarbageCollector.ts index 09b52eadf..5729c3ee5 100644 --- a/src/server/GarbageCollector.ts +++ b/src/server/GarbageCollector.ts @@ -100,7 +100,7 @@ async function GarbageCollect(full: boolean = true) { if (!full) { await Database.Instance.updateMany({ _id: { $nin: notToDelete } }, { $set: { "deleted": true } }); await Database.Instance.updateMany({ _id: { $in: notToDelete } }, { $unset: { "deleted": true } }); - console.log(await Search.Instance.updateDocuments( + console.log(await Search.updateDocuments( notToDelete.map<any>(id => ({ id, deleted: { set: null } })) @@ -122,7 +122,7 @@ async function GarbageCollect(full: boolean = true) { // const result = await Database.Instance.delete({ _id: { $in: toDelete } }, "newDocuments"); console.log(`${deleted} documents deleted`); - await Search.Instance.deleteDocuments(toDelete); + await Search.deleteDocuments(toDelete); console.log("Cleared search documents"); const folder = "./src/server/public/files/"; diff --git a/src/server/IDatabase.ts b/src/server/IDatabase.ts new file mode 100644 index 000000000..6a63df485 --- /dev/null +++ b/src/server/IDatabase.ts @@ -0,0 +1,24 @@ +import * as mongodb from 'mongodb'; +import { Transferable } from './Message'; + +export const DocumentsCollection = 'documents'; +export const NewDocumentsCollection = 'newDocuments'; +export interface IDatabase { + update(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateWriteOpResult) => void, upsert?: boolean, collectionName?: string): Promise<void>; + updateMany(query: any, update: any, collectionName?: string): Promise<mongodb.WriteOpResult>; + + replace(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateWriteOpResult) => void, upsert?: boolean, collectionName?: string): void; + + delete(query: any, collectionName?: string): Promise<mongodb.DeleteWriteOpResultObject>; + delete(id: string, collectionName?: string): Promise<mongodb.DeleteWriteOpResultObject>; + + deleteAll(collectionName?: string, persist?: boolean): Promise<any>; + + insert(value: any, collectionName?: string): Promise<void>; + + getDocument(id: string, fn: (result?: Transferable) => void, collectionName?: string): void; + getDocuments(ids: string[], fn: (result: Transferable[]) => void, collectionName?: string): void; + visit(ids: string[], fn: (result: any) => string[] | Promise<string[]>, collectionName?: string): Promise<void>; + + query(query: { [key: string]: any }, projection?: { [key: string]: 0 | 1 }, collectionName?: string): Promise<mongodb.Cursor>; +} diff --git a/src/server/MemoryDatabase.ts b/src/server/MemoryDatabase.ts new file mode 100644 index 000000000..543f96e7f --- /dev/null +++ b/src/server/MemoryDatabase.ts @@ -0,0 +1,100 @@ +import { IDatabase, DocumentsCollection, NewDocumentsCollection } from './IDatabase'; +import { Transferable } from './Message'; +import * as mongodb from 'mongodb'; + +export class MemoryDatabase implements IDatabase { + + private db: { [collectionName: string]: { [id: string]: any } } = {}; + + private getCollection(collectionName: string) { + const collection = this.db[collectionName]; + if (collection) { + return collection; + } else { + return this.db[collectionName] = {}; + } + } + + public update(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateWriteOpResult) => void, _upsert?: boolean, collectionName = DocumentsCollection): Promise<void> { + const collection = 
this.getCollection(collectionName); + const set = "$set"; + if (set in value) { + let currentVal = collection[id] ?? (collection[id] = {}); + const val = value[set]; + for (const key in val) { + const keys = key.split("."); + for (let i = 0; i < keys.length - 1; i++) { + const k = keys[i]; + if (typeof currentVal[k] === "object") { + currentVal = currentVal[k]; + } else { + currentVal[k] = {}; + currentVal = currentVal[k]; + } + } + currentVal[keys[keys.length - 1]] = val[key]; + } + } else { + collection[id] = value; + } + callback(null as any, {} as any); + return Promise.resolve(undefined); + } + + public updateMany(query: any, update: any, collectionName = NewDocumentsCollection): Promise<mongodb.WriteOpResult> { + throw new Error("Can't updateMany a MemoryDatabase"); + } + + public replace(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateWriteOpResult) => void, upsert?: boolean, collectionName = DocumentsCollection): void { + this.update(id, value, callback, upsert, collectionName); + } + + public delete(query: any, collectionName?: string): Promise<mongodb.DeleteWriteOpResultObject>; + public delete(id: string, collectionName?: string): Promise<mongodb.DeleteWriteOpResultObject>; + public delete(id: any, collectionName = DocumentsCollection): Promise<mongodb.DeleteWriteOpResultObject> { + const i = id.id ?? id; + delete this.getCollection(collectionName)[i]; + + return Promise.resolve({} as any); + } + + public deleteAll(collectionName = DocumentsCollection, _persist = true): Promise<any> { + delete this.db[collectionName]; + return Promise.resolve(); + } + + public insert(value: any, collectionName = DocumentsCollection): Promise<void> { + const id = value.id; + this.getCollection(collectionName)[id] = value; + return Promise.resolve(); + } + + public getDocument(id: string, fn: (result?: Transferable) => void, collectionName = NewDocumentsCollection): void { + fn(this.getCollection(collectionName)[id]); + } + public getDocuments(ids: string[], fn: (result: Transferable[]) => void, collectionName = DocumentsCollection): void { + fn(ids.map(id => this.getCollection(collectionName)[id])); + } + + public async visit(ids: string[], fn: (result: any) => string[] | Promise<string[]>, collectionName = NewDocumentsCollection): Promise<void> { + const visited = new Set<string>(); + while (ids.length) { + const count = Math.min(ids.length, 1000); + const index = ids.length - count; + const fetchIds = ids.splice(index, count).filter(id => !visited.has(id)); + if (!fetchIds.length) { + continue; + } + const docs = await new Promise<{ [key: string]: any }[]>(res => this.getDocuments(fetchIds, res, collectionName)); + for (const doc of docs) { + const id = doc.id; + visited.add(id); + ids.push(...(await fn(doc))); + } + } + } + + public query(): Promise<mongodb.Cursor> { + throw new Error("Can't query a MemoryDatabase"); + } +} diff --git a/src/server/Message.ts b/src/server/Message.ts index aaee143e8..621abfd1e 100644 --- a/src/server/Message.ts +++ b/src/server/Message.ts @@ -50,6 +50,7 @@ export namespace MessageStore { export const GetFields = new Message<string[]>("Get Fields"); // send string[] of 'id' get Transferable[] back export const GetDocument = new Message<string>("Get Document"); export const DeleteAll = new Message<any>("Delete All"); + export const ConnectionTerminated = new Message<string>("Connection Terminated"); export const GetRefField = new Message<string>("Get Ref Field"); export const GetRefFields = new Message<string[]>("Get Ref Fields"); 
diff --git a/src/server/ProcessFactory.ts b/src/server/ProcessFactory.ts
new file mode 100644
index 000000000..acb8b3a99
--- /dev/null
+++ b/src/server/ProcessFactory.ts
@@ -0,0 +1,44 @@
+import { existsSync, mkdirSync } from "fs";
+import { pathFromRoot, fileDescriptorFromStream } from './ActionUtilities';
+import rimraf = require("rimraf");
+import { ChildProcess, spawn, StdioOptions } from "child_process";
+import { Stream } from "stream";
+
+export namespace ProcessFactory {
+
+    export type Sink = "pipe" | "ipc" | "ignore" | "inherit" | Stream | number | null | undefined;
+
+    export async function createWorker(command: string, args?: readonly string[], stdio?: StdioOptions | "logfile", detached = true): Promise<ChildProcess> {
+        if (stdio === "logfile") {
+            const log_fd = await Logger.create(command, args);
+            stdio = ["ignore", log_fd, log_fd];
+        }
+        const child = spawn(command, args, { detached, stdio });
+        child.unref();
+        return child;
+    }
+
+}
+
+export namespace Logger {
+
+    const logPath = pathFromRoot("./logs");
+
+    export async function initialize() {
+        if (existsSync(logPath)) {
+            if (!process.env.SPAWNED) {
+                await new Promise<any>(resolve => rimraf(logPath, resolve));
+            }
+        }
+        mkdirSync(logPath);
+    }
+
+    export async function create(command: string, args?: readonly string[]): Promise<number> {
+        return fileDescriptorFromStream(generate_log_path(command, args));
+    }
+
+    function generate_log_path(command: string, args?: readonly string[]) {
+        return pathFromRoot(`./logs/${command}-${args?.length}-${new Date().toUTCString()}.log`);
+    }
+
+}
\ No newline at end of file diff --git a/src/server/RouteManager.ts b/src/server/RouteManager.ts index 41204964e..d072b7709 100644 --- a/src/server/RouteManager.ts +++ b/src/server/RouteManager.ts @@ -1,6 +1,6 @@ import RouteSubscriber from "./RouteSubscriber"; import { DashUserModel } from "./authentication/models/user_model"; -import * as express from 'express'; +import { Request, Response, Express } from 'express'; import { cyan, red, green } from 'colors'; export enum Method { @@ -9,21 +9,22 @@ export enum Method { } export interface CoreArguments { - req: express.Request; - res: express.Response; + req: Request; + res: Response; isRelease: boolean; } -export type OnValidation = (core: CoreArguments & { user: DashUserModel }) => any | Promise<any>; -export type OnUnauthenticated = (core: CoreArguments) => any | Promise<any>; -export type OnError = (core: CoreArguments & { error: any }) => any | Promise<any>; +export type AuthorizedCore = CoreArguments & { user: DashUserModel }; +export type SecureHandler = (core: AuthorizedCore) => any | Promise<any>; +export type PublicHandler = (core: CoreArguments) => any | Promise<any>; +export type ErrorHandler = (core: CoreArguments & { error: any }) => any | Promise<any>; export interface RouteInitializer { method: Method; subscription: string | RouteSubscriber | (string | RouteSubscriber)[]; - onValidation: OnValidation; - onUnauthenticated?: OnUnauthenticated; - onError?: OnError; + secureHandler: SecureHandler; + publicHandler?: PublicHandler; + errorHandler?: ErrorHandler; } const registered = new Map<string, Set<Method>>(); @@ -34,7 +35,7 @@ enum RegistrationError { } export default class RouteManager { - private server: express.Express; + private server: Express; private _isRelease: boolean; private failedRegistrations: { route: string, reason: RegistrationError }[] = []; @@ -42,7 +43,7 @@ export default class RouteManager { return this._isRelease; } - constructor(server: express.Express, isRelease: boolean) { + constructor(server: Express, isRelease: boolean) { this.server = server; this._isRelease = isRelease; } @@ -67,10 +68,9 @@ export default class RouteManager { console.log('please remove all duplicate routes before continuing'); } if (malformedCount) { - console.log(`please ensure all routes adhere to ^\/[A-Za-z]+(\/\:[A-Za-z]+)*$`); + console.log(`please ensure all routes adhere to ^\/$|^\/[A-Za-z]+(\/\:[A-Za-z?]+)*$`); } - console.log(); - process.exit(0); + process.exit(1); } else { console.log(green("all server routes have been successfully registered:")); Array.from(registered.keys()).sort().forEach(route => console.log(cyan(route))); @@ -83,29 +83,34 @@ export default class RouteManager { * @param initializer */ addSupervisedRoute = (initializer: RouteInitializer): void => { - const { method, subscription, onValidation, onUnauthenticated, onError } = initializer; + const { method, subscription, secureHandler, publicHandler, errorHandler } = initializer; + const isRelease = this._isRelease; - const supervised = async (req: express.Request, res: express.Response) => { - const { user, originalUrl: target } = req; + const supervised = async (req: Request, res: Response) => { + let { user } = req; + const { originalUrl: target } = req; + if (process.env.DB === "MEM" && !user) { + user = { id: "guest", email: "", userDocumentId: "guestDocId" }; + } const core = { req, res, isRelease }; const tryExecute = async (toExecute: (args: any) => any | Promise<any>, args: any) => { try { await toExecute(args); } catch (e) { 
console.log(red(target), user && ("email" in user) ? "<user logged out>" : undefined); - if (onError) { - onError({ ...core, error: e }); + if (errorHandler) { + errorHandler({ ...core, error: e }); } else { _error(res, `The server encountered an internal error when serving ${target}.`, e); } } }; if (user) { - await tryExecute(onValidation, { ...core, user }); + await tryExecute(secureHandler, { ...core, user }); } else { req.session!.target = target; - if (onUnauthenticated) { - await tryExecute(onUnauthenticated, core); + if (publicHandler) { + await tryExecute(publicHandler, core); if (!res.headersSent) { res.redirect("/login"); } @@ -128,7 +133,7 @@ export default class RouteManager { } else { route = subscriber.build; } - if (!/^\/[A-Za-z]+(\/\:[A-Za-z]+)*$/g.test(route)) { + if (!/^\/$|^\/[A-Za-z]+(\/\:[A-Za-z?]+)*$/g.test(route)) { this.failedRegistrations.push({ reason: RegistrationError.Malformed, route @@ -174,22 +179,22 @@ export const STATUS = { PERMISSION_DENIED: 403 }; -export function _error(res: express.Response, message: string, error?: any) { +export function _error(res: Response, message: string, error?: any) { console.error(message); res.statusMessage = message; res.status(STATUS.EXECUTION_ERROR).send(error); } -export function _success(res: express.Response, body: any) { +export function _success(res: Response, body: any) { res.status(STATUS.OK).send(body); } -export function _invalid(res: express.Response, message: string) { +export function _invalid(res: Response, message: string) { res.statusMessage = message; res.status(STATUS.BAD_REQUEST).send(); } -export function _permission_denied(res: express.Response, message?: string) { +export function _permission_denied(res: Response, message?: string) { if (message) { res.statusMessage = message; } diff --git a/src/server/Search.ts b/src/server/Search.ts index 723dc101b..21064e520 100644 --- a/src/server/Search.ts +++ b/src/server/Search.ts @@ -1,14 +1,13 @@ import * as rp from 'request-promise'; -import { Database } from './database'; -import { thisExpression } from 'babel-types'; +import { red } from 'colors'; -export class Search { - public static Instance = new Search(); - private url = 'http://localhost:8983/solr/'; +const pathTo = (relative: string) => `http://localhost:8983/solr/dash/${relative}`; - public async updateDocument(document: any) { +export namespace Search { + + export async function updateDocument(document: any) { try { - const res = await rp.post(this.url + "dash/update", { + const res = await rp.post(pathTo("update"), { headers: { 'content-type': 'application/json' }, body: JSON.stringify([document]) }); @@ -18,9 +17,9 @@ export class Search { } } - public async updateDocuments(documents: any[]) { + export async function updateDocuments(documents: any[]) { try { - const res = await rp.post(this.url + "dash/update", { + const res = await rp.post(pathTo("update"), { headers: { 'content-type': 'application/json' }, body: JSON.stringify(documents) }); @@ -30,9 +29,9 @@ export class Search { } } - public async search(query: any) { + export async function search(query: any) { try { - const searchResults = JSON.parse(await rp.get(this.url + "dash/select", { + const searchResults = JSON.parse(await rp.get(pathTo("select"), { qs: query })); const { docs, numFound } = searchResults.response; @@ -43,9 +42,9 @@ export class Search { } } - public async clear() { + export async function clear() { try { - return await rp.post(this.url + "dash/update", { + await rp.post(pathTo("update"), { body: { delete: { query: 
"*:*" @@ -53,10 +52,13 @@ export class Search { }, json: true }); - } catch { } + } catch (e) { + console.log(red("Unable to clear search...")); + console.log(red(e.message)); + } } - public deleteDocuments(docs: string[]) { + export async function deleteDocuments(docs: string[]) { const promises: rp.RequestPromise[] = []; const nToDelete = 1000; let index = 0; @@ -64,7 +66,7 @@ export class Search { const count = Math.min(docs.length - index, nToDelete); const deleteIds = docs.slice(index, index + count); index += count; - promises.push(rp.post(this.url + "dash/update", { + promises.push(rp.post(pathTo("update"), { body: { delete: { query: deleteIds.map(id => `id:"${id}"`).join(" ") diff --git a/src/server/Websocket/Websocket.ts b/src/server/Websocket/Websocket.ts index 60c34aa44..6dda6956e 100644 --- a/src/server/Websocket/Websocket.ts +++ b/src/server/Websocket/Websocket.ts @@ -7,26 +7,30 @@ import { Search } from "../Search"; import * as io from 'socket.io'; import YoutubeApi from "../apis/youtube/youtubeApiSample"; import { GoogleCredentialsLoader } from "../credentials/CredentialsLoader"; -import { logPort, addBeforeExitHandler } from "../ActionUtilities"; +import { logPort } from "../ActionUtilities"; import { timeMap } from "../ApiManagers/UserManager"; import { green } from "colors"; export namespace WebSocket { + export let _socket: Socket; const clients: { [key: string]: Client } = {}; export const socketMap = new Map<SocketIO.Socket, string>(); + export let disconnect: Function; - export async function start(serverPort: number, isRelease: boolean) { + export async function start(isRelease: boolean) { await preliminaryFunctions(); - initialize(serverPort, isRelease); + initialize(isRelease); } async function preliminaryFunctions() { } - export function initialize(socketPort: number, isRelease: boolean) { + function initialize(isRelease: boolean) { const endpoint = io(); - endpoint.on("connection", function (socket: Socket) { + endpoint.on("connection", function(socket: Socket) { + _socket = socket; + socket.use((_packet, next) => { const userEmail = socketMap.get(socket); if (userEmail) { @@ -52,8 +56,14 @@ export namespace WebSocket { Utils.AddServerHandler(socket, MessageStore.DeleteFields, ids => DeleteFields(socket, ids)); Utils.AddServerHandlerCallback(socket, MessageStore.GetRefField, GetRefField); Utils.AddServerHandlerCallback(socket, MessageStore.GetRefFields, GetRefFields); + + disconnect = () => { + socket.broadcast.emit("connection_terminated", Date.now()); + socket.disconnect(true); + }; }); - addBeforeExitHandler(async () => { await new Promise<void>(resolve => endpoint.close(resolve)); }); + + const socketPort = isRelease ? 
diff --git a/src/server/Websocket/Websocket.ts b/src/server/Websocket/Websocket.ts
index 60c34aa44..6dda6956e 100644
--- a/src/server/Websocket/Websocket.ts
+++ b/src/server/Websocket/Websocket.ts
@@ -7,26 +7,30 @@ import { Search } from "../Search";
import * as io from 'socket.io';
import YoutubeApi from "../apis/youtube/youtubeApiSample";
import { GoogleCredentialsLoader } from "../credentials/CredentialsLoader";
-import { logPort, addBeforeExitHandler } from "../ActionUtilities";
+import { logPort } from "../ActionUtilities";
import { timeMap } from "../ApiManagers/UserManager";
import { green } from "colors";

export namespace WebSocket {

+    export let _socket: Socket;
    const clients: { [key: string]: Client } = {};
    export const socketMap = new Map<SocketIO.Socket, string>();
+    export let disconnect: Function;

-    export async function start(serverPort: number, isRelease: boolean) {
+    export async function start(isRelease: boolean) {
        await preliminaryFunctions();
-        initialize(serverPort, isRelease);
+        initialize(isRelease);
    }

    async function preliminaryFunctions() {
    }

-    export function initialize(socketPort: number, isRelease: boolean) {
+    function initialize(isRelease: boolean) {
        const endpoint = io();
-        endpoint.on("connection", function (socket: Socket) {
+        endpoint.on("connection", function(socket: Socket) {
+            _socket = socket;
+
            socket.use((_packet, next) => {
                const userEmail = socketMap.get(socket);
                if (userEmail) {
@@ -52,8 +56,14 @@ export namespace WebSocket {
            Utils.AddServerHandler(socket, MessageStore.DeleteFields, ids => DeleteFields(socket, ids));
            Utils.AddServerHandlerCallback(socket, MessageStore.GetRefField, GetRefField);
            Utils.AddServerHandlerCallback(socket, MessageStore.GetRefFields, GetRefFields);
+
+            disconnect = () => {
+                socket.broadcast.emit("connection_terminated", Date.now());
+                socket.disconnect(true);
+            };
        });
-        addBeforeExitHandler(async () => { await new Promise<void>(resolve => endpoint.close(resolve)); });
+
+        const socketPort = isRelease ? Number(process.env.socketPort) : 4321;
        endpoint.listen(socketPort);
        logPort("websocket", socketPort);
    }
@@ -73,7 +83,9 @@ export namespace WebSocket {
    export async function deleteFields() {
        await Database.Instance.deleteAll();
-        await Search.Instance.clear();
+        if (process.env.DISABLE_SEARCH !== "true") {
+            await Search.clear();
+        }
        await Database.Instance.deleteAll('newDocuments');
    }

@@ -82,7 +94,9 @@ export namespace WebSocket {
        await Database.Instance.deleteAll('newDocuments');
        await Database.Instance.deleteAll('sessions');
        await Database.Instance.deleteAll('users');
-        await Search.Instance.clear();
+        if (process.env.DISABLE_SEARCH !== "true") {
+            await Search.clear();
+        }
    }

    function barReceived(socket: SocketIO.Socket, userEmail: string) {
@@ -104,7 +118,7 @@ export namespace WebSocket {
        Database.Instance.update(newValue.id, newValue, () =>
            socket.broadcast.emit(MessageStore.SetField.Message, newValue));
        if (newValue.type === Types.Text) {
-            Search.Instance.updateDocument({ id: newValue.id, data: (newValue as any).data });
+            Search.updateDocument({ id: newValue.id, data: (newValue as any).data });
            console.log("set field");
            console.log("checking in");
        }
@@ -127,6 +141,7 @@ export namespace WebSocket {
        "pdf": ["_t", "url"],
        "audio": ["_t", "url"],
        "web": ["_t", "url"],
+        "RichTextField": ["_t", value => value.Text],
        "date": ["_d", value => new Date(value.date).toISOString()],
        "proxy": ["_i", "fieldId"],
        "list": ["_l", list => {
@@ -171,7 +186,7 @@ export namespace WebSocket {
    function UpdateField(socket: Socket, diff: Diff) {
        Database.Instance.update(diff.id, diff.diff, () =>
            socket.broadcast.emit(MessageStore.UpdateField.Message, diff), false, "newDocuments");
-        const docfield = diff.diff.$set;
+        const docfield = diff.diff.$set || diff.diff.$unset;
        if (!docfield) {
            return;
        }
@@ -190,7 +205,7 @@ export namespace WebSocket {
            }
        }
        if (dynfield) {
-            Search.Instance.updateDocument(update);
+            Search.updateDocument(update);
        }
    }

@@ -199,16 +214,14 @@ export namespace WebSocket {
            socket.broadcast.emit(MessageStore.DeleteField.Message, id);
        });

-        Search.Instance.deleteDocuments([id]);
+        Search.deleteDocuments([id]);
    }

    function DeleteFields(socket: Socket, ids: string[]) {
        Database.Instance.delete({ _id: { $in: ids } }, "newDocuments").then(() => {
            socket.broadcast.emit(MessageStore.DeleteFields.Message, ids);
        });
-
-        Search.Instance.deleteDocuments(ids);
-
+        Search.deleteDocuments(ids);
    }

    function CreateField(newValue: any) {
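
A rough sketch, not part of this changeset, of how a client could observe the "connection_terminated" broadcast that the new disconnect closure emits. It assumes socket.io-client and the development port 4321 used above when isRelease is false; the URL and handler body are illustrative only.

import * as io from "socket.io-client";

// connect to the websocket endpoint (4321 is the development port chosen above)
const socket = io("http://localhost:4321");

// the server's disconnect() closure broadcasts a timestamp before closing the socket
socket.on("connection_terminated", (timestamp: number) => {
    console.log(`server terminated the connection at ${new Date(timestamp).toISOString()}`);
});
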
diff --git a/src/server/authentication/config/passport.ts b/src/server/authentication/config/passport.ts
index 0ced99b0d..286209b20 100644
--- a/src/server/authentication/config/passport.ts
+++ b/src/server/authentication/config/passport.ts
@@ -1,8 +1,6 @@
import * as passport from 'passport';
import * as passportLocal from 'passport-local';
-import _ from "lodash";
import { default as User } from '../models/user_model';
-import { Request, Response, NextFunction } from "express";

const LocalStrategy = passportLocal.Strategy;

@@ -28,21 +26,4 @@ passport.use(new LocalStrategy({ usernameField: 'email', passReqToCallback: true
            return done(undefined, user);
        });
    });
-}));
-
-export let isAuthenticated = (req: Request, res: Response, next: NextFunction) => {
-    if (req.isAuthenticated()) {
-        return next();
-    }
-    return res.redirect("/login");
-};
-
-export let isAuthorized = (req: Request, res: Response, next: NextFunction) => {
-    const provider = req.path.split("/").slice(-1)[0];
-
-    if (_.find(req.user && "tokens" in req.user ? req.user["tokens"] : undefined, { kind: provider })) {
-        next();
-    } else {
-        res.redirect(`/auth/${provider}`);
-    }
-};
\ No newline at end of file
+}));
\ No newline at end of file
diff --git a/src/server/authentication/models/current_user_utils.ts b/src/server/authentication/models/current_user_utils.ts
index 50bfb4832..36d4cd2f2 100644
--- a/src/server/authentication/models/current_user_utils.ts
+++ b/src/server/authentication/models/current_user_utils.ts
@@ -9,7 +9,7 @@ import { Doc, DocListCast } from "../../../new_fields/Doc";
import { List } from "../../../new_fields/List";
import { listSpec } from "../../../new_fields/Schema";
import { ScriptField, ComputedField } from "../../../new_fields/ScriptField";
-import { Cast, PromiseValue } from "../../../new_fields/Types";
+import { Cast, PromiseValue, StrCast } from "../../../new_fields/Types";
import { Utils } from "../../../Utils";
import { nullAudio } from "../../../new_fields/URLField";
import { DragManager } from "../../../client/util/DragManager";
@@ -41,12 +41,13 @@ export class CurrentUserUtils {
    }

    // setup the "creator" buttons for the sidebar-- eg. the default set of draggable document creation tools
-    static setupCreatorButtons(doc: Doc) {
+    static setupCreatorButtons(doc: Doc, buttons?: string[]) {
        const notes = CurrentUserUtils.setupNoteTypes(doc);
        doc.noteTypes = Docs.Create.TreeDocument(notes, { title: "Note Types", height: 75 });
        doc.activePen = doc;
        const docProtoData: { title: string, icon: string, drag?: string, ignoreClick?: boolean, click?: string, ischecked?: string, activePen?: Doc, backgroundColor?: string, dragFactory?: Doc }[] = [
            { title: "collection", icon: "folder", ignoreClick: true, drag: 'Docs.Create.FreeformDocument([], { nativeWidth: undefined, nativeHeight: undefined, width: 150, height: 100, title: "freeform" })' },
+            { title: "preview", icon: "expand", ignoreClick: true, drag: 'Docs.Create.DocumentDocument(ComputedField.MakeFunction("selectedDocs(this,true,[_last_])?.[0]"), { width: 250, height: 250, title: "container" })' },
            { title: "todo item", icon: "check", ignoreClick: true, drag: 'getCopy(this.dragFactory, true)', dragFactory: notes[notes.length - 1] },
            { title: "web page", icon: "globe-asia", ignoreClick: true, drag: 'Docs.Create.WebDocument("https://en.wikipedia.org/wiki/Hedgehog", { width: 300, height: 300, title: "New Webpage" })' },
            { title: "cat image", icon: "cat", ignoreClick: true, drag: 'Docs.Create.ImageDocument("https://upload.wikimedia.org/wikipedia/commons/thumb/3/3a/Cat03.jpg/1200px-Cat03.jpg", { width: 200, title: "an image of a cat" })' },
@@ -60,7 +61,7 @@ export class CurrentUserUtils {
            { title: "use scrubber", icon: "eraser", click: 'activateScrubber(this.activePen.pen = sameDocs(this.activePen.pen, this) ? undefined : this);', ischecked: `sameDocs(this.activePen.pen, this)`, backgroundColor: "green", activePen: doc },
            { title: "use drag", icon: "mouse-pointer", click: 'deactivateInk();this.activePen.pen = this;', ischecked: `sameDocs(this.activePen.pen, this)`, backgroundColor: "white", activePen: doc },
        ];
-        return docProtoData.map(data => Docs.Create.FontIconDocument({
+        return docProtoData.filter(d => !buttons || !buttons.includes(d.title)).map(data => Docs.Create.FontIconDocument({
            nativeWidth: 100, nativeHeight: 100, width: 100, height: 100, dropAction: data.click ? "copy" : undefined,
            title: data.title, icon: data.icon, ignoreClick: data.ignoreClick, onDragStart: data.drag ? ScriptField.MakeFunction(data.drag) : undefined, onClick: data.click ? ScriptField.MakeScript(data.click) : undefined,
            ischecked: data.ischecked ? ComputedField.MakeFunction(data.ischecked) : undefined, activePen: data.activePen,
@@ -68,6 +69,27 @@ export class CurrentUserUtils {
        }));
    }

+    static async updateCreatorButtons(doc: Doc) {
+        const toolsBtn = await Cast(doc.ToolsBtn, Doc);
+        if (toolsBtn) {
+            const stackingDoc = await Cast(toolsBtn.sourcePanel, Doc);
+            if (stackingDoc) {
+                const stackdocs = await Cast(stackingDoc.data, listSpec(Doc));
+                if (stackdocs) {
+                    const dragset = await Cast(stackdocs[0], Doc);
+                    if (dragset) {
+                        const dragdocs = await Cast(dragset.data, listSpec(Doc));
+                        if (dragdocs) {
+                            const dragDocs = await Promise.all(dragdocs);
+                            const newButtons = this.setupCreatorButtons(doc, dragDocs.map(d => StrCast(d.title)));
+                            newButtons.map(nb => Doc.AddDocToList(dragset, "data", nb));
+                        }
+                    }
+                }
+            }
+        }
+    }
+
    // setup the Creator button which will display the creator panel.  This panel will include the drag creators and the color picker. when clicked, this panel will be displayed in the target container (ie, sidebarContainer)
    static setupToolsPanel(sidebarContainer: Doc, doc: Doc) {
        // setup a masonry view of all he creators
@@ -202,6 +224,7 @@ export class CurrentUserUtils {
        doc.undoBtn && reaction(() => UndoManager.undoStack.slice(), () => Doc.GetProto(doc.undoBtn as Doc).opacity = UndoManager.CanUndo() ? 1 : 0.4, { fireImmediately: true });
        doc.redoBtn && reaction(() => UndoManager.redoStack.slice(), () => Doc.GetProto(doc.redoBtn as Doc).opacity = UndoManager.CanRedo() ? 1 : 0.4, { fireImmediately: true });

+        this.updateCreatorButtons(doc);
        return doc;
    }

@@ -299,4 +322,4 @@ export class CurrentUserUtils {
        };
        return recurs([] as Attribute[], schema ? schema.rootAttributeGroup : undefined);
    }
-}
\ No newline at end of file
+}
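
A quick illustration of the new buttons parameter (a hypothetical call, not in the diff, and doc stands in for the user's document): the titles passed in are the creator buttons that already exist in the sidebar, so setupCreatorButtons skips them and only returns the missing ones.

// returns creator buttons for every entry in docProtoData except the listed titles
const missingButtons = CurrentUserUtils.setupCreatorButtons(doc, ["cat image", "web page"]);
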
diff --git a/src/server/authentication/models/user_model.ts b/src/server/authentication/models/user_model.ts
index cc670a03a..78e39dbc1 100644
--- a/src/server/authentication/models/user_model.ts
+++ b/src/server/authentication/models/user_model.ts
@@ -73,7 +73,11 @@ userSchema.pre("save", function save(next) {
});

const comparePassword: comparePasswordFunction = function (this: DashUserModel, candidatePassword, cb) {
+    // Choose one of the following bodies for authentication logic.
+    // secure
    bcrypt.compare(candidatePassword, this.password, cb);
+    // bypass password
+    // cb(undefined, true);
};

userSchema.methods.comparePassword = comparePassword;
diff --git a/src/server/database.ts b/src/server/database.ts
index 5bdf1fc45..83ce865c6 100644
--- a/src/server/database.ts
+++ b/src/server/database.ts
@@ -5,11 +5,13 @@ import { Utils, emptyFunction } from '../Utils';
import { DashUploadUtils } from './DashUploadUtils';
import { Credentials } from 'google-auth-library';
import { GoogleApiServerUtils } from './apis/google/GoogleApiServerUtils';
+import { IDatabase } from './IDatabase';
+import { MemoryDatabase } from './MemoryDatabase';
import * as mongoose from 'mongoose';
-import { addBeforeExitHandler } from './ActionUtilities';

export namespace Database {

+    export let disconnect: Function;
    const schema = 'Dash';
    const port = 27017;
    export const url = `mongodb://localhost:${port}/${schema}`;
@@ -25,7 +27,7 @@ export namespace Database {
    export async function tryInitializeConnection() {
        try {
            const { connection } = mongoose;
-            addBeforeExitHandler(async () => { await new Promise<any>(resolve => connection.close(resolve)); });
+            disconnect = async () => new Promise<any>(resolve => connection.close(resolve));
            if (connection.readyState === ConnectionStates.disconnected) {
                await new Promise<void>((resolve, reject) => {
                    connection.on('error', reject);
@@ -44,7 +46,7 @@ export namespace Database {
        }
    }

-    class Database {
+    class Database implements IDatabase {
        public static DocumentsCollection = 'documents';
        private MongoClient = mongodb.MongoClient;
        private currentWrites: { [id: string]: Promise<void> } = {};
@@ -215,7 +217,7 @@ export namespace Database {
            if (!fetchIds.length) {
                continue;
            }
-            const docs = await new Promise<{ [key: string]: any }[]>(res => Instance.getDocuments(fetchIds, res, "newDocuments"));
+            const docs = await new Promise<{ [key: string]: any }[]>(res => this.getDocuments(fetchIds, res, collectionName));
            for (const doc of docs) {
                const id = doc.id;
                visited.add(id);
@@ -262,7 +264,16 @@ export namespace Database {
        }
    }

-    export const Instance = new Database();
+    function getDatabase() {
+        switch (process.env.DB) {
+            case "MEM":
+                return new MemoryDatabase();
+            default:
+                return new Database();
+        }
+    }
+
+    export const Instance: IDatabase = getDatabase();

    export namespace Auxiliary {

@@ -331,4 +342,4 @@ export namespace Database {

    }

-}
\ No newline at end of file
+}
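
For context, and not part of the changeset: since Database.Instance is now typed against IDatabase, call sites do not care which backing store getDatabase() selected. A minimal sketch, assuming the process was started with DB=MEM so the in-memory implementation is picked; the helper name is invented, and the collection names come from the calls shown elsewhere in this diff.

import { Database } from "./database";

// Works identically whether process.env.DB selected MemoryDatabase or MongoDB,
// because only the IDatabase contract is visible here.
async function wipeTransientCollections() {
    await Database.Instance.deleteAll("sessions");
    await Database.Instance.deleteAll("newDocuments");
}
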
diff --git a/src/server/index.ts b/src/server/index.ts
index cef6ff476..313a2f0e2 100644
--- a/src/server/index.ts
+++ b/src/server/index.ts
@@ -3,14 +3,13 @@ import { GoogleApiServerUtils } from "./apis/google/GoogleApiServerUtils";
import * as mobileDetect from 'mobile-detect';
import * as path from 'path';
import { Database } from './database';
-const serverPort = 4321;
import { DashUploadUtils } from './DashUploadUtils';
import RouteSubscriber from './RouteSubscriber';
-import initializeServer from './Initialization';
-import RouteManager, { Method, _success, _permission_denied, _error, _invalid, OnUnauthenticated } from './RouteManager';
+import initializeServer from './server_Initialization';
+import RouteManager, { Method, _success, _permission_denied, _error, _invalid, PublicHandler } from './RouteManager';
import * as qs from 'query-string';
import UtilManager from './ApiManagers/UtilManager';
-import SearchManager from './ApiManagers/SearchManager';
+import { SearchManager } from './ApiManagers/SearchManager';
import UserManager from './ApiManagers/UserManager';
import { WebSocket } from './Websocket/Websocket';
import DownloadManager from './ApiManagers/DownloadManager';
@@ -21,9 +20,14 @@ import UploadManager from "./ApiManagers/UploadManager";
import { log_execution } from "./ActionUtilities";
import GeneralGoogleManager from "./ApiManagers/GeneralGoogleManager";
import GooglePhotosManager from "./ApiManagers/GooglePhotosManager";
-import DiagnosticManager from "./ApiManagers/DiagnosticManager";
+import { Logger } from "./ProcessFactory";
import { yellow } from "colors";
+import { DashSessionAgent } from "./DashSession/DashSessionAgent";
+import SessionManager from "./ApiManagers/SessionManager";
+import { AppliedSessionAgent } from "resilient-server-session";

+export const onWindows = process.platform === "win32";
+export let sessionAgent: AppliedSessionAgent;
export const publicDirectory = path.resolve(__dirname, "public");
export const filesDirectory = path.resolve(publicDirectory, "files");

@@ -33,14 +37,17 @@ export const filesDirectory = path.resolve(publicDirectory, "files");
 * before clients can access the server should be run or awaited here.
 */
async function preliminaryFunctions() {
+    await Logger.initialize();
    await GoogleCredentialsLoader.loadCredentials();
    GoogleApiServerUtils.processProjectCredentials();
    await DashUploadUtils.buildFileDirectories();
-    await log_execution({
-        startMessage: "attempting to initialize mongodb connection",
-        endMessage: "connection outcome determined",
-        action: Database.tryInitializeConnection
-    });
+    if (process.env.DB !== "MEM") {
+        await log_execution({
+            startMessage: "attempting to initialize mongodb connection",
+            endMessage: "connection outcome determined",
+            action: Database.tryInitializeConnection
+        });
+    }
}

/**
@@ -54,10 +61,10 @@ async function preliminaryFunctions() {
 */
function routeSetter({ isRelease, addSupervisedRoute, logRegistrationOutcome }: RouteManager) {
    const managers = [
+        new SessionManager(),
        new UserManager(),
        new UploadManager(),
        new DownloadManager(),
-        new DiagnosticManager(),
        new SearchManager(),
        new PDFManager(),
        new DeleteManager(),
@@ -69,11 +76,6 @@ function routeSetter({ isRelease, addSupervisedRoute, logRegistrationOutcome }:
    // initialize API Managers
    console.log(yellow("\nregistering server routes..."));
    managers.forEach(manager => manager.register(addSupervisedRoute));
-    logRegistrationOutcome();
-
-    // initialize the web socket (bidirectional communication: if a user changes
-    // a field on one client, that change must be broadcast to all other clients)
-    WebSocket.initialize(serverPort, isRelease);

    /**
     * Accessing root index redirects to home
     */
    addSupervisedRoute({
        method: Method.GET,
        subscription: "/",
-        onValidation: ({ res }) => res.redirect("/home")
+        secureHandler: ({ res }) => res.redirect("/home")
    });

-    const serve: OnUnauthenticated = ({ req, res }) => {
+    addSupervisedRoute({
+        method: Method.GET,
+        subscription: "/serverHeartbeat",
+        secureHandler: ({ res }) => res.send(true)
+    });
+
+    const serve: PublicHandler = ({ req, res }) => {
        const detector = new mobileDetect(req.headers['user-agent'] || "");
        const filename = detector.mobile() !== null ? 'mobile/image.html' : 'index.html';
        res.sendFile(path.join(__dirname, '../../deploy/' + filename));
    };

    addSupervisedRoute({
        method: Method.GET,
        subscription: ["/home", new RouteSubscriber("doc").add("docId")],
-        onValidation: serve,
-        onUnauthenticated: ({ req, ...remaining }) => {
+        secureHandler: serve,
+        publicHandler: ({ req, ...remaining }) => {
            const { originalUrl: target } = req;
            const sharing = qs.parse(qs.extract(req.originalUrl), { sort: false }).sharing === "true";
            const docAccess = target.startsWith("/doc/");
@@ -103,13 +111,37 @@ function routeSetter({ isRelease, addSupervisedRoute, logRegistrationOutcome }:
            }
        }
    });
+
+    logRegistrationOutcome();
+
+    // initialize the web socket (bidirectional communication: if a user changes
+    // a field on one client, that change must be broadcast to all other clients)
+    WebSocket.start(isRelease);
}

-(async function start() {
+/**
+ * This function can be used in two different ways. If not in release mode,
+ * this is simply the logic that is invoked to start the server. In release mode,
+ * however, this becomes the logic invoked by a single worker thread spawned by
+ * the main monitor (master) thread.
+ */
+export async function launchServer() {
    await log_execution({
        startMessage: "\nstarting execution of preliminary functions",
        endMessage: "completed preliminary functions\n",
        action: preliminaryFunctions
    });
-    await initializeServer({ serverPort: 1050, routeSetter });
-})();
+    await initializeServer(routeSetter);
+}
+
+/**
+ * If you're in development mode, you won't need to run a session.
+ * The session spawns off new server processes each time an error is encountered, and doesn't
+ * log the output of the server process, so it's not ideal for development.
+ * So, the 'else' clause is exactly what we've always run when executing npm start.
+ */
+if (process.env.RELEASE) {
+    (sessionAgent = new DashSessionAgent()).launch();
+} else {
+    launchServer();
+}
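
To make the renamed route options concrete, here is a hypothetical registration (the /example subscription and both handler bodies are invented; secureHandler replaces the old onValidation field and publicHandler replaces onUnauthenticated):

addSupervisedRoute({
    method: Method.GET,
    subscription: "/example",
    // invoked only when the request carries an authenticated user
    secureHandler: ({ user, res }) => _success(res, { email: (user as any).email }),
    // optional fallback for unauthenticated visitors
    publicHandler: ({ res }) => res.redirect("/login")
});
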
diff --git a/src/server/remapUrl.ts b/src/server/remapUrl.ts
index 5218a239a..45d2fdd33 100644
--- a/src/server/remapUrl.ts
+++ b/src/server/remapUrl.ts
@@ -54,7 +54,7 @@ async function update() {
    }));
    console.log("Done");
    // await Promise.all(updates.map(update => {
-    //     return limit(() => Search.Instance.updateDocument(update));
+    //     return limit(() => Search.updateDocument(update));
    // }));
    cursor.close();
}
diff --git a/src/server/Initialization.ts b/src/server/server_Initialization.ts
index a41e2fea0..9f67c1dda 100644
--- a/src/server/Initialization.ts
+++ b/src/server/server_Initialization.ts
@@ -19,32 +19,39 @@ import * as fs from 'fs';
import * as request from 'request';
import RouteSubscriber from './RouteSubscriber';
import { publicDirectory } from '.';
-import { logPort, addBeforeExitHandler } from './ActionUtilities';
+import { logPort, } from './ActionUtilities';
import { timeMap } from './ApiManagers/UserManager';
import { blue, yellow } from 'colors';
+import * as cors from "cors";

/* RouteSetter is a wrapper around the server that prevents the server from being exposed. */
export type RouteSetter = (server: RouteManager) => void;

-export interface InitializationOptions {
-    serverPort: number;
-    routeSetter: RouteSetter;
-}
+export let disconnect: Function;

-export default async function InitializeServer(options: InitializationOptions) {
-    const { serverPort, routeSetter } = options;
+export default async function InitializeServer(routeSetter: RouteSetter) {
    const app = buildWithMiddleware(express());

-    app.use(express.static(publicDirectory));
+    app.use(express.static(publicDirectory, {
+        setHeaders: res => res.setHeader("Access-Control-Allow-Origin", "*")
+    }));
    app.use("/images", express.static(publicDirectory));
-
-    app.use("*", ({ user, originalUrl }, _res, next) => {
-        if (!originalUrl.includes("Heartbeat")) {
-            const userEmail = user && ("email" in user) ? user["email"] : undefined;
+    const corsOptions = {
+        origin: function (_origin: any, callback: any) {
+            callback(null, true);
+        }
+    };
+    app.use(cors(corsOptions));
+    app.use("*", ({ user, originalUrl }, res, next) => {
+        if (user && !originalUrl.includes("Heartbeat")) {
+            const userEmail = (user as any).email;
            if (userEmail) {
                timeMap[userEmail] = Date.now();
            }
        }
+        if (!user && originalUrl === "/") {
+            return res.redirect("/login");
+        }
        next();
    });

@@ -55,13 +62,16 @@ export default async function InitializeServer(options: InitializationOptions)
    registerCorsProxy(app);

    const isRelease = determineEnvironment();
+    routeSetter(new RouteManager(app, isRelease));
+    const serverPort = isRelease ? Number(process.env.serverPort) : 1050;
    const server = app.listen(serverPort, () => {
-        logPort("server", serverPort);
+        logPort("server", Number(serverPort));
        console.log();
    });
-    addBeforeExitHandler(async () => { await new Promise<Error>(resolve => server.close(resolve)); });
+    disconnect = async () => new Promise<Error>(resolve => server.close(resolve));
+    return isRelease;

}

@@ -76,7 +86,7 @@ function buildWithMiddleware(server: express.Express) {
        resave: true,
        cookie: { maxAge: week },
        saveUninitialized: true,
-        store: new MongoStore({ url: Database.url })
+        store: process.env.DB === "MEM" ? new session.MemoryStore() : new MongoStore({ url: Database.url })
    }),
    flash(),
    expressFlash(),
@@ -142,4 +152,4 @@ function registerCorsProxy(server: express.Express) {
        });
    }).pipe(res);
    });
-}
\ No newline at end of file
+}
diff --git a/src/server/updateSearch.ts b/src/server/updateSearch.ts
new file mode 100644
index 000000000..83094d36a
--- /dev/null
+++ b/src/server/updateSearch.ts
@@ -0,0 +1,121 @@
+import { Database } from "./database";
+import { Search } from "./Search";
+import { log_execution } from "./ActionUtilities";
+import { cyan, green, yellow, red } from "colors";
+
+const suffixMap: { [type: string]: (string | [string, string | ((json: any) => any)]) } = {
+    "number": "_n",
+    "string": "_t",
+    "boolean": "_b",
+    "image": ["_t", "url"],
+    "video": ["_t", "url"],
+    "pdf": ["_t", "url"],
+    "audio": ["_t", "url"],
+    "web": ["_t", "url"],
+    "date": ["_d", value => new Date(value.date).toISOString()],
+    "proxy": ["_i", "fieldId"],
+    "list": ["_l", list => {
+        const results = [];
+        for (const value of list.fields) {
+            const term = ToSearchTerm(value);
+            if (term) {
+                results.push(term.value);
+            }
+        }
+        return results.length ? results : null;
+    }]
+};
+
+function ToSearchTerm(val: any): { suffix: string, value: any } | undefined {
+    if (val === null || val === undefined) {
+        return;
+    }
+    const type = val.__type || typeof val;
+    let suffix = suffixMap[type];
+    if (!suffix) {
+        return;
+    }
+
+    if (Array.isArray(suffix)) {
+        const accessor = suffix[1];
+        if (typeof accessor === "function") {
+            val = accessor(val);
+        } else {
+            val = val[accessor];
+        }
+        suffix = suffix[0];
+    }
+
+    return { suffix, value: val };
+}
+
+async function update() {
+    console.log(green("Beginning update..."));
+    await log_execution<void>({
+        startMessage: "Clearing existing Solr information...",
+        endMessage: "Solr information successfully cleared",
+        action: Search.clear,
+        color: cyan
+    });
+    const cursor = await log_execution({
+        startMessage: "Connecting to and querying for all documents from database...",
+        endMessage: ({ result, error }) => {
+            const success = error === null && result !== undefined;
+            if (!success) {
+                console.log(red("Unable to connect to the database."));
+                process.exit(0);
+            }
+            return "Connection successful and query complete";
+        },
+        action: () => Database.Instance.query({}),
+        color: yellow
+    });
+    const updates: any[] = [];
+    let numDocs = 0;
+    function updateDoc(doc: any) {
+        numDocs++;
+        if ((numDocs % 50) === 0) {
+            console.log(`Batch of 50 complete, total of ${numDocs}`);
+        }
+        if (doc.__type !== "Doc") {
+            return;
+        }
+        const fields = doc.fields;
+        if (!fields) {
+            return;
+        }
+        const update: any = { id: doc._id };
+        let dynfield = false;
+        for (const key in fields) {
+            const value = fields[key];
+            const term = ToSearchTerm(value);
+            if (term !== undefined) {
+                const { suffix, value } = term;
+                update[key + suffix] = value;
+                dynfield = true;
+            }
+        }
+        if (dynfield) {
+            updates.push(update);
+        }
+    }
+    await cursor?.forEach(updateDoc);
+    const result = await log_execution({
+        startMessage: `Dispatching updates for ${updates.length} documents`,
+        endMessage: "Dispatched updates complete",
+        action: () => Search.updateDocuments(updates),
+        color: cyan
+    });
+    try {
+        const { status } = JSON.parse(result).responseHeader;
+        console.log(status ? red(`Failed with status code (${status})`) : green("Success!"));
+    } catch {
+        console.log(red("Error:"));
+        console.log(result);
+        console.log("\n");
+    }
+    await cursor?.close();
+    process.exit(0);
+}
+
+update();
\ No newline at end of file
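
A worked example of the suffix mapping in updateSearch.ts above (illustrative only; the record and its field names are invented): a Doc carrying a string field and a number field is flattened into a single Solr update object whose keys gain dynamic-field suffixes.

// hypothetical record as it would come back from Database.Instance.query({})
const doc = { _id: "abc123", __type: "Doc", fields: { title: "hello", count: 5 } };

// what updateDoc(doc) accumulates for this record:
//   "hello" is a string -> suffix "_t" -> title_t
//   5 is a number       -> suffix "_n" -> count_n
const expectedUpdate = { id: "abc123", title_t: "hello", count_n: 5 };
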
