Diffstat (limited to 'src/server')
26 files changed, 948 insertions, 1169 deletions
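The hunks below repeatedly apply the same two dependency migrations: rimraf's callback signature is replaced by the promise-returning named export (`import { rimraf } from 'rimraf'`), and formidable's mutable `keepExtensions`/`uploadDir` fields move into the v3 constructor options, with each parsed form field now yielding an array of files. A minimal sketch of both patterns, for orientation only; the `prune` and `handleUpload` names and the logging are illustrative assumptions, not code from this commit:

```typescript
import { rimraf } from 'rimraf'; // v4+: named export that returns a Promise
import * as formidable from 'formidable';
import type { IncomingMessage } from 'http';

// rimraf v4 drops the rimraf(path, callback) form the old Prune() relied on:
async function prune(rootDirectory: string): Promise<boolean> {
    try {
        await rimraf(rootDirectory); // resolves once removal completes, rejects on failure
        return true;
    } catch {
        return false;
    }
}

// formidable v3 takes its options in the constructor, and files[key] is an
// array, which is why the hunks below switch to f[0] and read .filepath /
// .originalFilename instead of the old .path / .name fields:
function handleUpload(req: IncomingMessage, uploadDir: string) {
    const form = new formidable.IncomingForm({ keepExtensions: true, uploadDir });
    form.parse(req, (err, _fields, files) => {
        if (err) return console.log(err.message);
        for (const key in files) {
            files[key]?.forEach(f => console.log(f.filepath, f.originalFilename));
        }
    });
}
```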
diff --git a/src/server/ActionUtilities.ts b/src/server/ActionUtilities.ts
index bc8fd6f87..55b50cc12 100644
--- a/src/server/ActionUtilities.ts
+++ b/src/server/ActionUtilities.ts
@@ -4,9 +4,9 @@ import { createWriteStream, exists, mkdir, readFile, unlink, writeFile } from 'f
 import * as nodemailer from "nodemailer";
 import { MailOptions } from "nodemailer/lib/json-transport";
 import * as path from 'path';
-import * as rimraf from "rimraf";
+import { rimraf } from "rimraf";
 import { ExecOptions } from 'shelljs';
-import Mail = require('nodemailer/lib/mailer');
+import * as Mail from 'nodemailer/lib/mailer';

 const projectRoot = path.resolve(__dirname, "../../");
 export function pathFromRoot(relative?: string) {
@@ -103,8 +103,10 @@ export const createIfNotExists = async (path: string) => {
 };

 export async function Prune(rootDirectory: string): Promise<boolean> {
-    const error = await new Promise<Error>(resolve => rimraf(rootDirectory, resolve));
-    return error === null;
+    // const error = await new Promise<Error>(resolve => rimraf(rootDirectory).then(resolve));
+    await new Promise<void>(resolve => rimraf(rootDirectory).then(() => resolve()));
+    // return error === null;
+    return true;
 }

 export const Destroy = (mediaPath: string) => new Promise<boolean>(resolve => unlink(mediaPath, error => resolve(error === null)));
diff --git a/src/server/ApiManagers/DeleteManager.ts b/src/server/ApiManagers/DeleteManager.ts
index 46c0d8a8a..c6c4ca464 100644
--- a/src/server/ApiManagers/DeleteManager.ts
+++ b/src/server/ApiManagers/DeleteManager.ts
@@ -1,21 +1,19 @@
-import ApiManager, { Registration } from "./ApiManager";
-import { Method, _permission_denied } from "../RouteManager";
-import { WebSocket } from "../websocket";
-import { Database } from "../database";
-import rimraf = require("rimraf");
-import { filesDirectory } from "..";
-import { DashUploadUtils } from "../DashUploadUtils";
-import { mkdirSync } from "fs";
-import RouteSubscriber from "../RouteSubscriber";
+import ApiManager, { Registration } from './ApiManager';
+import { Method, _permission_denied } from '../RouteManager';
+import { WebSocket } from '../websocket';
+import { Database } from '../database';
+import { rimraf } from 'rimraf';
+import { filesDirectory } from '..';
+import { DashUploadUtils } from '../DashUploadUtils';
+import { mkdirSync } from 'fs';
+import RouteSubscriber from '../RouteSubscriber';

 export default class DeleteManager extends ApiManager {
-
     protected initialize(register: Registration): void {
-
         register({
             method: Method.GET,
             requireAdminInRelease: true,
-            subscription: new RouteSubscriber("delete").add("target?"),
+            subscription: new RouteSubscriber('delete').add('target?'),
             secureHandler: async ({ req, res }) => {
                 const { target } = req.params;
@@ -24,12 +22,12 @@ export default class DeleteManager extends ApiManager {
                 } else {
                     let all = false;
                     switch (target) {
-                        case "all":
+                        case 'all':
                             all = true;
-                        case "database":
+                        case 'database':
                             await WebSocket.doDelete(false);
                             if (!all) break;
-                        case "files":
+                        case 'files':
                             rimraf.sync(filesDirectory);
                             mkdirSync(filesDirectory);
                             await DashUploadUtils.buildFileDirectories();
@@ -39,10 +37,8 @@
                 }
             }

-            res.redirect("/home");
-        }
+            res.redirect('/home');
+        },
         });
-    }
-
-}
\ No newline at end of file +} diff --git a/src/server/ApiManagers/GooglePhotosManager.ts b/src/server/ApiManagers/GooglePhotosManager.ts index be17b698e..5feb25fd4 100644 --- a/src/server/ApiManagers/GooglePhotosManager.ts +++ b/src/server/ApiManagers/GooglePhotosManager.ts @@ -1,331 +1,324 @@ -import ApiManager, { Registration } from "./ApiManager"; -import { Method, _error, _success, _invalid } from "../RouteManager"; -import * as path from "path"; -import { GoogleApiServerUtils } from "../apis/google/GoogleApiServerUtils"; -import { BatchedArray, TimeUnit } from "array-batcher"; -import { Opt } from "../../fields/Doc"; -import { DashUploadUtils, InjectSize, SizeSuffix } from "../DashUploadUtils"; -import { Database } from "../database"; -import { red } from "colors"; -import { Upload } from "../SharedMediaTypes"; -import request = require('request-promise'); -import { NewMediaItemResult } from "../apis/google/SharedTypes"; +// import ApiManager, { Registration } from './ApiManager'; +// import { Method, _error, _success, _invalid } from '../RouteManager'; +// import * as path from 'path'; +// import { GoogleApiServerUtils } from '../apis/google/GoogleApiServerUtils'; +// import { BatchedArray, TimeUnit } from 'array-batcher'; +// import { Opt } from '../../fields/Doc'; +// import { DashUploadUtils, InjectSize, SizeSuffix } from '../DashUploadUtils'; +// import { Database } from '../database'; +// import { red } from 'colors'; +// import { Upload } from '../SharedMediaTypes'; +// import * as request from 'request-promise'; +// import { NewMediaItemResult } from '../apis/google/SharedTypes'; -const prefix = "google_photos_"; -const remoteUploadError = "None of the preliminary uploads to Google's servers was successful."; -const authenticationError = "Unable to authenticate Google credentials before uploading to Google Photos!"; -const mediaError = "Unable to convert all uploaded bytes to media items!"; -const localUploadError = (count: number) => `Unable to upload ${count} images to Dash's server`; -const requestError = "Unable to execute download: the body's media items were malformed."; -const downloadError = "Encountered an error while executing downloads."; +// const prefix = 'google_photos_'; +// const remoteUploadError = "None of the preliminary uploads to Google's servers was successful."; +// const authenticationError = 'Unable to authenticate Google credentials before uploading to Google Photos!'; +// const mediaError = 'Unable to convert all uploaded bytes to media items!'; +// const localUploadError = (count: number) => `Unable to upload ${count} images to Dash's server`; +// const requestError = "Unable to execute download: the body's media items were malformed."; +// const downloadError = 'Encountered an error while executing downloads.'; -interface GooglePhotosUploadFailure { - batch: number; - index: number; - url: string; - reason: string; -} +// interface GooglePhotosUploadFailure { +// batch: number; +// index: number; +// url: string; +// reason: string; +// } -interface MediaItem { - baseUrl: string; -} +// interface MediaItem { +// baseUrl: string; +// } -interface NewMediaItem { - description: string; - simpleMediaItem: { - uploadToken: string; - }; -} +// interface NewMediaItem { +// description: string; +// simpleMediaItem: { +// uploadToken: string; +// }; +// } -/** - * This manager handles the creation of routes for google photos functionality. 
- */ -export default class GooglePhotosManager extends ApiManager { +// /** +// * This manager handles the creation of routes for google photos functionality. +// */ +// export default class GooglePhotosManager extends ApiManager { +// protected initialize(register: Registration): void { +// /** +// * This route receives a list of urls that point to images stored +// * on Dash's file system, and, in a two step process, uploads them to Google's servers and +// * returns the information Google generates about the associated uploaded remote images. +// */ +// register({ +// method: Method.POST, +// subscription: '/googlePhotosMediaPost', +// secureHandler: async ({ user, req, res }) => { +// const { media } = req.body; - protected initialize(register: Registration): void { +// // first we need to ensure that we know the google account to which these photos will be uploaded +// const token = (await GoogleApiServerUtils.retrieveCredentials(user.id))?.credentials?.access_token; +// if (!token) { +// return _error(res, authenticationError); +// } - /** - * This route receives a list of urls that point to images stored - * on Dash's file system, and, in a two step process, uploads them to Google's servers and - * returns the information Google generates about the associated uploaded remote images. - */ - register({ - method: Method.POST, - subscription: "/googlePhotosMediaPost", - secureHandler: async ({ user, req, res }) => { - const { media } = req.body; +// // next, having one large list or even synchronously looping over things trips a threshold +// // set on Google's servers, and would instantly return an error. So, we ease things out and send the photos to upload in +// // batches of 25, where the next batch is sent 100 millieconds after we receive a response from Google's servers. +// const failed: GooglePhotosUploadFailure[] = []; +// const batched = BatchedArray.from<Uploader.UploadSource>(media, { batchSize: 25 }); +// const interval = { magnitude: 100, unit: TimeUnit.Milliseconds }; +// const newMediaItems = await batched.batchedMapPatientInterval<NewMediaItem>(interval, async (batch, collector, { completedBatches }) => { +// for (let index = 0; index < batch.length; index++) { +// const { url, description } = batch[index]; +// // a local function used to record failure of an upload +// const fail = (reason: string) => failed.push({ reason, batch: completedBatches + 1, index, url }); +// // see image resizing - we store the size-agnostic url in our logic, but write out size-suffixed images to the file system +// // so here, given a size agnostic url, we're just making that conversion so that the file system knows which bytes to actually upload +// const imageToUpload = InjectSize(url, SizeSuffix.Original); +// // STEP 1/2: send the raw bytes of the image from our server to Google's servers. 
We'll get back an upload token +// // which acts as a pointer to those bytes that we can use to locate them later on +// const uploadToken = await Uploader.SendBytes(token, imageToUpload).catch(fail); +// if (!uploadToken) { +// fail(`${path.extname(url)} is not an accepted extension`); +// } else { +// // gather the upload token return from Google (a pointer they give us to the raw, currently useless bytes +// // we've uploaded to their servers) and put in the JSON format that the API accepts for image creation (used soon, below) +// collector.push({ +// description, +// simpleMediaItem: { uploadToken }, +// }); +// } +// } +// }); - // first we need to ensure that we know the google account to which these photos will be uploaded - const token = (await GoogleApiServerUtils.retrieveCredentials(user.id))?.credentials?.access_token; - if (!token) { - return _error(res, authenticationError); - } +// // inform the developer / server console of any failed upload attempts +// // does not abort the operation, since some subset of the uploads may have been successful +// const { length } = failed; +// if (length) { +// console.error(`Unable to upload ${length} image${length === 1 ? '' : 's'} to Google's servers`); +// console.log(failed.map(({ reason, batch, index, url }) => `@${batch}.${index}: ${url} failed:\n${reason}`).join('\n\n')); +// } - // next, having one large list or even synchronously looping over things trips a threshold - // set on Google's servers, and would instantly return an error. So, we ease things out and send the photos to upload in - // batches of 25, where the next batch is sent 100 millieconds after we receive a response from Google's servers. - const failed: GooglePhotosUploadFailure[] = []; - const batched = BatchedArray.from<Uploader.UploadSource>(media, { batchSize: 25 }); - const interval = { magnitude: 100, unit: TimeUnit.Milliseconds }; - const newMediaItems = await batched.batchedMapPatientInterval<NewMediaItem>( - interval, - async (batch, collector, { completedBatches }) => { - for (let index = 0; index < batch.length; index++) { - const { url, description } = batch[index]; - // a local function used to record failure of an upload - const fail = (reason: string) => failed.push({ reason, batch: completedBatches + 1, index, url }); - // see image resizing - we store the size-agnostic url in our logic, but write out size-suffixed images to the file system - // so here, given a size agnostic url, we're just making that conversion so that the file system knows which bytes to actually upload - const imageToUpload = InjectSize(url, SizeSuffix.Original); - // STEP 1/2: send the raw bytes of the image from our server to Google's servers. 
We'll get back an upload token - // which acts as a pointer to those bytes that we can use to locate them later on - const uploadToken = await Uploader.SendBytes(token, imageToUpload).catch(fail); - if (!uploadToken) { - fail(`${path.extname(url)} is not an accepted extension`); - } else { - // gather the upload token return from Google (a pointer they give us to the raw, currently useless bytes - // we've uploaded to their servers) and put in the JSON format that the API accepts for image creation (used soon, below) - collector.push({ - description, - simpleMediaItem: { uploadToken } - }); - } - } - } - ); +// // if none of the preliminary uploads was successful, no need to try and create images +// // report the failure to the client and return +// if (!newMediaItems.length) { +// console.error(red(`${remoteUploadError} Thus, aborting image creation. Please try again.`)); +// _error(res, remoteUploadError); +// return; +// } - // inform the developer / server console of any failed upload attempts - // does not abort the operation, since some subset of the uploads may have been successful - const { length } = failed; - if (length) { - console.error(`Unable to upload ${length} image${length === 1 ? "" : "s"} to Google's servers`); - console.log(failed.map(({ reason, batch, index, url }) => `@${batch}.${index}: ${url} failed:\n${reason}`).join('\n\n')); - } +// // STEP 2/2: create the media items and return the API's response to the client, along with any failures +// return Uploader.CreateMediaItems(token, newMediaItems, req.body.album).then( +// results => _success(res, { results, failed }), +// error => _error(res, mediaError, error) +// ); +// }, +// }); - // if none of the preliminary uploads was successful, no need to try and create images - // report the failure to the client and return - if (!newMediaItems.length) { - console.error(red(`${remoteUploadError} Thus, aborting image creation. Please try again.`)); - _error(res, remoteUploadError); - return; - } +// /** +// * This route receives a list of urls that point to images +// * stored on Google's servers and (following a *rough* heuristic) +// * uploads each image to Dash's server if it hasn't already been uploaded. +// * Unfortunately, since Google has so many of these images on its servers, +// * these user content urls expire every 6 hours. So we can't store the url of a locally uploaded +// * Google image and compare the candidate url to it to figure out if we already have it, +// * since the same bytes on their server might now be associated with a new, random url. +// * So, we do the next best thing and try to use an intrinsic attribute of those bytes as +// * an identifier: the precise content size. This works in small cases, but has the obvious flaw of failing to upload +// * an image locally if we already have uploaded another Google user content image with the exact same content size. 
+// */ +// register({ +// method: Method.POST, +// subscription: '/googlePhotosMediaGet', +// secureHandler: async ({ req, res }) => { +// const { mediaItems } = req.body as { mediaItems: MediaItem[] }; +// if (!mediaItems) { +// // non-starter, since the input was in an invalid format +// _invalid(res, requestError); +// return; +// } +// let failed = 0; +// const completed: Opt<Upload.ImageInformation>[] = []; +// for (const { baseUrl } of mediaItems) { +// // start by getting the content size of the remote image +// const results = await DashUploadUtils.InspectImage(baseUrl); +// if (results instanceof Error) { +// // if something went wrong here, we can't hope to upload it, so just move on to the next +// failed++; +// continue; +// } +// const { contentSize, ...attributes } = results; +// // check to see if we have uploaded a Google user content image *specifically via this route* already +// // that has this exact content size +// const found: Opt<Upload.ImageInformation> = await Database.Auxiliary.QueryUploadHistory(contentSize); +// if (!found) { +// // if we haven't, then upload it locally to Dash's server +// const upload = await DashUploadUtils.UploadInspectedImage({ contentSize, ...attributes }, undefined, prefix, false).catch(error => _error(res, downloadError, error)); +// if (upload) { +// completed.push(upload); +// // inform the heuristic that we've encountered an image with this content size, +// // to be later checked against in future uploads +// await Database.Auxiliary.LogUpload(upload); +// } else { +// // make note of a failure to upload locallys +// failed++; +// } +// } else { +// // if we have, the variable 'found' is handily the upload information of the +// // existing image, so we add it to the list as if we had just uploaded it now without actually +// // making a duplicate write +// completed.push(found); +// } +// } +// // if there are any failures, report a general failure to the client +// if (failed) { +// return _error(res, localUploadError(failed)); +// } +// // otherwise, return the image upload information list corresponding to the newly (or previously) +// // uploaded images +// _success(res, completed); +// }, +// }); +// } +// } - // STEP 2/2: create the media items and return the API's response to the client, along with any failures - return Uploader.CreateMediaItems(token, newMediaItems, req.body.album).then( - results => _success(res, { results, failed }), - error => _error(res, mediaError, error) - ); - } - }); +// /** +// * This namespace encompasses the logic +// * necessary to upload images to Google's server, +// * and then initialize / create those images in the Photos +// * API given the upload tokens returned from the initial +// * uploading process. +// * +// * https://developers.google.com/photos/library/reference/rest/v1/mediaItems/batchCreate +// */ +// export namespace Uploader { +// /** +// * Specifies the structure of the object +// * necessary to upload bytes to Google's servers. +// * The url is streamed to access the image's bytes, +// * and the description is what appears in Google Photos' +// * description field. +// */ +// export interface UploadSource { +// url: string; +// description: string; +// } - /** - * This route receives a list of urls that point to images - * stored on Google's servers and (following a *rough* heuristic) - * uploads each image to Dash's server if it hasn't already been uploaded. 
- * Unfortunately, since Google has so many of these images on its servers, - * these user content urls expire every 6 hours. So we can't store the url of a locally uploaded - * Google image and compare the candidate url to it to figure out if we already have it, - * since the same bytes on their server might now be associated with a new, random url. - * So, we do the next best thing and try to use an intrinsic attribute of those bytes as - * an identifier: the precise content size. This works in small cases, but has the obvious flaw of failing to upload - * an image locally if we already have uploaded another Google user content image with the exact same content size. - */ - register({ - method: Method.POST, - subscription: "/googlePhotosMediaGet", - secureHandler: async ({ req, res }) => { - const { mediaItems } = req.body as { mediaItems: MediaItem[] }; - if (!mediaItems) { - // non-starter, since the input was in an invalid format - _invalid(res, requestError); - return; - } - let failed = 0; - const completed: Opt<Upload.ImageInformation>[] = []; - for (const { baseUrl } of mediaItems) { - // start by getting the content size of the remote image - const results = await DashUploadUtils.InspectImage(baseUrl); - if (results instanceof Error) { - // if something went wrong here, we can't hope to upload it, so just move on to the next - failed++; - continue; - } - const { contentSize, ...attributes } = results; - // check to see if we have uploaded a Google user content image *specifically via this route* already - // that has this exact content size - const found: Opt<Upload.ImageInformation> = await Database.Auxiliary.QueryUploadHistory(contentSize); - if (!found) { - // if we haven't, then upload it locally to Dash's server - const upload = await DashUploadUtils.UploadInspectedImage({ contentSize, ...attributes }, undefined, prefix, false).catch(error => _error(res, downloadError, error)); - if (upload) { - completed.push(upload); - // inform the heuristic that we've encountered an image with this content size, - // to be later checked against in future uploads - await Database.Auxiliary.LogUpload(upload); - } else { - // make note of a failure to upload locallys - failed++; - } - } else { - // if we have, the variable 'found' is handily the upload information of the - // existing image, so we add it to the list as if we had just uploaded it now without actually - // making a duplicate write - completed.push(found); - } - } - // if there are any failures, report a general failure to the client - if (failed) { - return _error(res, localUploadError(failed)); - } - // otherwise, return the image upload information list corresponding to the newly (or previously) - // uploaded images - _success(res, completed); - } - }); +// /** +// * This is the format needed to pass +// * into the BatchCreate API request +// * to take a reference to raw uploaded bytes +// * and actually create an image in Google Photos. +// * +// * So, to instantiate this interface you must have already dispatched an upload +// * and received an upload token. +// */ +// export interface NewMediaItem { +// description: string; +// simpleMediaItem: { +// uploadToken: string; +// }; +// } - } -} +// /** +// * A utility function to streamline making +// * calls to the API's url - accentuates +// * the relative path in the caller. 
+// * @param extension the desired +// * subset of the API +// */ +// function prepend(extension: string): string { +// return `https://photoslibrary.googleapis.com/v1/${extension}`; +// } -/** - * This namespace encompasses the logic - * necessary to upload images to Google's server, - * and then initialize / create those images in the Photos - * API given the upload tokens returned from the initial - * uploading process. - * - * https://developers.google.com/photos/library/reference/rest/v1/mediaItems/batchCreate - */ -export namespace Uploader { +// /** +// * Factors out the creation of the API request's +// * authentication elements stored in the header. +// * @param type the contents of the request +// * @param token the user-specific Google access token +// */ +// function headers(type: string, token: string) { +// return { +// 'Content-Type': `application/${type}`, +// Authorization: `Bearer ${token}`, +// }; +// } - /** - * Specifies the structure of the object - * necessary to upload bytes to Google's servers. - * The url is streamed to access the image's bytes, - * and the description is what appears in Google Photos' - * description field. - */ - export interface UploadSource { - url: string; - description: string; - } +// /** +// * This is the first step in the remote image creation process. +// * Here we upload the raw bytes of the image to Google's servers by +// * setting authentication and other required header properties and including +// * the raw bytes to the image, to be uploaded, in the body of the request. +// * @param bearerToken the user-specific Google access token, specifies the account associated +// * with the eventual image creation +// * @param url the url of the image to upload +// * @param filename an optional name associated with the uploaded image - if not specified +// * defaults to the filename (basename) in the url +// */ +// export const SendBytes = async (bearerToken: string, url: string, filename?: string): Promise<any> => { +// // check if the url points to a non-image or an unsupported format +// if (!DashUploadUtils.validateExtension(url)) { +// return undefined; +// } +// const body = await request(url, { encoding: null }); // returns a readable stream with the unencoded binary image data +// const parameters = { +// method: 'POST', +// uri: prepend('uploads'), +// headers: { +// ...headers('octet-stream', bearerToken), +// 'X-Goog-Upload-File-Name': filename || path.basename(url), +// 'X-Goog-Upload-Protocol': 'raw', +// }, +// body, +// }; +// return new Promise((resolve, reject) => +// request(parameters, (error, _response, body) => { +// if (error) { +// // on rejection, the server logs the error and the offending image +// return reject(error); +// } +// resolve(body); +// }) +// ); +// }; - /** - * This is the format needed to pass - * into the BatchCreate API request - * to take a reference to raw uploaded bytes - * and actually create an image in Google Photos. - * - * So, to instantiate this interface you must have already dispatched an upload - * and received an upload token. - */ - export interface NewMediaItem { - description: string; - simpleMediaItem: { - uploadToken: string; - }; - } - - /** - * A utility function to streamline making - * calls to the API's url - accentuates - * the relative path in the caller. 
- * @param extension the desired - * subset of the API - */ - function prepend(extension: string): string { - return `https://photoslibrary.googleapis.com/v1/${extension}`; - } - - /** - * Factors out the creation of the API request's - * authentication elements stored in the header. - * @param type the contents of the request - * @param token the user-specific Google access token - */ - function headers(type: string, token: string) { - return { - 'Content-Type': `application/${type}`, - 'Authorization': `Bearer ${token}`, - }; - } - - /** - * This is the first step in the remote image creation process. - * Here we upload the raw bytes of the image to Google's servers by - * setting authentication and other required header properties and including - * the raw bytes to the image, to be uploaded, in the body of the request. - * @param bearerToken the user-specific Google access token, specifies the account associated - * with the eventual image creation - * @param url the url of the image to upload - * @param filename an optional name associated with the uploaded image - if not specified - * defaults to the filename (basename) in the url - */ - export const SendBytes = async (bearerToken: string, url: string, filename?: string): Promise<any> => { - // check if the url points to a non-image or an unsupported format - if (!DashUploadUtils.validateExtension(url)) { - return undefined; - } - const body = await request(url, { encoding: null }); // returns a readable stream with the unencoded binary image data - const parameters = { - method: 'POST', - uri: prepend('uploads'), - headers: { - ...headers('octet-stream', bearerToken), - 'X-Goog-Upload-File-Name': filename || path.basename(url), - 'X-Goog-Upload-Protocol': 'raw' - }, - body - }; - return new Promise((resolve, reject) => request(parameters, (error, _response, body) => { - if (error) { - // on rejection, the server logs the error and the offending image - return reject(error); - } - resolve(body); - })); - }; - - /** - * This is the second step in the remote image creation process: having uploaded - * the raw bytes of the image and received / stored pointers (upload tokens) to those - * bytes, we can now instruct the API to finalize the creation of those images by - * submitting a batch create request with the list of upload tokens and the description - * to be associated with reach resulting new image. - * @param bearerToken the user-specific Google access token, specifies the account associated - * with the eventual image creation - * @param newMediaItems a list of objects containing a description and, effectively, the - * pointer to the uploaded bytes - * @param album if included, will add all of the newly created remote images to the album - * with the specified id - */ - export const CreateMediaItems = async (bearerToken: string, newMediaItems: NewMediaItem[], album?: { id: string }): Promise<NewMediaItemResult[]> => { - // it's important to note that the API can't handle more than 50 items in each request and - // seems to need at least some latency between requests (spamming it synchronously has led to the server returning errors)... 
- const batched = BatchedArray.from(newMediaItems, { batchSize: 50 }); - // ...so we execute them in delayed batches and await the entire execution - return batched.batchedMapPatientInterval( - { magnitude: 100, unit: TimeUnit.Milliseconds }, - async (batch: NewMediaItem[], collector): Promise<void> => { - const parameters = { - method: 'POST', - headers: headers('json', bearerToken), - uri: prepend('mediaItems:batchCreate'), - body: { newMediaItems: batch } as any, - json: true - }; - // register the target album, if provided - album && (parameters.body.albumId = album.id); - collector.push(...(await new Promise<NewMediaItemResult[]>((resolve, reject) => { - request(parameters, (error, _response, body) => { - if (error) { - reject(error); - } else { - resolve(body.newMediaItemResults); - } - }); - }))); - } - ); - }; - -}
\ No newline at end of file +// /** +// * This is the second step in the remote image creation process: having uploaded +// * the raw bytes of the image and received / stored pointers (upload tokens) to those +// * bytes, we can now instruct the API to finalize the creation of those images by +// * submitting a batch create request with the list of upload tokens and the description +// * to be associated with reach resulting new image. +// * @param bearerToken the user-specific Google access token, specifies the account associated +// * with the eventual image creation +// * @param newMediaItems a list of objects containing a description and, effectively, the +// * pointer to the uploaded bytes +// * @param album if included, will add all of the newly created remote images to the album +// * with the specified id +// */ +// export const CreateMediaItems = async (bearerToken: string, newMediaItems: NewMediaItem[], album?: { id: string }): Promise<NewMediaItemResult[]> => { +// // it's important to note that the API can't handle more than 50 items in each request and +// // seems to need at least some latency between requests (spamming it synchronously has led to the server returning errors)... +// const batched = BatchedArray.from(newMediaItems, { batchSize: 50 }); +// // ...so we execute them in delayed batches and await the entire execution +// return batched.batchedMapPatientInterval({ magnitude: 100, unit: TimeUnit.Milliseconds }, async (batch: NewMediaItem[], collector): Promise<void> => { +// const parameters = { +// method: 'POST', +// headers: headers('json', bearerToken), +// uri: prepend('mediaItems:batchCreate'), +// body: { newMediaItems: batch } as any, +// json: true, +// }; +// // register the target album, if provided +// album && (parameters.body.albumId = album.id); +// collector.push( +// ...(await new Promise<NewMediaItemResult[]>((resolve, reject) => { +// request(parameters, (error, _response, body) => { +// if (error) { +// reject(error); +// } else { +// resolve(body.newMediaItemResults); +// } +// }); +// })) +// ); +// }); +// }; +// } diff --git a/src/server/ApiManagers/PDFManager.ts b/src/server/ApiManagers/PDFManager.ts deleted file mode 100644 index e419d3ac4..000000000 --- a/src/server/ApiManagers/PDFManager.ts +++ /dev/null @@ -1,116 +0,0 @@ -import ApiManager, { Registration } from "./ApiManager"; -import { Method } from "../RouteManager"; -import RouteSubscriber from "../RouteSubscriber"; -import { existsSync, createReadStream, createWriteStream } from "fs"; -import * as Pdfjs from 'pdfjs-dist/legacy/build/pdf'; -import { createCanvas } from "canvas"; -const imageSize = require("probe-image-size"); -import * as express from "express"; -import * as path from "path"; -import { Directory, serverPathToFile, clientPathToFile, pathToDirectory } from "./UploadManager"; -import { red } from "colors"; -import { resolve } from "path"; - -export default class PDFManager extends ApiManager { - - protected initialize(register: Registration): void { - - register({ - method: Method.POST, - subscription: new RouteSubscriber("thumbnail"), - secureHandler: async ({ req, res }) => { - const { coreFilename, pageNum, subtree } = req.body; - return getOrCreateThumbnail(coreFilename, pageNum, res, subtree); - } - }); - - } - -} - -async function getOrCreateThumbnail(coreFilename: string, pageNum: number, res: express.Response, subtree?: string): Promise<void> { - const resolved = `${coreFilename}-${pageNum}.png`; - return new Promise<void>(async resolve => { - const path = 
serverPathToFile(Directory.pdf_thumbnails, resolved); - if (existsSync(path)) { - const existingThumbnail = createReadStream(path); - const { err, viewport } = await new Promise<any>(resolve => { - imageSize(existingThumbnail, (err: any, viewport: any) => resolve({ err, viewport })); - }); - if (err) { - console.log(red(`In PDF thumbnail response, unable to determine dimensions of ${resolved}:`)); - console.log(err); - return; - } - dispatchThumbnail(res, viewport, resolved); - } else { - await CreateThumbnail(coreFilename, pageNum, res, subtree); - } - resolve(); - }); -} - -async function CreateThumbnail(coreFilename: string, pageNum: number, res: express.Response, subtree?: string) { - const part1 = subtree ?? ""; - const filename = `${part1}${coreFilename}.pdf`; - const sourcePath = resolve(pathToDirectory(Directory.pdfs), filename); - const documentProxy = await Pdfjs.getDocument(sourcePath).promise; - const factory = new NodeCanvasFactory(); - const page = await documentProxy.getPage(pageNum); - const viewport = page.getViewport({ scale: 1, rotation: 0, dontFlip: false }); - const { canvas, context } = factory.create(viewport.width, viewport.height); - const renderContext = { - canvasContext: context, - canvasFactory: factory, - viewport - }; - await page.render(renderContext).promise; - const pngStream = canvas.createPNGStream(); - const resolved = `${coreFilename}-${pageNum}.png`; - const pngFile = serverPathToFile(Directory.pdf_thumbnails, resolved); - const out = createWriteStream(pngFile); - pngStream.pipe(out); - return new Promise<void>((resolve, reject) => { - out.on("finish", () => { - dispatchThumbnail(res, viewport, resolved); - resolve(); - }); - out.on("error", error => { - console.log(red(`In PDF thumbnail creation, encountered the following error when piping ${pngFile}:`)); - console.log(error); - reject(); - }); - }); -} - -function dispatchThumbnail(res: express.Response, { width, height }: Pdfjs.PageViewport, thumbnailName: string) { - res.send({ - path: clientPathToFile(Directory.pdf_thumbnails, thumbnailName), - width, - height - }); -} - -class NodeCanvasFactory { - - create = (width: number, height: number) => { - const canvas = createCanvas(width, height); - const context = canvas.getContext('2d'); - return { - canvas, - context - }; - } - - reset = (canvasAndContext: any, width: number, height: number) => { - canvasAndContext.canvas.width = width; - canvasAndContext.canvas.height = height; - } - - destroy = (canvasAndContext: any) => { - canvasAndContext.canvas.width = 0; - canvasAndContext.canvas.height = 0; - canvasAndContext.canvas = null; - canvasAndContext.context = null; - } -} diff --git a/src/server/ApiManagers/UploadManager.ts b/src/server/ApiManagers/UploadManager.ts index ea5d8cb33..9b0457a25 100644 --- a/src/server/ApiManagers/UploadManager.ts +++ b/src/server/ApiManagers/UploadManager.ts @@ -1,7 +1,7 @@ import * as formidable from 'formidable'; import { createReadStream, createWriteStream, unlink, writeFile } from 'fs'; -import { basename, dirname, extname, normalize } from 'path'; -import * as sharp from 'sharp'; +import * as path from 'path'; +import Jimp from 'jimp'; import { filesDirectory, publicDirectory } from '..'; import { retrocycle } from '../../decycler/decycler'; import { DashUploadUtils, InjectSize, SizeSuffix } from '../DashUploadUtils'; @@ -11,7 +11,7 @@ import RouteSubscriber from '../RouteSubscriber'; import { AcceptableMedia, Upload } from '../SharedMediaTypes'; import ApiManager, { Registration } from './ApiManager'; import { 
SolrManager } from './SearchManager'; -import v4 = require('uuid/v4'); +import * as uuid from 'uuid'; import { DashVersion } from '../../fields/DocSymbols'; const AdmZip = require('adm-zip'); const imageDataUri = require('image-data-uri'); @@ -29,11 +29,11 @@ export enum Directory { } export function serverPathToFile(directory: Directory, filename: string) { - return normalize(`${filesDirectory}/${directory}/${filename}`); + return path.normalize(`${filesDirectory}/${directory}/${filename}`); } export function pathToDirectory(directory: Directory) { - return normalize(`${filesDirectory}/${directory}`); + return path.normalize(`${filesDirectory}/${directory}`); } export function clientPathToFile(directory: Directory, filename: string) { @@ -63,7 +63,7 @@ export default class UploadManager extends ApiManager { method: Method.POST, subscription: '/uploadFormData', secureHandler: async ({ req, res }) => { - const form = new formidable.IncomingForm(); + const form = new formidable.IncomingForm({ keepExtensions: true, uploadDir: pathToDirectory(Directory.parsed_files) }); let fileguids = ''; let filesize = ''; form.on('field', (e: string, value: string) => { @@ -74,28 +74,32 @@ export default class UploadManager extends ApiManager { filesize = value; } }); + fileguids.split(';').map(guid => DashUploadUtils.uploadProgress.set(guid, `upload starting`)); + form.on('progress', e => fileguids.split(';').map(guid => DashUploadUtils.uploadProgress.set(guid, `read:(${Math.round((100 * +e) / +filesize)}%) ${e} of ${filesize}`))); - form.keepExtensions = true; - form.uploadDir = pathToDirectory(Directory.parsed_files); return new Promise<void>(resolve => { form.parse(req, async (_err, _fields, files) => { const results: Upload.FileResponse[] = []; if (_err?.message) { results.push({ source: { + filepath: '', + originalFilename: 'none', + newFilename: 'none', + mimetype: 'text', size: 0, - path: 'none', - name: 'none', - type: 'none', - toJSON: () => ({ name: 'none', path: '' }), + hashAlgorithm: 'md5', + toJSON: () => ({ name: 'none', size: 0, length: 0, mtime: new Date(), filepath: '', originalFilename: 'none', newFilename: 'none', mimetype: 'text' }), }, result: { name: 'failed upload', message: `${_err.message}` }, }); } + fileguids.split(';').map(guid => DashUploadUtils.uploadProgress.set(guid, `resampling images`)); + for (const key in files) { const f = files[key]; - if (!Array.isArray(f)) { - const result = await DashUploadUtils.upload(f, key); // key is the guid used by the client to track upload progress. + if (f) { + const result = await DashUploadUtils.upload(f[0], key); // key is the guid used by the client to track upload progress. 
result && !(result.result instanceof Error) && results.push(result); } } @@ -164,7 +168,7 @@ export default class UploadManager extends ApiManager { if (error) { return res.send(); } - await DashUploadUtils.outputResizedImages(() => createReadStream(resolvedPath), resolvedName, pathToDirectory(Directory.images)); + await DashUploadUtils.outputResizedImages(resolvedPath, resolvedName, pathToDirectory(Directory.images)); res.send({ accessPaths: { agnostic: DashUploadUtils.getAccessPaths(Directory.images, resolvedName), @@ -193,15 +197,14 @@ export default class UploadManager extends ApiManager { method: Method.POST, subscription: '/uploadDoc', secureHandler: ({ req, res }) => { - const form = new formidable.IncomingForm(); - form.keepExtensions = true; + const form = new formidable.IncomingForm({ keepExtensions: true }); // let path = req.body.path; const ids: { [id: string]: string } = {}; let remap = true; const getId = (id: string): string => { if (!remap || id.endsWith('Proto')) return id; if (id in ids) return ids[id]; - return (ids[id] = v4()); + return (ids[id] = uuid.v4()); }; const mapFn = (doc: any) => { if (doc.id) { @@ -241,56 +244,35 @@ export default class UploadManager extends ApiManager { }; return new Promise<void>(resolve => { form.parse(req, async (_err, fields, files) => { - remap = fields.remap !== 'false'; + remap = Object.keys(fields).some(key => key === 'remap' && !fields.remap?.includes('false')); //.remap !== 'false'; // bcz: looking to see if the field 'remap' is set to 'false' let id: string = ''; let docids: string[] = []; let linkids: string[] = []; try { for (const name in files) { const f = files[name]; - const path_2 = Array.isArray(f) ? '' : f.path; - const zip = new AdmZip(path_2); + if (!f) continue; + const path_2 = f[0]; // what about the rest of the array? are we guaranteed only one value is set? 
+ const zip = new AdmZip(path_2.filepath); zip.getEntries().forEach((entry: any) => { let entryName = entry.entryName.replace(/%%%/g, '/'); if (!entryName.startsWith('files/')) { return; } - const extension = extname(entryName); + const extension = path.extname(entryName); const pathname = publicDirectory + '/' + entry.entryName; const targetname = publicDirectory + '/' + entryName; try { zip.extractEntryTo(entry.entryName, publicDirectory, true, false); createReadStream(pathname).pipe(createWriteStream(targetname)); - if (extension !== '.pdf') { - const { pngs, jpgs } = AcceptableMedia; - const resizers = [ - { resizer: sharp().resize(100, undefined, { withoutEnlargement: true }), suffix: SizeSuffix.Small }, - { resizer: sharp().resize(400, undefined, { withoutEnlargement: true }), suffix: SizeSuffix.Medium }, - { resizer: sharp().resize(900, undefined, { withoutEnlargement: true }), suffix: SizeSuffix.Large }, - ]; - let isImage = false; - if (pngs.includes(extension)) { - resizers.forEach(element => { - element.resizer = element.resizer.png(); - }); - isImage = true; - } else if (jpgs.includes(extension)) { - resizers.forEach(element => { - element.resizer = element.resizer.jpeg(); - }); - isImage = true; - } - if (isImage) { - resizers.forEach(resizer => { - createReadStream(pathname) - .on('error', e => console.log('Resizing read:' + e)) - .pipe(resizer.resizer) - .on('error', e => console.log('Resizing write: ' + e)) - .pipe(createWriteStream(targetname.replace('_o' + extension, resizer.suffix + extension)).on('error', e => console.log('Resizing write: ' + e))); - }); - } - } - unlink(pathname, () => {}); + Jimp.read(pathname).then(img => { + DashUploadUtils.imageResampleSizes(extension).forEach(({ width, suffix }) => { + const outputPath = InjectSize(targetname, suffix); + if (!width) createReadStream(pathname).pipe(createWriteStream(outputPath)); + else img = img.resize(width, Jimp.AUTO).write(outputPath); + }); + unlink(pathname, () => {}); + }); } catch (e) { console.log(e); } @@ -317,7 +299,7 @@ export default class UploadManager extends ApiManager { } catch (e) { console.log(e); } - unlink(path_2, () => {}); + unlink(path_2.filepath, () => {}); } SolrManager.update(); res.send(JSON.stringify({ id, docids, linkids } || 'error')); @@ -346,7 +328,7 @@ export default class UploadManager extends ApiManager { method: Method.POST, subscription: '/uploadURI', secureHandler: ({ req, res }) => { - const uri = req.body.uri; + const uri: any = req.body.uri; const filename = req.body.name; const origSuffix = req.body.nosuffix ? SizeSuffix.None : SizeSuffix.Original; const deleteFiles = req.body.replaceRootFilename; @@ -362,36 +344,16 @@ export default class UploadManager extends ApiManager { .map((f: any) => fs.unlinkSync(path + f)); } return imageDataUri.outputFile(uri, serverPathToFile(Directory.images, InjectSize(filename, origSuffix))).then((savedName: string) => { - const ext = extname(savedName).toLowerCase(); - const { pngs, jpgs } = AcceptableMedia; - const resizers = !origSuffix - ? 
[{ resizer: sharp().resize(400, undefined, { withoutEnlargement: true }), suffix: SizeSuffix.Medium }] - : [ - { resizer: sharp().resize(100, undefined, { withoutEnlargement: true }), suffix: SizeSuffix.Small }, - { resizer: sharp().resize(400, undefined, { withoutEnlargement: true }), suffix: SizeSuffix.Medium }, - { resizer: sharp().resize(900, undefined, { withoutEnlargement: true }), suffix: SizeSuffix.Large }, - ]; - let isImage = false; - if (pngs.includes(ext)) { - resizers.forEach(element => { - element.resizer = element.resizer.png(); - }); - isImage = true; - } else if (jpgs.includes(ext)) { - resizers.forEach(element => { - element.resizer = element.resizer.jpeg(); - }); - isImage = true; - } - if (isImage) { - resizers.forEach(resizer => { - const path = serverPathToFile(Directory.images, InjectSize(filename, resizer.suffix) + ext); - createReadStream(savedName) - .on('error', e => console.log('Resizing read:' + e)) - .pipe(resizer.resizer) - .on('error', e => console.log('Resizing write: ' + e)) - .pipe(createWriteStream(path).on('error', e => console.log('Resizing write: ' + e))); - }); + const ext = path.extname(savedName).toLowerCase(); + if (AcceptableMedia.imageFormats.includes(ext)) { + Jimp.read(savedName).then(img => + (!origSuffix ? [{ width: 400, suffix: SizeSuffix.Medium }] : Object.values(DashUploadUtils.Sizes)) // + .forEach(({ width, suffix }) => { + const outputPath = serverPathToFile(Directory.images, InjectSize(filename, suffix) + ext); + if (!width) createReadStream(savedName).pipe(createWriteStream(outputPath)); + else img = img.resize(width, Jimp.AUTO).write(outputPath); + }) + ); } res.send(clientPathToFile(Directory.images, filename + ext)); }); diff --git a/src/server/ApiManagers/UserManager.ts b/src/server/ApiManagers/UserManager.ts index 8b7994eac..0431b9bcf 100644 --- a/src/server/ApiManagers/UserManager.ts +++ b/src/server/ApiManagers/UserManager.ts @@ -7,6 +7,8 @@ import { Opt } from '../../fields/Doc'; import { WebSocket } from '../websocket'; import { resolvedPorts } from '../server_Initialization'; import { DashVersion } from '../../fields/DocSymbols'; +import { Utils } from '../../Utils'; +import { check, validationResult } from 'express-validator'; export const timeMap: { [id: string]: number } = {}; interface ActivityUnit { @@ -32,7 +34,7 @@ export default class UserManager extends ApiManager { secureHandler: async ({ user, req, res }) => { const result: any = {}; user.cacheDocumentIds = req.body.cacheDocumentIds; - user.save(err => { + user.save().then(undefined, err => { if (err) { result.error = [{ msg: 'Error while caching documents' }]; } @@ -49,7 +51,7 @@ export default class UserManager extends ApiManager { method: Method.GET, subscription: '/getUserDocumentIds', secureHandler: ({ res, user }) => res.send({ userDocumentId: user.userDocumentId, linkDatabaseId: user.linkDatabaseId, sharingDocumentId: user.sharingDocumentId }), - publicHandler: ({ res }) => res.send({ userDocumentId: '__guest__', linkDatabaseId: 3, sharingDocumentId: 2 }), + publicHandler: ({ res }) => res.send({ userDocumentId: Utils.GuestID(), linkDatabaseId: 3, sharingDocumentId: 2 }), }); register({ @@ -81,7 +83,7 @@ export default class UserManager extends ApiManager { resolvedPorts, }) ), - publicHandler: ({ res }) => res.send(JSON.stringify({ id: '__guest__', email: 'guest' })), + publicHandler: ({ res }) => res.send(JSON.stringify({ userDocumentId: Utils.GuestID(), email: 'guest', resolvedPorts })), }); register({ @@ -107,15 +109,18 @@ export default class 
UserManager extends ApiManager { return; } - req.assert('new_pass', 'Password must be at least 4 characters long').len({ min: 4 }); - req.assert('new_confirm', 'Passwords do not match').equals(new_pass); + check('new_pass', 'Password must be at least 4 characters long') + .run(req) + .then(chcekcres => console.log(chcekcres)); //.len({ min: 4 }); + check('new_confirm', 'Passwords do not match') + .run(req) + .then(theres => console.log(theres)); //.equals(new_pass); if (curr_pass === new_pass) { result.error = [{ msg: 'Current and new password are the same' }]; } - // was there error in validating new passwords? - if (req.validationErrors()) { - // was there error? - result.error = req.validationErrors(); + if (validationResult(req).array().length) { + // was there error in validating new passwords? + result.error = validationResult(req); } // will only change password if there are no errors. @@ -125,7 +130,7 @@ export default class UserManager extends ApiManager { user.passwordResetExpires = undefined; } - user.save(err => { + user.save().then(undefined, err => { if (err) { result.error = [{ msg: 'Error while saving new password' }]; } diff --git a/src/server/DashSession/DashSessionAgent.ts b/src/server/DashSession/DashSessionAgent.ts index 1a5934d8f..1ef7a131d 100644 --- a/src/server/DashSession/DashSessionAgent.ts +++ b/src/server/DashSession/DashSessionAgent.ts @@ -1,18 +1,18 @@ -import { Email, pathFromRoot } from "../ActionUtilities"; -import { red, yellow, green, cyan } from "colors"; -import { get } from "request-promise"; -import { Utils } from "../../Utils"; -import { WebSocket } from "../websocket"; -import { MessageStore } from "../Message"; -import { launchServer, onWindows } from ".."; -import { readdirSync, statSync, createWriteStream, readFileSync, unlinkSync } from "fs"; -import * as Archiver from "archiver"; -import { resolve } from "path"; -import rimraf = require("rimraf"); -import { AppliedSessionAgent, ExitHandler } from "./Session/agents/applied_session_agent"; -import { ServerWorker } from "./Session/agents/server_worker"; -import { Monitor } from "./Session/agents/monitor"; -import { MessageHandler, ErrorLike } from "./Session/agents/promisified_ipc_manager"; +import { Email, pathFromRoot } from '../ActionUtilities'; +import { red, yellow, green, cyan } from 'colors'; +import { get } from 'request-promise'; +import { Utils } from '../../Utils'; +import { WebSocket } from '../websocket'; +import { MessageStore } from '../Message'; +import { launchServer, onWindows } from '..'; +import { readdirSync, statSync, createWriteStream, readFileSync, unlinkSync } from 'fs'; +import * as Archiver from 'archiver'; +import { resolve } from 'path'; +import { rimraf } from 'rimraf'; +import { AppliedSessionAgent, ExitHandler } from './Session/agents/applied_session_agent'; +import { ServerWorker } from './Session/agents/server_worker'; +import { Monitor } from './Session/agents/monitor'; +import { MessageHandler, ErrorLike } from './Session/agents/promisified_ipc_manager'; /** * If we're the monitor (master) thread, we should launch the monitor logic for the session. @@ -20,9 +20,8 @@ import { MessageHandler, ErrorLike } from "./Session/agents/promisified_ipc_mana * our job should be to run the server. 
*/ export class DashSessionAgent extends AppliedSessionAgent { - - private readonly signature = "-Dash Server Session Manager"; - private readonly releaseDesktop = pathFromRoot("../../Desktop"); + private readonly signature = '-Dash Server Session Manager'; + private readonly releaseDesktop = pathFromRoot('../../Desktop'); /** * The core method invoked when the single master thread is initialized. @@ -31,13 +30,13 @@ export class DashSessionAgent extends AppliedSessionAgent { protected async initializeMonitor(monitor: Monitor): Promise<string> { const sessionKey = Utils.GenerateGuid(); await this.dispatchSessionPassword(sessionKey); - monitor.addReplCommand("pull", [], () => monitor.exec("git pull")); - monitor.addReplCommand("solr", [/start|stop|index/], this.executeSolrCommand); - monitor.addReplCommand("backup", [], this.backup); - monitor.addReplCommand("debug", [/\S+\@\S+/], async ([to]) => this.dispatchZippedDebugBackup(to)); - monitor.on("backup", this.backup); - monitor.on("debug", async ({ to }) => this.dispatchZippedDebugBackup(to)); - monitor.on("delete", WebSocket.doDelete); + monitor.addReplCommand('pull', [], () => monitor.exec('git pull')); + monitor.addReplCommand('solr', [/start|stop|index/], this.executeSolrCommand); + monitor.addReplCommand('backup', [], this.backup); + monitor.addReplCommand('debug', [/\S+\@\S+/], async ([to]) => this.dispatchZippedDebugBackup(to)); + monitor.on('backup', this.backup); + monitor.on('debug', async ({ to }) => this.dispatchZippedDebugBackup(to)); + monitor.on('delete', WebSocket.doDelete); monitor.coreHooks.onCrashDetected(this.dispatchCrashReport); return sessionKey; } @@ -58,13 +57,13 @@ export class DashSessionAgent extends AppliedSessionAgent { private _remoteDebugInstructions: string | undefined; private generateDebugInstructions = (zipName: string, target: string): string => { if (!this._remoteDebugInstructions) { - this._remoteDebugInstructions = readFileSync(resolve(__dirname, "./templates/remote_debug_instructions.txt"), { encoding: "utf8" }); + this._remoteDebugInstructions = readFileSync(resolve(__dirname, './templates/remote_debug_instructions.txt'), { encoding: 'utf8' }); } return this._remoteDebugInstructions .replace(/__zipname__/, zipName) .replace(/__target__/, target) .replace(/__signature__/, this.signature); - } + }; /** * Prepares the body of the email with information regarding a crash event. 
@@ -72,12 +71,12 @@ export class DashSessionAgent extends AppliedSessionAgent { private _crashInstructions: string | undefined; private generateCrashInstructions({ name, message, stack }: ErrorLike): string { if (!this._crashInstructions) { - this._crashInstructions = readFileSync(resolve(__dirname, "./templates/crash_instructions.txt"), { encoding: "utf8" }); + this._crashInstructions = readFileSync(resolve(__dirname, './templates/crash_instructions.txt'), { encoding: 'utf8' }); } return this._crashInstructions - .replace(/__name__/, name || "[no error name found]") - .replace(/__message__/, message || "[no error message found]") - .replace(/__stack__/, stack || "[no error stack found]") + .replace(/__name__/, name || '[no error name found]') + .replace(/__message__/, message || '[no error message found]') + .replace(/__stack__/, stack || '[no error stack found]') .replace(/__signature__/, this.signature); } @@ -88,23 +87,19 @@ export class DashSessionAgent extends AppliedSessionAgent { private dispatchSessionPassword = async (sessionKey: string): Promise<void> => { const { mainLog } = this.sessionMonitor; const { notificationRecipient } = DashSessionAgent; - mainLog(green("dispatching session key...")); + mainLog(green('dispatching session key...')); const error = await Email.dispatch({ to: notificationRecipient, - subject: "Dash Release Session Admin Authentication Key", - content: [ - `Here's the key for this session (started @ ${new Date().toUTCString()}):`, - sessionKey, - this.signature - ].join("\n\n") + subject: 'Dash Release Session Admin Authentication Key', + content: [`Here's the key for this session (started @ ${new Date().toUTCString()}):`, sessionKey, this.signature].join('\n\n'), }); if (error) { this.sessionMonitor.mainLog(red(`dispatch failure @ ${notificationRecipient} (${yellow(error.message)})`)); - mainLog(red("distribution of session key experienced errors")); + mainLog(red('distribution of session key experienced errors')); } else { - mainLog(green("successfully distributed session key to recipients")); + mainLog(green('successfully distributed session key to recipients')); } - } + }; /** * This sends an email with the generated crash report. @@ -114,37 +109,37 @@ export class DashSessionAgent extends AppliedSessionAgent { const { notificationRecipient } = DashSessionAgent; const error = await Email.dispatch({ to: notificationRecipient, - subject: "Dash Web Server Crash", - content: this.generateCrashInstructions(crashCause) + subject: 'Dash Web Server Crash', + content: this.generateCrashInstructions(crashCause), }); if (error) { this.sessionMonitor.mainLog(red(`dispatch failure @ ${notificationRecipient} ${yellow(`(${error.message})`)}`)); - mainLog(red("distribution of crash notification experienced errors")); + mainLog(red('distribution of crash notification experienced errors')); } else { - mainLog(green("successfully distributed crash notification to recipients")); + mainLog(green('successfully distributed crash notification to recipients')); } - } + }; /** - * Logic for interfacing with Solr. Either starts it, + * Logic for interfacing with Solr. Either starts it, * stops it, or rebuilds its indices. 
*/ private executeSolrCommand = async (args: string[]): Promise<void> => { const { exec, mainLog } = this.sessionMonitor; const action = args[0]; - if (action === "index") { - exec("npx ts-node ./updateSearch.ts", { cwd: pathFromRoot("./src/server") }); + if (action === 'index') { + exec('npx ts-node ./updateSearch.ts', { cwd: pathFromRoot('./src/server') }); } else { - const command = `${onWindows ? "solr.cmd" : "solr"} ${args[0] === "start" ? "start" : "stop -p 8983"}`; - await exec(command, { cwd: "./solr-8.3.1/bin" }); + const command = `${onWindows ? 'solr.cmd' : 'solr'} ${args[0] === 'start' ? 'start' : 'stop -p 8983'}`; + await exec(command, { cwd: './solr-8.3.1/bin' }); try { - await get("http://localhost:8983"); - mainLog(green("successfully connected to 8983 after running solr initialization")); + await get('http://localhost:8983'); + mainLog(green('successfully connected to 8983 after running solr initialization')); } catch { - mainLog(red("unable to connect at 8983 after running solr initialization")); + mainLog(red('unable to connect at 8983 after running solr initialization')); } } - } + }; /** * Broadcast to all clients that their connection @@ -153,16 +148,16 @@ export class DashSessionAgent extends AppliedSessionAgent { private notifyClient: ExitHandler = reason => { const { _socket } = WebSocket; if (_socket) { - const message = typeof reason === "boolean" ? (reason ? "exit" : "temporary") : "crash"; + const message = typeof reason === 'boolean' ? (reason ? 'exit' : 'temporary') : 'crash'; Utils.Emit(_socket, MessageStore.ConnectionTerminated, message); } - } + }; /** * Performs a backup of the database, saved to the desktop subdirectory. * This should work as is only on our specific release server. */ - private backup = async (): Promise<void> => this.sessionMonitor.exec("backup.bat", { cwd: this.releaseDesktop }); + private backup = async (): Promise<void> => this.sessionMonitor.exec('backup.bat', { cwd: this.releaseDesktop }); /** * Compress either a brand new backup or the most recent backup and send it @@ -175,15 +170,17 @@ export class DashSessionAgent extends AppliedSessionAgent { try { // if desired, complete an immediate backup to send await this.backup(); - mainLog("backup complete"); + mainLog('backup complete'); const backupsDirectory = `${this.releaseDesktop}/backups`; // sort all backups by their modified time, and choose the most recent one - const target = readdirSync(backupsDirectory).map(filename => ({ - modifiedTime: statSync(`${backupsDirectory}/${filename}`).mtimeMs, - filename - })).sort((a, b) => b.modifiedTime - a.modifiedTime)[0].filename; + const target = readdirSync(backupsDirectory) + .map(filename => ({ + modifiedTime: statSync(`${backupsDirectory}/${filename}`).mtimeMs, + filename, + })) + .sort((a, b) => b.modifiedTime - a.modifiedTime)[0].filename; mainLog(`targeting ${target}...`); // create a zip file and to it, write the contents of the backup directory @@ -202,28 +199,25 @@ export class DashSessionAgent extends AppliedSessionAgent { to, subject: `Remote debug: compressed backup of ${target}...`, content: this.generateDebugInstructions(zipName, target), - attachments: [{ filename: zipName, path: zipPath }] + attachments: [{ filename: zipName, path: zipPath }], }); - // since this is intended to be a zero-footprint operation, clean up + // since this is intended to be a zero-footprint operation, clean up // by unlinking both the backup generated earlier in the function and the compressed zip file. 
// to generate a persistent backup, just run backup. unlinkSync(zipPath); rimraf.sync(targetPath); // indicate success or failure - mainLog(`${error === null ? green("successfully dispatched") : red("failed to dispatch")} ${zipName} to ${cyan(to)}`); + mainLog(`${error === null ? green('successfully dispatched') : red('failed to dispatch')} ${zipName} to ${cyan(to)}`); error && mainLog(red(error.message)); } catch (error: any) { - mainLog(red("unable to dispatch zipped backup...")); + mainLog(red('unable to dispatch zipped backup...')); mainLog(red(error.message)); } } - } export namespace DashSessionAgent { - - export const notificationRecipient = "browndashptc@gmail.com"; - + export const notificationRecipient = 'browndashptc@gmail.com'; } diff --git a/src/server/DashSession/Session/agents/applied_session_agent.ts b/src/server/DashSession/Session/agents/applied_session_agent.ts index 8339a06dc..2037e93e5 100644 --- a/src/server/DashSession/Session/agents/applied_session_agent.ts +++ b/src/server/DashSession/Session/agents/applied_session_agent.ts @@ -1,7 +1,8 @@ -import { isMaster } from "cluster"; +import * as _cluster from "cluster"; import { Monitor } from "./monitor"; import { ServerWorker } from "./server_worker"; -import { Utilities } from "../utilities/utilities"; +const cluster = _cluster as any; +const isMaster = cluster.isPrimary; export type ExitHandler = (reason: Error | boolean) => void | Promise<void>; @@ -15,13 +16,13 @@ export abstract class AppliedSessionAgent { private launched = false; public killSession = (reason: string, graceful = true, errorCode = 0) => { - const target = isMaster ? this.sessionMonitor : this.serverWorker; + const target = cluster.default.isPrimary ? this.sessionMonitor : this.serverWorker; target.killSession(reason, graceful, errorCode); } private sessionMonitorRef: Monitor | undefined; public get sessionMonitor(): Monitor { - if (!isMaster) { + if (!cluster.default.isPrimary) { this.serverWorker.emit("kill", { graceful: false, reason: "Cannot access the session monitor directly from the server worker thread.", diff --git a/src/server/DashSession/Session/agents/monitor.ts b/src/server/DashSession/Session/agents/monitor.ts index 9cb5ab576..a6fde4356 100644 --- a/src/server/DashSession/Session/agents/monitor.ts +++ b/src/server/DashSession/Session/agents/monitor.ts @@ -1,15 +1,21 @@ -import { ExitHandler } from "./applied_session_agent"; -import { Configuration, configurationSchema, defaultConfig, Identifiers, colorMapping } from "../utilities/session_config"; -import Repl, { ReplAction } from "../utilities/repl"; -import { isWorker, setupMaster, on, Worker, fork } from "cluster"; -import { manage, MessageHandler, ErrorLike } from "./promisified_ipc_manager"; -import { red, cyan, white, yellow, blue } from "colors"; -import { exec, ExecOptions } from "child_process"; -import { validate, ValidationError } from "jsonschema"; -import { Utilities } from "../utilities/utilities"; -import { readFileSync } from "fs"; -import IPCMessageReceiver from "./process_message_router"; -import { ServerWorker } from "./server_worker"; +import { ExitHandler } from './applied_session_agent'; +import { Configuration, configurationSchema, defaultConfig, Identifiers, colorMapping } from '../utilities/session_config'; +import Repl, { ReplAction } from '../utilities/repl'; +import * as _cluster from 'cluster'; +import { Worker } from 'cluster'; +import { manage, MessageHandler, ErrorLike } from './promisified_ipc_manager'; +import { red, cyan, white, yellow, blue } from 
'colors'; +import { exec, ExecOptions } from 'child_process'; +import { validate, ValidationError } from 'jsonschema'; +import { Utilities } from '../utilities/utilities'; +import { readFileSync } from 'fs'; +import IPCMessageReceiver from './process_message_router'; +import { ServerWorker } from './server_worker'; +const cluster = _cluster as any; +const isWorker = cluster.isWorker; +const setupMaster = cluster.setupPrimary; +const on = cluster.on; +const fork = cluster.fork; /** * Validates and reads the configuration file, accordingly builds a child process factory @@ -26,14 +32,14 @@ export class Monitor extends IPCMessageReceiver { public static Create() { if (isWorker) { - ServerWorker.IPCManager.emit("kill", { - reason: "cannot create a monitor on the worker process.", + ServerWorker.IPCManager.emit('kill', { + reason: 'cannot create a monitor on the worker process.', graceful: false, - errorCode: 1 + errorCode: 1, }); process.exit(1); } else if (++Monitor.count > 1) { - console.error(red("cannot create more than one monitor.")); + console.error(red('cannot create more than one monitor.')); process.exit(1); } else { return new Monitor(); @@ -42,7 +48,7 @@ export class Monitor extends IPCMessageReceiver { private constructor() { super(); - console.log(this.timestamp(), cyan("initializing session...")); + console.log(this.timestamp(), cyan('initializing session...')); this.configureInternalHandlers(); this.config = this.loadAndValidateConfiguration(); this.initializeClusterFunctions(); @@ -53,8 +59,8 @@ export class Monitor extends IPCMessageReceiver { // handle exceptions in the master thread - there shouldn't be many of these // the IPC (inter process communication) channel closed exception can't seem // to be caught in a try catch, and is inconsequential, so it is ignored - process.on("uncaughtException", ({ message, stack }): void => { - if (message !== "Channel closed") { + process.on('uncaughtException', ({ message, stack }): void => { + if (message !== 'Channel closed') { this.mainLog(red(message)); if (stack) { this.mainLog(`uncaught exception\n${red(stack)}`); @@ -62,36 +68,36 @@ export class Monitor extends IPCMessageReceiver { } }); - this.on("kill", ({ reason, graceful, errorCode }) => this.killSession(reason, graceful, errorCode)); - this.on("lifecycle", ({ event }) => console.log(this.timestamp(), `${this.config.identifiers.worker.text} lifecycle phase (${event})`)); - } + this.on('kill', ({ reason, graceful, errorCode }) => this.killSession(reason, graceful, errorCode)); + this.on('lifecycle', ({ event }) => console.log(this.timestamp(), `${this.config.identifiers.worker.text} lifecycle phase (${event})`)); + }; private initializeClusterFunctions = () => { // determines whether or not we see the compilation / initialization / runtime output of each child server process - const output = this.config.showServerOutput ? "inherit" : "ignore"; - setupMaster({ stdio: ["ignore", output, output, "ipc"] }); + const output = this.config.showServerOutput ? 'inherit' : 'ignore'; + setupMaster({ stdio: ['ignore', output, output, 'ipc'] }); // a helpful cluster event called on the master thread each time a child process exits - on("exit", ({ process: { pid } }, code, signal) => { - const prompt = `server worker with process id ${pid} has exited with code ${code}${signal === null ? 
"" : `, having encountered signal ${signal}`}.`; + on('exit', ({ process: { pid } }: { process: { pid: any } }, code: any, signal: any) => { + const prompt = `server worker with process id ${pid} has exited with code ${code}${signal === null ? '' : `, having encountered signal ${signal}`}.`; this.mainLog(cyan(prompt)); // to make this a robust, continuous session, every time a child process dies, we immediately spawn a new one this.spawn(); }); - } + }; public finalize = (sessionKey: string): void => { if (this.finalized) { - throw new Error("Session monitor is already finalized"); + throw new Error('Session monitor is already finalized'); } this.finalized = true; this.key = sessionKey; this.spawn(); - } + }; public readonly coreHooks = Object.freeze({ onCrashDetected: (listener: MessageHandler<{ error: ErrorLike }>) => this.on(Monitor.IntrinsicEvents.CrashDetected, listener), - onServerRunning: (listener: MessageHandler<{ isFirstTime: boolean }>) => this.on(Monitor.IntrinsicEvents.ServerRunning, listener) + onServerRunning: (listener: MessageHandler<{ isFirstTime: boolean }>) => this.on(Monitor.IntrinsicEvents.ServerRunning, listener), }); /** @@ -101,12 +107,12 @@ export class Monitor extends IPCMessageReceiver { * requests to complete) or immediately. */ public killSession = async (reason: string, graceful = true, errorCode = 0) => { - this.mainLog(cyan(`exiting session ${graceful ? "clean" : "immediate"}ly`)); - this.mainLog(`session exit reason: ${(red(reason))}`); + this.mainLog(cyan(`exiting session ${graceful ? 'clean' : 'immediate'}ly`)); + this.mainLog(`session exit reason: ${red(reason)}`); await this.executeExitHandlers(true); await this.killActiveWorker(graceful, true); process.exit(errorCode); - } + }; /** * Execute the list of functions registered to be called @@ -120,27 +126,27 @@ export class Monitor extends IPCMessageReceiver { */ public addReplCommand = (basename: string, argPatterns: (RegExp | string)[], action: ReplAction) => { this.repl.registerCommand(basename, argPatterns, action); - } + }; public exec = (command: string, options?: ExecOptions) => { return new Promise<void>(resolve => { - exec(command, { ...options, encoding: "utf8" }, (error, stdout, stderr) => { + exec(command, { ...options, encoding: 'utf8' }, (error, stdout, stderr) => { if (error) { this.execLog(red(`unable to execute ${white(command)}`)); - error.message.split("\n").forEach(line => line.length && this.execLog(red(`(error) ${line}`))); + error.message.split('\n').forEach(line => line.length && this.execLog(red(`(error) ${line}`))); } else { let outLines: string[], errorLines: string[]; - if ((outLines = stdout.split("\n").filter(line => line.length)).length) { + if ((outLines = stdout.split('\n').filter(line => line.length)).length) { outLines.forEach(line => line.length && this.execLog(cyan(`(stdout) ${line}`))); } - if ((errorLines = stderr.split("\n").filter(line => line.length)).length) { + if ((errorLines = stderr.split('\n').filter(line => line.length)).length) { errorLines.forEach(line => line.length && this.execLog(yellow(`(stderr) ${line}`))); } } resolve(); }); }); - } + }; /** * Generates a blue UTC string associated with the time @@ -153,14 +159,14 @@ export class Monitor extends IPCMessageReceiver { */ public mainLog = (...optionalParams: any[]) => { console.log(this.timestamp(), this.config.identifiers.master.text, ...optionalParams); - } + }; /** * A formatted, identified and timestamped log in color for non- */ private execLog = (...optionalParams: any[]) => { 
console.log(this.timestamp(), this.config.identifiers.exec.text, ...optionalParams); - } + }; /** * Reads in configuration .json file only once, in the master thread @@ -169,28 +175,28 @@ export class Monitor extends IPCMessageReceiver { private loadAndValidateConfiguration = (): Configuration => { let config: Configuration | undefined; try { - console.log(this.timestamp(), cyan("validating configuration...")); + console.log(this.timestamp(), cyan('validating configuration...')); config = JSON.parse(readFileSync('./session.config.json', 'utf8')); const options = { throwError: true, - allowUnknownAttributes: false + allowUnknownAttributes: false, }; // ensure all necessary and no excess information is specified by the configuration file validate(config, configurationSchema, options); config = Utilities.preciseAssign({}, defaultConfig, config); } catch (error: any) { if (error instanceof ValidationError) { - console.log(red("\nSession configuration failed.")); - console.log("The given session.config.json configuration file is invalid."); + console.log(red('\nSession configuration failed.')); + console.log('The given session.config.json configuration file is invalid.'); console.log(`${error.instance}: ${error.stack}`); process.exit(0); - } else if (error.code === "ENOENT" && error.path === "./session.config.json") { - console.log(cyan("Loading default session parameters...")); - console.log("Consider including a session.config.json configuration file in your project root for customization."); + } else if (error.code === 'ENOENT' && error.path === './session.config.json') { + console.log(cyan('Loading default session parameters...')); + console.log('Consider including a session.config.json configuration file in your project root for customization.'); config = Utilities.preciseAssign({}, defaultConfig); } else { - console.log(red("\nSession configuration failed.")); - console.log("The following unknown error occurred during configuration."); + console.log(red('\nSession configuration failed.')); + console.log('The following unknown error occurred during configuration.'); console.log(error.stack); process.exit(0); } @@ -203,7 +209,7 @@ export class Monitor extends IPCMessageReceiver { }); return config!; } - } + }; /** * Builds the repl that allows the following commands to be typed into stdin of the master thread. 
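// Aside: the `_cluster as any` shim above works around newer cluster typings,
// which expose isPrimary/setupPrimary in place of isMaster/setupMaster. With a
// default import no cast is needed, as server_worker.ts does later in this diff.
// A minimal sketch, assuming Node 16+ (the env values are hypothetical):
import cluster from 'cluster';

if (cluster.isPrimary) {
    // same stdio wiring the Monitor uses: silence stdin, forward output, keep an IPC channel
    cluster.setupPrimary({ stdio: ['ignore', 'inherit', 'inherit', 'ipc'] });
    cluster.fork({ session_key: 'example-key' }); // environment passed to the worker, as in Monitor.spawn
    cluster.on('exit', () => cluster.fork()); // respawn on death, mirroring the 'exit' handler above
}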
@@ -213,24 +219,24 @@ export class Monitor extends IPCMessageReceiver { const boolean = /true|false/; const number = /\d+/; const letters = /[a-zA-Z]+/; - repl.registerCommand("exit", [/clean|force/], args => this.killSession("manual exit requested by repl", args[0] === "clean", 0)); - repl.registerCommand("restart", [/clean|force/], args => this.killActiveWorker(args[0] === "clean")); - repl.registerCommand("set", [letters, "port", number, boolean], args => this.setPort(args[0], Number(args[2]), args[3] === "true")); - repl.registerCommand("set", [/polling/, number, boolean], args => { + repl.registerCommand('exit', [/clean|force/], args => this.killSession('manual exit requested by repl', args[0] === 'clean', 0)); + repl.registerCommand('restart', [/clean|force/], args => this.killActiveWorker(args[0] === 'clean')); + repl.registerCommand('set', [letters, 'port', number, boolean], args => this.setPort(args[0], Number(args[2]), args[3] === 'true')); + repl.registerCommand('set', [/polling/, number, boolean], args => { const newPollingIntervalSeconds = Math.floor(Number(args[1])); if (newPollingIntervalSeconds < 0) { - this.mainLog(red("the polling interval must be a non-negative integer")); + this.mainLog(red('the polling interval must be a non-negative integer')); } else { if (newPollingIntervalSeconds !== this.config.polling.intervalSeconds) { this.config.polling.intervalSeconds = newPollingIntervalSeconds; - if (args[2] === "true") { - Monitor.IPCManager.emit("updatePollingInterval", { newPollingIntervalSeconds }); + if (args[2] === 'true') { + Monitor.IPCManager.emit('updatePollingInterval', { newPollingIntervalSeconds }); } } } }); return repl; - } + }; private executeExitHandlers = async (reason: Error | boolean) => Promise.all(this.exitHandlers.map(handler => handler(reason))); @@ -240,13 +246,13 @@ export class Monitor extends IPCMessageReceiver { private killActiveWorker = async (graceful = true, isSessionEnd = false): Promise<void> => { if (this.activeWorker && !this.activeWorker.isDead()) { if (graceful) { - Monitor.IPCManager.emit("manualExit", { isSessionEnd }); + Monitor.IPCManager.emit('manualExit', { isSessionEnd }); } else { await ServerWorker.IPCManager.destroy(); this.activeWorker.process.kill(); } } - } + }; /** * Allows the caller to set the port at which the target (be it the server, @@ -255,7 +261,7 @@ export class Monitor extends IPCMessageReceiver { * at the port. Otherwise, the updated port won't be used until / unless the child * dies on its own and triggers a restart. 
*/ - private setPort = (port: "server" | "socket" | string, value: number, immediateRestart: boolean): void => { + private setPort = (port: 'server' | 'socket' | string, value: number, immediateRestart: boolean): void => { if (value > 1023 && value < 65536) { this.config.ports[port] = value; if (immediateRestart) { @@ -264,7 +270,7 @@ export class Monitor extends IPCMessageReceiver { } else { this.mainLog(red(`${port} is an invalid port number`)); } - } + }; /** * Kills the current active worker and proceeds to spawn a new worker, @@ -272,27 +278,29 @@ export class Monitor extends IPCMessageReceiver { */ private spawn = async (): Promise<void> => { await this.killActiveWorker(); - const { config: { polling, ports }, key } = this; + const { + config: { polling, ports }, + key, + } = this; this.activeWorker = fork({ pollingRoute: polling.route, pollingFailureTolerance: polling.failureTolerance, serverPort: ports.server, socketPort: ports.socket, pollingIntervalSeconds: polling.intervalSeconds, - session_key: key + session_key: key, }); - Monitor.IPCManager = manage(this.activeWorker.process, this.handlers); + if (this.activeWorker) { + Monitor.IPCManager = manage(this.activeWorker.process, this.handlers); + } this.mainLog(cyan(`spawned new server worker with process id ${this.activeWorker?.process.pid}`)); - } - + }; } export namespace Monitor { - export enum IntrinsicEvents { - KeyGenerated = "key_generated", - CrashDetected = "crash_detected", - ServerRunning = "server_running" + KeyGenerated = 'key_generated', + CrashDetected = 'crash_detected', + ServerRunning = 'server_running', } - -}
\ No newline at end of file +} diff --git a/src/server/DashSession/Session/agents/promisified_ipc_manager.ts b/src/server/DashSession/Session/agents/promisified_ipc_manager.ts index f6c8de521..76e218977 100644 --- a/src/server/DashSession/Session/agents/promisified_ipc_manager.ts +++ b/src/server/DashSession/Session/agents/promisified_ipc_manager.ts @@ -1,9 +1,9 @@ -import { Utilities } from "../utilities/utilities"; -import { ChildProcess } from "child_process"; +import { Utilities } from '../utilities/utilities'; +import { ChildProcess } from 'child_process'; /** * Convenience constructor - * @param target the process / worker to which to attach the specialized listeners + * @param target the process / worker to which to attach the specialized listeners */ export function manage(target: IPCTarget, handlers?: HandlerMap) { return new PromisifiedIPCManager(target, handlers); @@ -18,26 +18,30 @@ export type HandlerMap = { [name: string]: MessageHandler[] }; /** * This will always literally be a child process. But, though setting * up a manager in the parent will indeed see the target as the ChildProcess, - * setting up a manager in the child will just see itself as a regular NodeJS.Process. + * setting up a manager in the child will just see itself as a regular NodeJS.Process. */ export type IPCTarget = NodeJS.Process | ChildProcess; /** - * Specifies a general message format for this API + * Specifies a general message format for this API */ export type Message<T = any> = { name: string; args?: T; }; -export type MessageHandler<T = any> = (args: T) => (any | Promise<any>); +export type MessageHandler<T = any> = (args: T) => any | Promise<any>; /** * When a message is emitted, it is embedded with private metadata * to facilitate the resolution of promises, etc. */ -interface InternalMessage extends Message { metadata: Metadata; } -interface Metadata { isResponse: boolean; id: string; } -type InternalMessageHandler = (message: InternalMessage) => (any | Promise<any>); +interface InternalMessage extends Message { + metadata: Metadata; +} +interface Metadata { + isResponse: boolean; + id: string; +} /** * Allows for the transmission of the error's key features over IPC. @@ -56,12 +60,12 @@ export interface Response<T = any> { error?: ErrorLike; } -const destroyEvent = "__destroy__"; +const destroyEvent = '__destroy__'; /** * This is a wrapper utility class that allows the caller process * to emit an event and return a promise that resolves when it and all - * other processes listening to its emission of this event have completed. + * other processes listening to its emission of this event have completed. */ export class PromisifiedIPCManager { private readonly target: IPCTarget; @@ -75,7 +79,7 @@ export class PromisifiedIPCManager { this.target = target; if (handlers) { handlers[destroyEvent] = [this.destroyHelper]; - this.target.addListener("message", this.generateInternalHandler(handlers)); + this.target.addListener('message', this.generateInternalHandler(handlers)); } } @@ -86,26 +90,27 @@ export class PromisifiedIPCManager { */ public emit = async <T = any>(name: string, args?: any): Promise<Response<T>> => { if (this.isDestroyed) { - const error = { name: "FailedDispatch", message: "Cannot use a destroyed IPC manager to emit a message." }; + const error = { name: 'FailedDispatch', message: 'Cannot use a destroyed IPC manager to emit a message.' 
}; return { error }; } return new Promise<Response<T>>(resolve => { const messageId = Utilities.guid(); + type InternalMessageHandler = (message: any /* MessageListener*/) => any | Promise<any>; const responseHandler: InternalMessageHandler = ({ metadata: { id, isResponse }, args }) => { if (isResponse && id === messageId) { - this.target.removeListener("message", responseHandler); + this.target.removeListener('message', responseHandler); resolve(args); } }; - this.target.addListener("message", responseHandler); + this.target.addListener('message', responseHandler); const message = { name, args, metadata: { id: messageId, isResponse: false } }; if (!(this.target.send && this.target.send(message))) { - const error: ErrorLike = { name: "FailedDispatch", message: "Either the target's send method was undefined or the act of sending failed." }; + const error: ErrorLike = { name: 'FailedDispatch', message: "Either the target's send method was undefined or the act of sending failed." }; resolve({ error }); - this.target.removeListener("message", responseHandler); + this.target.removeListener('message', responseHandler); } }); - } + }; /** * Invoked from either the parent or the child process, this allows @@ -122,7 +127,7 @@ export class PromisifiedIPCManager { } resolve(); }); - } + }; /** * Dispatches the dummy responses and sets the isDestroyed flag to true. @@ -131,12 +136,12 @@ export class PromisifiedIPCManager { const { pendingMessages } = this; this.isDestroyed = true; Object.keys(pendingMessages).forEach(id => { - const error: ErrorLike = { name: "ManagerDestroyed", message: "The IPC manager was destroyed before the response could be returned." }; + const error: ErrorLike = { name: 'ManagerDestroyed', message: 'The IPC manager was destroyed before the response could be returned.' }; const message: InternalMessage = { name: pendingMessages[id], args: { error }, metadata: { id, isResponse: true } }; this.target.send?.(message); }); this.pendingMessages = {}; - } + }; /** * This routine receives a uniquely identified message. If the message is itself a response, @@ -145,29 +150,30 @@ export class PromisifiedIPCManager { * which will ultimately invoke the responseHandler of the original emission and resolve the * sender's promise. 
*/ - private generateInternalHandler = (handlers: HandlerMap): MessageHandler => async (message: InternalMessage) => { - const { name, args, metadata } = message; - if (name && metadata && !metadata.isResponse) { - const { id } = metadata; - this.pendingMessages[id] = name; - let error: Error | undefined; - let results: any[] | undefined; - try { - const registered = handlers[name]; - if (registered) { - results = await Promise.all(registered.map(handler => handler(args))); + private generateInternalHandler = + (handlers: HandlerMap): MessageHandler => + async (message: InternalMessage) => { + const { name, args, metadata } = message; + if (name && metadata && !metadata.isResponse) { + const { id } = metadata; + this.pendingMessages[id] = name; + let error: Error | undefined; + let results: any[] | undefined; + try { + const registered = handlers[name]; + if (registered) { + results = await Promise.all(registered.map(handler => handler(args))); + } + } catch (e: any) { + error = e; + } + if (!this.isDestroyed && this.target.send) { + const metadata = { id, isResponse: true }; + const response: Response = { results, error }; + const message = { name, args: response, metadata }; + delete this.pendingMessages[id]; + this.target.send(message); } - } catch (e: any) { - error = e; - } - if (!this.isDestroyed && this.target.send) { - const metadata = { id, isResponse: true }; - const response: Response = { results, error }; - const message = { name, args: response, metadata }; - delete this.pendingMessages[id]; - this.target.send(message); } - } - } - -}
\ No newline at end of file + }; +} diff --git a/src/server/DashSession/Session/agents/server_worker.ts b/src/server/DashSession/Session/agents/server_worker.ts index 634b0113d..d8b3ee80b 100644 --- a/src/server/DashSession/Session/agents/server_worker.ts +++ b/src/server/DashSession/Session/agents/server_worker.ts @@ -1,4 +1,4 @@ -import { isMaster } from "cluster"; +import cluster from "cluster"; import { green, red, white, yellow } from "colors"; import { get } from "request-promise"; import { ExitHandler } from "./applied_session_agent"; @@ -22,7 +22,7 @@ export class ServerWorker extends IPCMessageReceiver { private serverPort: number; private isInitialized = false; public static Create(work: Function) { - if (isMaster) { + if (cluster.isPrimary) { console.error(red("cannot create a worker on the monitor process.")); process.exit(1); } else if (++ServerWorker.count > 1) { diff --git a/src/server/DashUploadUtils.ts b/src/server/DashUploadUtils.ts index 19cb3f240..cd9b635bd 100644 --- a/src/server/DashUploadUtils.ts +++ b/src/server/DashUploadUtils.ts @@ -5,8 +5,7 @@ import { File } from 'formidable'; import { createReadStream, createWriteStream, existsSync, readFileSync, rename, unlinkSync, writeFile } from 'fs'; import * as path from 'path'; import { basename } from 'path'; -import * as sharp from 'sharp'; -import { Readable, Stream } from 'stream'; +import Jimp from 'jimp'; import { filesDirectory, publicDirectory } from '.'; import { Opt } from '../fields/Doc'; import { ParsedPDF } from '../server/PdfTypes'; @@ -15,8 +14,8 @@ import { createIfNotExists } from './ActionUtilities'; import { clientPathToFile, Directory, pathToDirectory, serverPathToFile } from './ApiManagers/UploadManager'; import { resolvedServerUrl } from './server_Initialization'; import { AcceptableMedia, Upload } from './SharedMediaTypes'; -import request = require('request-promise'); -import formidable = require('formidable'); +import * as request from 'request-promise'; +import * as formidable from 'formidable'; import { AzureManager } from './ApiManagers/AzureManager'; import axios from 'axios'; const spawn = require('child_process').spawn; @@ -26,6 +25,7 @@ const ffmpeg = require('fluent-ffmpeg'); const fs = require('fs'); const requestImageSize = require('../client/util/request-image-size'); const md5File = require('md5-file'); +const autorotate = require('jpeg-autorotate'); export enum SizeSuffix { Small = '_s', @@ -55,9 +55,9 @@ export namespace DashUploadUtils { } export const Sizes: { [size: string]: Size } = { - SMALL: { width: 100, suffix: SizeSuffix.Small }, + LARGE: { width: 800, suffix: SizeSuffix.Large }, MEDIUM: { width: 400, suffix: SizeSuffix.Medium }, - LARGE: { width: 900, suffix: SizeSuffix.Large }, + SMALL: { width: 100, suffix: SizeSuffix.Small }, }; export function validateExtension(url: string) { @@ -120,14 +120,14 @@ export namespace DashUploadUtils { }; } - function resolveExistingFile(name: string, pat: string, directory: Directory, type?: string, duration?: number, rawText?: string) { - const data = { size: 0, path: path.basename(pat), name, type: type ?? '' }; - const file = { ...data, toJSON: () => ({ ...data, filename: data.path.replace(/.*\//, ''), mtime: duration?.toString(), mime: '', toJson: () => undefined as any }) }; + function resolveExistingFile(name: string, pat: string, directory: Directory, mimetype?: string | null, duration?: number, rawText?: string): Upload.FileResponse<Upload.FileInformation> { + const data = { size: 0, filepath: pat, name, type: mimetype ?? 
'', originalFilename: name, newFilename: path.basename(pat), mimetype: mimetype || null, hashAlgorithm: false as any };
+        const file = { ...data, toJSON: () => ({ ...data, length: 0, filename: data.filepath.replace(/.*\//, ''), mtime: new Date(), mimetype: mimetype || null, toJson: () => undefined as any }) };
         return {
-            source: file,
+            source: file || null,
             result: {
                 accessPaths: {
-                    agnostic: getAccessPaths(directory, data.path),
+                    agnostic: getAccessPaths(directory, data.filepath),
                 },
                 rawText,
                 duration,
@@ -145,8 +145,8 @@ export namespace DashUploadUtils {
     export function uploadYoutube(videoId: string, overwriteId: string): Promise<Upload.FileResponse> {
         return new Promise<Upload.FileResponse<Upload.FileInformation>>((res, rej) => {
             const name = videoId;
-            const path = name.replace(/^-/, '__') + '.mp4';
-            const finalPath = serverPathToFile(Directory.videos, path);
+            const filepath = name.replace(/^-/, '__') + '.mp4';
+            const finalPath = serverPathToFile(Directory.videos, filepath);
             if (existsSync(finalPath)) {
                 uploadProgress.set(overwriteId, 'computing duration');
                 exec(`yt-dlp -o ${finalPath} "https://www.youtube.com/watch?v=${videoId}" --get-duration`, (error: any, stdout: any, stderr: any) => {
@@ -156,7 +156,7 @@
                 });
             } else {
                 uploadProgress.set(overwriteId, 'starting download');
-                const ytdlp = spawn(`yt-dlp`, ['-o', path, `https://www.youtube.com/watch?v=${videoId}`, '--max-filesize', '100M', '-f', 'mp4']);
+                const ytdlp = spawn(`yt-dlp`, ['-o', filepath, `https://www.youtube.com/watch?v=${videoId}`, '--max-filesize', '100M', '-f', 'mp4']);
                 ytdlp.stdout.on('data', (data: any) => uploadProgress.set(overwriteId, data.toString()));
@@ -171,20 +171,22 @@
                         res({
                             source: {
                                 size: 0,
-                                path,
-                                name,
-                                type: '',
-                                toJSON: () => ({ name, path }),
+                                filepath: name,
+                                originalFilename: name,
+                                newFilename: name,
+                                mimetype: 'video',
+                                hashAlgorithm: 'md5',
+                                toJSON: () => ({ newFilename: name, filepath, mimetype: 'video', mtime: new Date(), size: 0, length: 0, originalFilename: name }),
                             },
                             result: { name: 'failed youtube query', message: `Could not archive video. ${code ? errors : uploadProgress.get(videoId)}` },
                         });
                     } else {
                         uploadProgress.set(overwriteId, 'computing duration');
-                        exec(`yt-dlp-o ${path} "https://www.youtube.com/watch?v=${videoId}" --get-duration`, (error: any, stdout: any, stderr: any) => {
+                        exec(`yt-dlp -o ${filepath} "https://www.youtube.com/watch?v=${videoId}" --get-duration`, (error: any, stdout: any, stderr: any) => {
                             const time = Array.from(stdout.trim().split(':')).reverse();
                             const duration = (time.length > 2 ? Number(time[2]) * 1000 * 60 : 0) + (time.length > 1 ? Number(time[1]) * 60 : 0) + (time.length > 0 ? Number(time[0]) : 0);
-                            const data = { size: 0, path, name, type: 'video/mp4' };
-                            const file = { ...data, toJSON: () => ({ ...data, filename: data.path.replace(/.*\//, ''), mtime: duration.toString(), mime: '', toJson: () => undefined as any }) };
+                            const data = { size: 0, filepath, name, mimetype: '', originalFilename: name, newFilename: name, hashAlgorithm: 'md5' as 'md5', type: 'video/mp4' };
+                            const file = { ...data, toJSON: () => ({ ...data, length: 0, filename: data.filepath.replace(/.*\//, ''), mtime: new Date(), mime: '', hashAlgorithm: 'md5' as 'md5', toJson: () => undefined as any }) };
                             res(MoveParsedFile(file, Directory.videos));
                         });
                     }
@@ -195,18 +197,18 @@
     export async function upload(file: File, overwriteGuid?: string): Promise<Upload.FileResponse> {
         const isAzureOn = usingAzure();
-        const { type, path, name } = file;
+        const { mimetype: type, filepath, originalFilename } = file;
         const types = type?.split('/') ?? [];
-        uploadProgress.set(overwriteGuid ?? name, 'uploading'); // If the client sent a guid it uses to track upload progress, use that guid. Otherwise, use the file's name.
+        // uploadProgress.set(overwriteGuid ?? name, 'uploading'); // If the client sent a guid it uses to track upload progress, use that guid. Otherwise, use the file's name.
         const category = types[0];
         let format = `.${types[1]}`;
-        console.log(green(`Processing upload of file (${name}) and format (${format}) with upload type (${type}) in category (${category}).`));
+        console.log(green(`Processing upload of file (${originalFilename}) and format (${format}) with upload type (${type}) in category (${category}).`));
         switch (category) {
             case 'image':
                 if (imageFormats.includes(format)) {
-                    const result = await UploadImage(path, basename(path));
+                    const result = await UploadImage(filepath, basename(filepath));
                     return { source: file, result };
                 }
                 fs.unlink(filepath, () => {}); // the file's location is 'filepath'; bare 'path' is the module import
             case 'video':
                 if (format.includes('x-matroska')) {
                     console.log('case video');
                     await new Promise(res =>
-                        ffmpeg(file.path)
+                        ffmpeg(file.filepath)
                             .videoCodec('copy') // this will copy the data instead of reencode it
-                            .save(file.path.replace('.mkv', '.mp4'))
+                            .save(file.filepath.replace('.mkv', '.mp4'))
                             .on('end', res)
                             .on('error', (e: any) => console.log(e))
                     );
-                    file.path = file.path.replace('.mkv', '.mp4');
+                    file.filepath = file.filepath.replace('.mkv', '.mp4');
                     format = '.mp4';
                 }
                 if (format.includes('quicktime')) {
                     let abort = false;
                     await new Promise<void>(res =>
-                        ffmpeg.ffprobe(file.path, (err: any, metadata: any) => {
+                        ffmpeg.ffprobe(file.filepath, (err: any, metadata: any) => {
                             if (metadata.streams.some((stream: any) => stream.codec_name === 'hevc')) {
                                 abort = true;
                             }
@@ -275,24 +277,26 @@
             }
         }
-        console.log(red(`Ignoring unsupported file (${name}) with upload type (${type}).`));
+        console.log(red(`Ignoring unsupported file (${originalFilename}) with upload type (${type}).`));
         fs.unlink(filepath, () => {}); // as above: 'filepath', not the 'path' module
-        return { source: file, result: new Error(`Could not upload unsupported file (${name}) with upload type (${type}).`) };
+        return { source: file, result: new Error(`Could not upload unsupported file (${originalFilename}) with upload type (${type}).`) };
     }
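// Aside: the pervasive renames above (path -> filepath, name -> originalFilename,
// type -> mimetype) track the File shape of newer formidable releases (v2/v3).
// A minimal sketch of reading those fields from a parsed upload (the helper
// name is hypothetical):
import * as formidable from 'formidable';

function describeUpload(file: formidable.File): string {
    // filepath is the temp location on disk; originalFilename and mimetype may be null
    return `${file.originalFilename ?? 'unnamed'} (${file.mimetype ?? 'unknown'}) at ${file.filepath}`;
}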
     async function UploadPdf(file: File) {
-        const fileKey = (await md5File(file.path)) + '.pdf';
+        const fileKey = (await md5File(file.filepath)) + '.pdf';
         const textFilename = `${fileKey.substring(0, fileKey.length - 4)}.txt`;
         if (fExists(fileKey, Directory.pdfs) && fExists(textFilename, Directory.text)) {
-            fs.unlink(file.path, () => {});
+            fs.unlink(file.filepath, () => {});
             return new Promise<Upload.FileResponse>(res => {
                 const textFilename = `${fileKey.substring(0, fileKey.length - 4)}.txt`;
                 const readStream = createReadStream(serverPathToFile(Directory.text, textFilename));
                 var rawText = '';
-                readStream.on('data', chunk => (rawText += chunk.toString())).on('end', () => res(resolveExistingFile(file.name, fileKey, Directory.pdfs, file.type, undefined, rawText)));
+                readStream
+                    .on('data', chunk => (rawText += chunk.toString())) //
+                    .on('end', () => res(resolveExistingFile(file.originalFilename ?? '', fileKey, Directory.pdfs, file.mimetype, undefined, rawText)));
             });
         }
-        const dataBuffer = readFileSync(file.path);
+        const dataBuffer = readFileSync(file.filepath);
         const result: ParsedPDF | any = await parse(dataBuffer).catch((e: any) => e);
         if (!result.code) {
             await new Promise<void>((resolve, reject) => {
@@ -301,11 +305,11 @@ export namespace DashUploadUtils {
             });
             return MoveParsedFile(file, Directory.pdfs, undefined, result?.text, undefined, fileKey);
         }
-        return { source: file, result: { name: 'failed pdf upload', message: `Could not upload (${file.name}).${result.message}` } };
+        return { source: file, result: { name: 'failed pdf upload', message: `Could not upload (${file.originalFilename}).${result.message}` } };
     }
     async function UploadCsv(file: File) {
-        const { path: sourcePath } = file;
+        const { filepath: sourcePath } = file;
         // read the file as a string
         const data = readFileSync(sourcePath, 'utf8');
         // split the string into an array of lines
@@ -367,7 +371,7 @@ export namespace DashUploadUtils {
     }
     export interface ImageResizer {
-        resizer?: sharp.Sharp;
+        width: number;
         suffix: SizeSuffix;
     }
@@ -463,12 +467,12 @@
      * to appear in the new location
      */
     export async function MoveParsedFile(file: formidable.File, destination: Directory, suffix: string | undefined = undefined, text?: string, duration?: number, targetName?: string): Promise<Upload.FileResponse> {
-        const { path: sourcePath } = file;
-        let name = targetName ?? path.basename(sourcePath);
+        const { filepath } = file;
+        let name = targetName ?? path.basename(filepath);
         suffix && (name += suffix);
         return new Promise(resolve => {
             const destinationPath = serverPathToFile(destination, name);
-            rename(sourcePath, destinationPath, error => {
+            rename(filepath, destinationPath, error => {
                 resolve({
                     source: file,
                     result: error
@@ -540,7 +544,7 @@
                     writtenFiles = {};
                 }
             } else {
-                writtenFiles = await outputResizedImages(() => request(requestable), resolved, pathToDirectory(Directory.images));
+                writtenFiles = await outputResizedImages(metadata.source, resolved, pathToDirectory(Directory.images));
             }
             for (const suffix of Object.keys(writtenFiles)) {
                 information.accessPaths[suffix] = getAccessPaths(images, writtenFiles[suffix]);
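// Aside: the hunk below swaps sharp's stream-based resizers for a Jimp pass plus
// jpeg-autorotate. Its core reduces to this sketch (names here are hypothetical;
// note Jimp's write() takes a callback, while writeAsync() returns a promise):
import Jimp from 'jimp';

async function resizeImage(sourcePath: string, width: number, outputPath: string): Promise<void> {
    const img = await Jimp.read(sourcePath); // decodes jpeg/png/bmp/tiff/gif from disk
    await img.resize(width, Jimp.AUTO).writeAsync(outputPath); // AUTO preserves the aspect ratio
}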
@@ -595,57 +599,46 @@
      * @param outputDirectory the directory to output to, usually Directory.Images
      * @returns a map with suffixes as keys and resized filenames as values.
      */
-    export async function outputResizedImages(streamProvider: () => Stream | Promise<Stream>, outputFileName: string, outputDirectory: string) {
+    export async function outputResizedImages(sourcePath: string, outputFileName: string, outputDirectory: string) {
         const writtenFiles: { [suffix: string]: string } = {};
-        for (const { resizer, suffix } of resizers(path.extname(outputFileName))) {
-            const outputPath = path.resolve(outputDirectory, (writtenFiles[suffix] = InjectSize(outputFileName, suffix)));
-            await new Promise<void>(async (resolve, reject) => {
-                const source = streamProvider();
-                let readStream = source instanceof Promise ? await source : source;
-                let error = false;
-                if (resizer) {
-                    readStream = readStream.pipe(resizer.withMetadata()).on('error', async args => {
-                        error = true;
-                        if (error) {
-                            const source2 = streamProvider();
-                            let readStream2: Stream | undefined;
-                            readStream2 = source2 instanceof Promise ? await source2 : source2;
-                            readStream2?.pipe(createWriteStream(outputPath)).on('error', resolve).on('close', resolve);
-                        }
-                    });
-                }
-                !error && readStream?.pipe(createWriteStream(outputPath)).on('error', resolve).on('close', resolve);
+        const sizes = imageResampleSizes(path.extname(outputFileName));
+        const outputPath = (suffix: SizeSuffix) => path.resolve(outputDirectory, (writtenFiles[suffix] = InjectSize(outputFileName, suffix)));
+        await Promise.all(
+            sizes.filter(({ width }) => !width).map(({ suffix }) =>
+                new Promise<void>(res => createReadStream(sourcePath).pipe(createWriteStream(outputPath(suffix))).on('close', res))
+            )); // prettier-ignore
+        const fileIn = fs.readFileSync(sourcePath);
+        let buffer: any;
+        try {
+            // jpeg-autorotate resolves with { buffer, orientation, ... } and rejects when no rotation is needed
+            const { buffer: rotated } = await autorotate.rotate(fileIn, { quality: 30 });
+            buffer = rotated;
+        } catch (e) {} // fall back to the original bytes
+        return Jimp.read(buffer ?? fileIn)
+            .then(async img => {
+                await Promise.all( sizes.filter(({ width }) => width) .map(({ width, suffix }) =>
+                    img = img.resize(width, Jimp.AUTO).write(outputPath(suffix))
+                )); // prettier-ignore
+                return writtenFiles;
+            })
+            .catch(e => {
+                console.log('ERROR: ' + e);
+                return writtenFiles;
             });
-        }
-        return writtenFiles;
     }
     /**
      * define the resizers to use
      * @param ext the extension
-     * @returns an array of resizer functions from sharp
+     * @returns an array of resize descriptions
      */
-    function resizers(ext: string): DashUploadUtils.ImageResizer[] {
+    export function imageResampleSizes(ext: string): DashUploadUtils.ImageResizer[] {
         return [
-            { suffix: SizeSuffix.Original },
-            ...Object.values(DashUploadUtils.Sizes).map(({ suffix, width }) => {
-                let initial: sharp.Sharp | undefined = sharp({ failOnError: false }).resize(width, undefined, { withoutEnlargement: true });
-                if (pngs.includes(ext)) {
-                    initial = initial.png(pngOptions);
-                } else if (jpgs.includes(ext)) {
-                    initial = initial.jpeg();
-                } else if (webps.includes(ext)) {
-                    initial = initial.webp();
-                } else if (tiffs.includes(ext)) {
-                    initial = initial.tiff();
-                } else if (ext === '.gif') {
-                    initial = undefined;
-                }
-                return {
-                    resizer: suffix === '_o' ? undefined : initial,
-                    suffix,
-                };
-            }),
+            { suffix: SizeSuffix.Original, width: 0 },
+            ...[...(AcceptableMedia.imageFormats.includes(ext.toLowerCase()) ?
Object.values(DashUploadUtils.Sizes) : [])].map(({ suffix, width }) => ({ + width, + suffix, + })), ]; } } diff --git a/src/server/GarbageCollector.ts b/src/server/GarbageCollector.ts index c60880882..423c719c2 100644 --- a/src/server/GarbageCollector.ts +++ b/src/server/GarbageCollector.ts @@ -65,7 +65,7 @@ async function GarbageCollect(full: boolean = true) { // await new Promise(res => setTimeout(res, 3000)); const cursor = await Database.Instance.query({}, { userDocumentId: 1 }, 'users'); const users = await cursor.toArray(); - const ids: string[] = [...users.map(user => user.userDocumentId), ...users.map(user => user.sharingDocumentId), ...users.map(user => user.linkDatabaseId)]; + const ids: string[] = [...users.map((user:any) => user.userDocumentId), ...users.map((user:any) => user.sharingDocumentId), ...users.map((user:any) => user.linkDatabaseId)]; const visited = new Set<string>(); const files: { [name: string]: string[] } = {}; @@ -95,7 +95,7 @@ async function GarbageCollect(full: boolean = true) { const notToDelete = Array.from(visited); const toDeleteCursor = await Database.Instance.query({ _id: { $nin: notToDelete } }, { _id: 1 }); - const toDelete: string[] = (await toDeleteCursor.toArray()).map(doc => doc._id); + const toDelete: string[] = (await toDeleteCursor.toArray()).map((doc:any) => doc._id); toDeleteCursor.close(); if (!full) { await Database.Instance.updateMany({ _id: { $nin: notToDelete } }, { $set: { "deleted": true } }); diff --git a/src/server/IDatabase.ts b/src/server/IDatabase.ts index dd4968579..2274792b3 100644 --- a/src/server/IDatabase.ts +++ b/src/server/IDatabase.ts @@ -3,13 +3,13 @@ import { Transferable } from './Message'; export const DocumentsCollection = 'documents'; export interface IDatabase { - update(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateWriteOpResult) => void, upsert?: boolean, collectionName?: string): Promise<void>; - updateMany(query: any, update: any, collectionName?: string): Promise<mongodb.WriteOpResult>; + update(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateResult) => void, upsert?: boolean, collectionName?: string): Promise<void>; + updateMany(query: any, update: any, collectionName?: string): Promise<mongodb.UpdateResult>; - replace(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateWriteOpResult) => void, upsert?: boolean, collectionName?: string): void; + replace(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateResult) => void, upsert?: boolean, collectionName?: string): void; - delete(query: any, collectionName?: string): Promise<mongodb.DeleteWriteOpResultObject>; - delete(id: string, collectionName?: string): Promise<mongodb.DeleteWriteOpResultObject>; + delete(query: any, collectionName?: string): Promise<mongodb.DeleteResult>; + delete(id: string, collectionName?: string): Promise<mongodb.DeleteResult>; dropSchema(...schemaNames: string[]): Promise<any>; @@ -20,5 +20,5 @@ export interface IDatabase { getCollectionNames(): Promise<string[]>; visit(ids: string[], fn: (result: any) => string[] | Promise<string[]>, collectionName?: string): Promise<void>; - query(query: { [key: string]: any }, projection?: { [key: string]: 0 | 1 }, collectionName?: string): Promise<mongodb.Cursor>; + query(query: { [key: string]: any }, projection?: { [key: string]: 0 | 1 }, collectionName?: string): Promise<mongodb.FindCursor>; } diff --git a/src/server/MemoryDatabase.ts b/src/server/MemoryDatabase.ts index 
d2d8bb3b3..b74332bf5 100644 --- a/src/server/MemoryDatabase.ts +++ b/src/server/MemoryDatabase.ts @@ -19,7 +19,7 @@ export class MemoryDatabase implements IDatabase { return Promise.resolve(Object.keys(this.db)); } - public update(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateWriteOpResult) => void, _upsert?: boolean, collectionName = DocumentsCollection): Promise<void> { + public update(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateResult) => void, _upsert?: boolean, collectionName = DocumentsCollection): Promise<void> { const collection = this.getCollection(collectionName); const set = "$set"; if (set in value) { @@ -45,17 +45,17 @@ export class MemoryDatabase implements IDatabase { return Promise.resolve(undefined); } - public updateMany(query: any, update: any, collectionName = DocumentsCollection): Promise<mongodb.WriteOpResult> { + public updateMany(query: any, update: any, collectionName = DocumentsCollection): Promise<mongodb.UpdateResult> { throw new Error("Can't updateMany a MemoryDatabase"); } - public replace(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateWriteOpResult) => void, upsert?: boolean, collectionName = DocumentsCollection): void { + public replace(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateResult) => void, upsert?: boolean, collectionName = DocumentsCollection): void { this.update(id, value, callback, upsert, collectionName); } - public delete(query: any, collectionName?: string): Promise<mongodb.DeleteWriteOpResultObject>; - public delete(id: string, collectionName?: string): Promise<mongodb.DeleteWriteOpResultObject>; - public delete(id: any, collectionName = DocumentsCollection): Promise<mongodb.DeleteWriteOpResultObject> { + public delete(query: any, collectionName?: string): Promise<mongodb.DeleteResult>; + public delete(id: string, collectionName?: string): Promise<mongodb.DeleteResult>; + public delete(id: any, collectionName = DocumentsCollection): Promise<mongodb.DeleteResult> { const i = id.id ?? id; delete this.getCollection(collectionName)[i]; @@ -105,7 +105,7 @@ export class MemoryDatabase implements IDatabase { } } - public query(): Promise<mongodb.Cursor> { + public query(): Promise<mongodb.FindCursor> { throw new Error("Can't query a MemoryDatabase"); } } diff --git a/src/server/ProcessFactory.ts b/src/server/ProcessFactory.ts index 63682368f..f69eda4c3 100644 --- a/src/server/ProcessFactory.ts +++ b/src/server/ProcessFactory.ts @@ -2,7 +2,7 @@ import { ChildProcess, spawn, StdioOptions } from "child_process"; import { existsSync, mkdirSync } from "fs"; import { Stream } from "stream"; import { fileDescriptorFromStream, pathFromRoot } from './ActionUtilities'; -import rimraf = require("rimraf"); +import { rimraf } from "rimraf"; export namespace ProcessFactory { @@ -13,7 +13,7 @@ export namespace ProcessFactory { const log_fd = await Logger.create(command, args); stdio = ["ignore", log_fd, log_fd]; } - const child = spawn(command, args, { detached, stdio }); + const child = spawn(command, args ?? 
[], { detached, stdio }); child.unref(); return child; } @@ -27,7 +27,7 @@ export namespace Logger { export async function initialize() { if (existsSync(logPath)) { if (!process.env.SPAWNED) { - await new Promise<any>(resolve => rimraf(logPath, resolve)); + await new Promise<any>(resolve => rimraf(logPath).then(resolve)); } } mkdirSync(logPath); diff --git a/src/server/RouteManager.ts b/src/server/RouteManager.ts index 5683cd539..540bca776 100644 --- a/src/server/RouteManager.ts +++ b/src/server/RouteManager.ts @@ -1,6 +1,7 @@ import { cyan, green, red } from 'colors'; import { Express, Request, Response } from 'express'; import { AdminPriviliges } from '.'; +import { Utils } from '../Utils'; import { DashUserModel } from './authentication/DashUserModel'; import RouteSubscriber from './RouteSubscriber'; @@ -102,7 +103,7 @@ export default class RouteManager { let user = req.user as Partial<DashUserModel> | undefined; const { originalUrl: target } = req; if (process.env.DB === 'MEM' && !user) { - user = { id: 'guest', email: 'guest', userDocumentId: '__guest__' }; + user = { id: 'guest', email: 'guest', userDocumentId: Utils.GuestID() }; } const core = { req, res, isRelease }; const tryExecute = async (toExecute: (args: any) => any | Promise<any>, args: any) => { diff --git a/src/server/apis/google/GoogleApiServerUtils.ts b/src/server/apis/google/GoogleApiServerUtils.ts index 4453b83bf..3940b7a3d 100644 --- a/src/server/apis/google/GoogleApiServerUtils.ts +++ b/src/server/apis/google/GoogleApiServerUtils.ts @@ -1,11 +1,11 @@ -import { google } from "googleapis"; -import { OAuth2Client, Credentials, OAuth2ClientOptions } from "google-auth-library"; -import { Opt } from "../../../fields/Doc"; -import { GaxiosResponse } from "gaxios"; -import request = require('request-promise'); -import * as qs from "query-string"; -import { Database } from "../../database"; -import { GoogleCredentialsLoader } from "./CredentialsLoader"; +import { google } from 'googleapis'; +import { OAuth2Client, Credentials, OAuth2ClientOptions } from 'google-auth-library'; +import { Opt } from '../../../fields/Doc'; +import { GaxiosResponse } from 'gaxios'; +import * as request from 'request-promise'; +import * as qs from 'query-string'; +import { Database } from '../../database'; +import { GoogleCredentialsLoader } from './CredentialsLoader'; /** * Scopes give Google users fine granularity of control @@ -13,33 +13,23 @@ import { GoogleCredentialsLoader } from "./CredentialsLoader"; * This is the somewhat overkill list of what Dash requests * from the user. */ -const scope = [ - 'documents.readonly', - 'documents', - 'presentations', - 'presentations.readonly', - 'drive', - 'drive.file', - 'photoslibrary', - 'photoslibrary.appendonly', - 'photoslibrary.sharing', - 'userinfo.profile' -].map(relative => `https://www.googleapis.com/auth/${relative}`); +const scope = ['documents.readonly', 'documents', 'presentations', 'presentations.readonly', 'drive', 'drive.file', 'photoslibrary', 'photoslibrary.appendonly', 'photoslibrary.sharing', 'userinfo.profile'].map( + relative => `https://www.googleapis.com/auth/${relative}` +); /** * This namespace manages server side authentication for Google API queries, either * from the standard v1 APIs or the Google Photos REST API. */ export namespace GoogleApiServerUtils { - /** * As we expand out to more Google APIs that are accessible from * the 'googleapis' module imported above, this enum will record * the list and provide a unified string representation of each API. 
 */
     export enum Service {
-        Documents = "Documents",
-        Slides = "Slides",
+        Documents = 'Documents',
+        Slides = 'Slides',
     }
     /**
@@ -51,7 +41,7 @@ export namespace GoogleApiServerUtils {
     let oAuthOptions: OAuth2ClientOptions;
     /**
-     * This is a global authorization client that is never 
+     * This is a global authorization client that is never
      * passed around, and whose credentials are never set.
      * Its job is purely to generate new authentication urls
      * (users will follow to get to Google's permissions GUI)
@@ -64,7 +54,7 @@ export namespace GoogleApiServerUtils {
      * This function is called once before the server is started,
      * reading in Dash's project-specific credentials (client secret
      * and client id) for later repeated access. It also sets up the
-     * global, intentionally unauthenticated worker OAuth2 client instance. 
+     * global, intentionally unauthenticated worker OAuth2 client instance.
      */
     export function processProjectCredentials(): void {
         const { client_secret, client_id, redirect_uris } = GoogleCredentialsLoader.ProjectCredentials;
@@ -72,7 +62,7 @@ export namespace GoogleApiServerUtils {
         oAuthOptions = {
             clientId: client_id,
             clientSecret: client_secret,
-            redirectUri: redirect_uris[0]
+            redirectUri: redirect_uris[0],
         };
         worker = generateClient();
     }
@@ -98,7 +88,7 @@ export namespace GoogleApiServerUtils {
      * A literal union type indicating the valid actions for these 'googleapis'
      * requests
      */
-    export type Action = "create" | "retrieve" | "update";
+    export type Action = 'create' | 'retrieve' | 'update';
     /**
      * An interface defining any entity on which one can invoke
@@ -135,7 +125,7 @@ export namespace GoogleApiServerUtils {
                 return resolve();
             }
             let routed: Opt<Endpoint>;
-            const parameters: any = { auth, version: "v1" };
+            const parameters: any = { auth, version: 'v1' };
             switch (sector) {
                 case Service.Documents:
                     routed = google.docs(parameters).documents;
@@ -165,7 +155,7 @@ export namespace GoogleApiServerUtils {
         }
         let client = authenticationClients.get(userId);
         if (!client) {
-            authenticationClients.set(userId, client = generateClient(credentials));
+            authenticationClients.set(userId, (client = generateClient(credentials)));
         } else if (refreshed) {
             client.setCredentials(credentials);
         }
@@ -206,11 +196,11 @@ export namespace GoogleApiServerUtils {
      * with a Dash user in the googleAuthentication table of the database.
      * @param authenticationCode the Google-provided authentication code that the user copied
      * from Google's permissions UI and pasted into the overlay.
-     * 
+     *
      * EXAMPLE CODE: 4/sgF2A5uGg4xASHf7VQDnLtdqo3mUlfQqLSce_HYz5qf1nFtHj9YTeGs
-     * 
+     *
      * @returns the information necessary to authenticate a client side google photos request
-     * and display basic user information in the overlay on successful authentication. 
+     * and display basic user information in the overlay on successful authentication.
      * This can be expanded as needed by adding properties to the interface GoogleAuthenticationResult.
      */
     export async function processNewUser(userId: string, authenticationCode: string): Promise<EnrichedCredentials> {
@@ -231,7 +221,7 @@ export namespace GoogleApiServerUtils {
     /**
      * This type represents the union of the full set of OAuth2 credentials
      * and all of a Google user's publicly available information. This is the structure
-     * of the JSON object we ultimately store in the googleAuthentication table of the database. 
+     * of the JSON object we ultimately store in the googleAuthentication table of the database.
*/ export type EnrichedCredentials = Credentials & { userInfo: UserInfo }; @@ -259,14 +249,14 @@ export namespace GoogleApiServerUtils { * It's pretty cool: the credentials id_token is split into thirds by periods. * The middle third contains a base64-encoded JSON string with all the * user info contained in the interface below. So, we isolate that middle third, - * base64 decode with atob and parse the JSON. + * base64 decode with atob and parse the JSON. * @param credentials the client credentials returned from OAuth after the user * has executed the authentication routine * @returns the full set of credentials in the structure in which they'll be stored * in the database. */ function injectUserInfo(credentials: Credentials): EnrichedCredentials { - const userInfo: UserInfo = JSON.parse(atob(credentials.id_token!.split(".")[1])); + const userInfo: UserInfo = JSON.parse(atob(credentials.id_token!.split('.')[1])); return { ...credentials, userInfo }; } @@ -279,7 +269,7 @@ export namespace GoogleApiServerUtils { * @returns the credentials, or undefined if the user has no stored associated credentials, * and a flag indicating whether or not they were refreshed during retrieval */ - export async function retrieveCredentials(userId: string): Promise<{ credentials: Opt<EnrichedCredentials>, refreshed: boolean }> { + export async function retrieveCredentials(userId: string): Promise<{ credentials: Opt<EnrichedCredentials>; refreshed: boolean }> { let credentials = await Database.Auxiliary.GoogleAccessToken.Fetch(userId); let refreshed = false; if (!credentials) { @@ -299,7 +289,7 @@ export namespace GoogleApiServerUtils { * the Dash user id passed in. In addition to returning the credentials, it * writes the diff to the database. * @param credentials the credentials - * @param userId the id of the Dash user implicitly requesting that + * @param userId the id of the Dash user implicitly requesting that * his/her credentials be refreshed * @returns the updated credentials */ @@ -310,19 +300,18 @@ export namespace GoogleApiServerUtils { refreshToken: credentials.refresh_token, client_id, client_secret, - grant_type: "refresh_token" + grant_type: 'refresh_token', })}`; const { access_token, expires_in } = await new Promise<any>(async resolve => { const response = await request.post(url, headerParameters); resolve(JSON.parse(response)); }); // expires_in is in seconds, but we're building the new expiry date in milliseconds - const expiry_date = new Date().getTime() + (expires_in * 1000); + const expiry_date = new Date().getTime() + expires_in * 1000; await Database.Auxiliary.GoogleAccessToken.Update(userId, access_token, expiry_date); // update the relevant properties credentials.access_token = access_token; credentials.expiry_date = expiry_date; return credentials; } - -}
\ No newline at end of file +} diff --git a/src/server/authentication/AuthenticationManager.ts b/src/server/authentication/AuthenticationManager.ts index 52d876e95..b1b84c300 100644 --- a/src/server/authentication/AuthenticationManager.ts +++ b/src/server/authentication/AuthenticationManager.ts @@ -3,12 +3,12 @@ import { Request, Response, NextFunction } from 'express'; import * as passport from 'passport'; import { IVerifyOptions } from 'passport-local'; import './Passport'; -import flash = require('express-flash'); import * as async from 'async'; import * as nodemailer from 'nodemailer'; -import c = require('crypto'); +import * as c from 'crypto'; import { emptyFunction, Utils } from '../../Utils'; import { MailOptions } from 'nodemailer/lib/stream-transport'; +import { check, validationResult } from 'express-validator'; /** * GET /signup @@ -31,14 +31,14 @@ export let getSignup = (req: Request, res: Response) => { */ export let postSignup = (req: Request, res: Response, next: NextFunction) => { const email = req.body.email as String; - req.assert('email', 'Email is not valid').isEmail(); - req.assert('password', 'Password must be at least 4 characters long').len({ min: 4 }); - req.assert('confirmPassword', 'Passwords do not match').equals(req.body.password); - req.sanitize('email').normalizeEmail({ gmail_remove_dots: false }); + check('email', 'Email is not valid').isEmail().run(req); + check('password', 'Password must be at least 4 characters long').isLength({ min: 4 }).run(req); + check('confirmPassword', 'Passwords do not match').equals(req.body.password).run(req); + check('email').normalizeEmail({ gmail_remove_dots: false }).run(req); - const errors = req.validationErrors(); + const errors = validationResult(req).array(); - if (errors) { + if (errors.length) { return res.redirect('/signup'); } @@ -47,7 +47,7 @@ export let postSignup = (req: Request, res: Response, next: NextFunction) => { const model = { email, password, - userDocumentId: email === 'guest' ? '__guest__' : Utils.GenerateGuid(), + userDocumentId: email === 'guest' ? Utils.GuestID() : Utils.GenerateGuid(), sharingDocumentId: email === 'guest' ? 2 : Utils.GenerateGuid(), linkDatabaseId: email === 'guest' ? 
3 : Utils.GenerateGuid(), cacheDocumentIds: '', @@ -55,25 +55,21 @@ export let postSignup = (req: Request, res: Response, next: NextFunction) => { const user = new User(model); - User.findOne({ email }, (err: any, existingUser: any) => { - if (err) { - return next(err); - } - if (existingUser) { - return res.redirect('/login'); - } - user.save((err: any) => { - if (err) { - return next(err); + User.findOne({ email }) + .then(existingUser => { + if (existingUser) { + return res.redirect('/login'); } - req.logIn(user, err => { - if (err) { - return next(err); - } - tryRedirectToTarget(req, res); - }); - }); - }); + user.save() + .then(() => { + req.logIn(user, err => { + if (err) return next(err); + tryRedirectToTarget(req, res); + }); + }) + .catch(err => next(err)); + }) + .catch(err => next(err)); }; const tryRedirectToTarget = (req: Request, res: Response) => { @@ -107,16 +103,18 @@ export let getLogin = (req: Request, res: Response) => { */ export let postLogin = (req: Request, res: Response, next: NextFunction) => { if (req.body.email === '') { - User.findOne({ email: 'guest' }, (err: any, user: DashUserModel) => !user && initializeGuest()); + User.findOne({ email: 'guest' }) + .then(user => !user && initializeGuest()) + .catch(err => err); req.body.email = 'guest'; req.body.password = 'guest'; } else { - req.assert('email', 'Email is not valid').isEmail(); - req.assert('password', 'Password cannot be blank').notEmpty(); - req.sanitize('email').normalizeEmail({ gmail_remove_dots: false }); + check('email', 'Email is not valid').isEmail().run(req); + check('password', 'Password cannot be blank').notEmpty().run(req); + check('email').normalizeEmail({ gmail_remove_dots: false }).run(req); } - if (req.validationErrors()) { + if (validationResult(req).array().length) { req.flash('errors', 'Unable to login at this time. Please try again.'); return res.redirect('/signup'); } @@ -146,16 +144,10 @@ export let postLogin = (req: Request, res: Response, next: NextFunction) => { * and destroys the user's current session. 
*/ export let getLogout = (req: Request, res: Response) => { - req.logout(emptyFunction); - const sess = req.session; - if (sess) { - sess.destroy(err => { - if (err) { - console.log(err); - } - }); - } - res.redirect('/login'); + req.logout(err => { + if (err) console.log(err); + else res.redirect('/login'); + }); }; export let getForgot = function (req: Request, res: Response) { @@ -179,7 +171,7 @@ export let postForgot = function (req: Request, res: Response, next: NextFunctio }); }, function (token: string, done: any) { - User.findOne({ email }, function (err: any, user: DashUserModel) { + User.findOne({ email }).then(user => { if (!user) { // NO ACCOUNT WITH SUBMITTED EMAIL res.redirect('/forgotPassword'); @@ -187,9 +179,7 @@ export let postForgot = function (req: Request, res: Response, next: NextFunctio } user.passwordResetToken = token; user.passwordResetExpires = new Date(Date.now() + 3600000); // 1 HOUR - user.save(function (err: any) { - done(null, token, user); - }); + user.save().then(() => done(null, token, user)); }); }, function (token: Uint16Array, user: DashUserModel, done: any) { @@ -228,50 +218,43 @@ export let postForgot = function (req: Request, res: Response, next: NextFunctio }; export let getReset = function (req: Request, res: Response) { - User.findOne({ passwordResetToken: req.params.token, passwordResetExpires: { $gt: Date.now() } }, function (err: any, user: DashUserModel) { - if (!user || err) { - return res.redirect('/forgotPassword'); - } - res.render('reset.pug', { - title: 'Reset Password', - user: req.user, - }); - }); + User.findOne({ passwordResetToken: req.params.token, passwordResetExpires: { $gt: Date.now() } }) + .then(user => { + if (!user) return res.redirect('/forgotPassword'); + res.render('reset.pug', { + title: 'Reset Password', + user: req.user, + }); + }) + .catch(err => res.redirect('/forgotPassword')); }; export let postReset = function (req: Request, res: Response) { async.waterfall( [ function (done: any) { - User.findOne({ passwordResetToken: req.params.token, passwordResetExpires: { $gt: Date.now() } }, function (err: any, user: DashUserModel) { - if (!user || err) { - return res.redirect('back'); - } + User.findOne({ passwordResetToken: req.params.token, passwordResetExpires: { $gt: Date.now() } }) + .then(user => { + if (!user) return res.redirect('back'); - req.assert('password', 'Password must be at least 4 characters long').len({ min: 4 }); - req.assert('confirmPassword', 'Passwords do not match').equals(req.body.password); + check('password', 'Password must be at least 4 characters long').isLength({ min: 4 }).run(req); + check('confirmPassword', 'Passwords do not match').equals(req.body.password).run(req); - if (req.validationErrors()) { - return res.redirect('back'); - } + if (validationResult(req).array().length) return res.redirect('back'); - user.password = req.body.password; - user.passwordResetToken = undefined; - user.passwordResetExpires = undefined; + user.password = req.body.password; + user.passwordResetToken = undefined; + user.passwordResetExpires = undefined; - user.save(function (err) { - if (err) { - res.redirect('/login'); - return; - } - req.logIn(user, function (err) { - if (err) { - return; - } - }); + user.save() + .then( + () => (req as any).logIn(user), + (err: any) => err + ) + .catch(err => res.redirect('/login')); done(null, user); - }); - }); + }) + .catch(err => res.redirect('back')); }, function (user: DashUserModel, done: any) { const smtpTransport = nodemailer.createTransport({ @@ -287,9 +270,8 @@ 
export let postReset = function (req: Request, res: Response) { subject: 'Your password has been changed', text: 'Hello,\n\n' + 'This is a confirmation that the password for your account ' + user.email + ' has just been changed.\n', } as MailOptions; - smtpTransport.sendMail(mailOptions, function (err) { - done(null, err); - }); + + smtpTransport.sendMail(mailOptions, err => done(null, err)); }, ], function (err) { diff --git a/src/server/authentication/DashUserModel.ts b/src/server/authentication/DashUserModel.ts index a1883beab..dbb7a79ed 100644 --- a/src/server/authentication/DashUserModel.ts +++ b/src/server/authentication/DashUserModel.ts @@ -2,6 +2,7 @@ import * as bcrypt from 'bcrypt-nodejs'; //@ts-ignore import * as mongoose from 'mongoose'; +import { Utils } from '../../Utils'; export type DashUserModel = mongoose.Document & { email: String; @@ -25,7 +26,7 @@ export type DashUserModel = mongoose.Document & { comparePassword: comparePasswordFunction; }; -type comparePasswordFunction = (candidatePassword: string, cb: (err: any, isMatch: any) => {}) => void; +type comparePasswordFunction = (candidatePassword: string, cb: (err: any, isMatch: any) => void) => void; export type AuthToken = { accessToken: string; @@ -63,7 +64,7 @@ const userSchema = new mongoose.Schema( * Password hash middleware. */ userSchema.pre('save', function save(next) { - const user = this as DashUserModel; + const user = this as any as DashUserModel; if (!user.isModified('password')) { return next(); } @@ -101,7 +102,7 @@ export function initializeGuest() { new User({ email: 'guest', password: 'guest', - userDocumentId: '__guest__', + userDocumentId: Utils.GuestID(), sharingDocumentId: '2', linkDatabaseId: '3', cacheDocumentIds: '', diff --git a/src/server/authentication/Passport.ts b/src/server/authentication/Passport.ts index d7f891c34..a9cf6698b 100644 --- a/src/server/authentication/Passport.ts +++ b/src/server/authentication/Passport.ts @@ -1,6 +1,6 @@ import * as passport from 'passport'; import * as passportLocal from 'passport-local'; -import { default as User } from './DashUserModel'; +import { DashUserModel, default as User } from './DashUserModel'; const LocalStrategy = passportLocal.Strategy; @@ -9,21 +9,24 @@ passport.serializeUser<any, any>((req, user, done) => { }); passport.deserializeUser<any, any>((id, done) => { - User.findById(id, (err: any, user: any) => { - done(err, user); - }); + User.findById(id) + .exec() + .then(user => done(undefined, user)); }); // AUTHENTICATE JUST WITH EMAIL AND PASSWORD -passport.use(new LocalStrategy({ usernameField: 'email', passReqToCallback: true }, (req, email, password, done) => { - User.findOne({ email: email.toLowerCase() }, (error: any, user: any) => { - if (error) return done(error); - if (!user) return done(undefined, false, { message: "Invalid email or password" }); // invalid email - user.comparePassword(password, (error: Error, isMatch: boolean) => { - if (error) return done(error); - if (!isMatch) return done(undefined, false, { message: "Invalid email or password" }); // invalid password - // valid authentication HERE - return done(undefined, user); - }); - }); -}));
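The validation changes above (postSignup, postLogin, postReset) move from the deprecated req.assert/req.sanitize API that express-validator used to mount on the request object to the standalone check()/validationResult() pair. One subtlety: check(...).run(req) is asynchronous, and results are only guaranteed to be recorded once the returned promise settles, while the handlers above fire the checks without awaiting them. A minimal sketch of the awaited form, with hypothetical route wiring:

import { Request, Response, NextFunction } from 'express';
import { check, validationResult } from 'express-validator';

// Each check() builds a validation chain; nothing executes until run(req)
// runs the chain and stores its outcome on the request object.
async function validateSignup(req: Request, res: Response, next: NextFunction) {
    await Promise.all([
        check('email', 'Email is not valid').isEmail().run(req),
        check('password', 'Password must be at least 4 characters long').isLength({ min: 4 }).run(req),
        check('confirmPassword', 'Passwords do not match').equals(req.body.password).run(req),
    ]);
    const errors = validationResult(req);
    if (!errors.isEmpty()) {
        return res.redirect('/signup'); // errors.array() carries { msg, param, ... } details
    }
    next();
}

// Hypothetical wiring: app.post('/signup', validateSignup, postSignup);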
\ No newline at end of file +passport.use( + new LocalStrategy({ usernameField: 'email', passReqToCallback: true }, (req, email, password, done) => { + User.findOne({ email: email.toLowerCase() }) + .then(user => { + if (!user) return done(undefined, false, { message: 'Invalid email or password' }); // invalid email + (user as any as DashUserModel).comparePassword(password, (error: Error, isMatch: boolean) => { + if (error) return done(error); + if (!isMatch) return done(undefined, false, { message: 'Invalid email or password' }); // invalid password + // valid authentication HERE + return done(undefined, user); + }); + }) + .catch(error => done(error)); + }) +); diff --git a/src/server/database.ts b/src/server/database.ts index 725b66836..3a087ce38 100644 --- a/src/server/database.ts +++ b/src/server/database.ts @@ -9,8 +9,12 @@ import { Transferable } from './Message'; import { Upload } from './SharedMediaTypes'; export namespace Database { - export let disconnect: Function; + + class DocSchema implements mongodb.BSON.Document { + _id!: string; + id!: string; + } const schema = 'Dash'; const port = 27017; export const url = `mongodb://localhost:${port}/${schema}`; @@ -26,7 +30,7 @@ export namespace Database { export async function tryInitializeConnection() { try { const { connection } = mongoose; - disconnect = async () => new Promise<any>(resolve => connection.close(resolve)); + disconnect = async () => new Promise<any>(resolve => connection.close().then(resolve)); if (connection.readyState === ConnectionStates.disconnected) { await new Promise<void>((resolve, reject) => { connection.on('error', reject); @@ -35,12 +39,11 @@ export namespace Database { resolve(); }); mongoose.connect(url, { - useNewUrlParser: true, - useUnifiedTopology: true, + //useNewUrlParser: true, dbName: schema, // reconnectTries: Number.MAX_VALUE, // reconnectInterval: 1000, - }); + }); }); } } catch (e) { @@ -60,11 +63,10 @@ export namespace Database { async doConnect() { console.error(`\nConnecting to Mongo with URL : ${url}\n`); return new Promise<void>(resolve => { - this.MongoClient.connect(url, { connectTimeoutMS: 30000, socketTimeoutMS: 30000, useUnifiedTopology: true }, (_err, client) => { - console.error("mongo connect response\n"); + this.MongoClient.connect(url, { connectTimeoutMS: 30000, socketTimeoutMS: 30000 }).then(client => { + console.error('mongo connect response\n'); if (!client) { - console.error("\nMongo connect failed with the error:\n"); - console.log(_err); + console.error('\nMongo connect failed with the error:\n'); process.exit(0); } this.db = client.db(); @@ -74,20 +76,24 @@ export namespace Database { }); } - public async update(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateWriteOpResult) => void, upsert = true, collectionName = DocumentsCollection) { + public async update(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateResult) => void, upsert = true, collectionName = DocumentsCollection) { if (this.db) { - const collection = this.db.collection(collectionName); + const collection = this.db.collection<DocSchema>(collectionName); const prom = this.currentWrites[id]; let newProm: Promise<void>; const run = (): Promise<void> => { return new Promise<void>(resolve => { - collection.updateOne({ _id: id }, value, { upsert } - , (err, res) => { + collection + .updateOne({ _id: id }, value, { upsert }) + .then(res => { if (this.currentWrites[id] === newProm) { delete this.currentWrites[id]; } resolve(); - callback(err, res); + 
callback(undefined as any, res); + }) + .catch(error => { + console.log('Mongo UPDATE ONE ERROR:', error); }); }); }; @@ -99,21 +105,20 @@ export namespace Database { } } - public replace(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateWriteOpResult) => void, upsert = true, collectionName = DocumentsCollection) { + public replace(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateResult<mongodb.Document>) => void, upsert = true, collectionName = DocumentsCollection) { if (this.db) { - const collection = this.db.collection(collectionName); + const collection = this.db.collection<DocSchema>(collectionName); const prom = this.currentWrites[id]; let newProm: Promise<void>; const run = (): Promise<void> => { return new Promise<void>(resolve => { - collection.replaceOne({ _id: id }, value, { upsert } - , (err, res) => { - if (this.currentWrites[id] === newProm) { - delete this.currentWrites[id]; - } - resolve(); - callback(err, res); - }); + collection.replaceOne({ _id: id }, value, { upsert }).then(res => { + if (this.currentWrites[id] === newProm) { + delete this.currentWrites[id]; + } + resolve(); + callback(undefined as any, res as any); + }); }); }; newProm = prom ? prom.then(run) : run(); @@ -135,15 +140,20 @@ export namespace Database { return collectionNames; } - public delete(query: any, collectionName?: string): Promise<mongodb.DeleteWriteOpResultObject>; - public delete(id: string, collectionName?: string): Promise<mongodb.DeleteWriteOpResultObject>; + public delete(query: any, collectionName?: string): Promise<mongodb.DeleteResult>; + public delete(id: string, collectionName?: string): Promise<mongodb.DeleteResult>; public delete(id: any, collectionName = DocumentsCollection) { - if (typeof id === "string") { + if (typeof id === 'string') { id = { _id: id }; } if (this.db) { const db = this.db; - return new Promise(res => db.collection(collectionName).deleteMany(id, (err, result) => res(result))); + return new Promise(res => + db + .collection(collectionName) + .deleteMany(id) + .then(result => res(result)) + ); } else { return new Promise(res => this.onConnect.push(() => res(this.delete(id, collectionName)))); } @@ -170,22 +180,27 @@ export namespace Database { public async insert(value: any, collectionName = DocumentsCollection) { if (this.db && value !== null) { - if ("id" in value) { + if ('id' in value) { value._id = value.id; delete value.id; } const id = value._id; - const collection = this.db.collection(collectionName); + const collection = this.db.collection<DocSchema>(collectionName); const prom = this.currentWrites[id]; let newProm: Promise<void>; const run = (): Promise<void> => { return new Promise<void>(resolve => { - collection.insertOne(value, (err, res) => { - if (this.currentWrites[id] === newProm) { - delete this.currentWrites[id]; - } - resolve(); - }); + collection + .insertOne(value) + .then(res => { + if (this.currentWrites[id] === newProm) { + delete this.currentWrites[id]; + } + resolve(); + }) + .catch(err => { + console.log('Mongo INSERT ERROR: ', err); + }); }); }; newProm = prom ?
prom.then(run) : run(); @@ -198,11 +213,12 @@ public getDocument(id: string, fn: (result?: Transferable) => void, collectionName = DocumentsCollection) { if (this.db) { - this.db.collection(collectionName).findOne({ _id: id }, (err, result) => { + const collection = this.db.collection<DocSchema>(collectionName); + collection.findOne({ _id: id }).then(result => { if (result) { result.id = result._id; - delete result._id; - fn(result); + //delete result._id; + fn(result as any); } else { fn(undefined); } @@ -212,19 +228,19 @@ } } - public getDocuments(ids: string[], fn: (result: Transferable[]) => void, collectionName = DocumentsCollection) { + public async getDocuments(ids: string[], fn: (result: Transferable[]) => void, collectionName = DocumentsCollection) { if (this.db) { - this.db.collection(collectionName).find({ _id: { "$in": ids } }).toArray((err, docs) => { - if (err) { - console.log(err.message); - console.log(err.errmsg); - } - fn(docs.map(doc => { + const found = await this.db + .collection<DocSchema>(collectionName) + .find({ _id: { $in: ids } }) + .toArray(); + fn( + found.map((doc: any) => { doc.id = doc._id; delete doc._id; return doc; - })); - }); + }) + ); } else { this.onConnect.push(() => this.getDocuments(ids, fn, collectionName)); } @@ -257,15 +273,15 @@ } } - public query(query: { [key: string]: any }, projection?: { [key: string]: 0 | 1 }, collectionName = DocumentsCollection): Promise<mongodb.Cursor> { + public query(query: { [key: string]: any }, projection?: { [key: string]: 0 | 1 }, collectionName = DocumentsCollection): Promise<mongodb.FindCursor> { if (this.db) { - let cursor = this.db.collection(collectionName).find(query); + let cursor = this.db.collection<DocSchema>(collectionName).find(query); if (projection) { cursor = cursor.project(projection); } - return Promise.resolve<mongodb.Cursor>(cursor); + return Promise.resolve<mongodb.FindCursor>(cursor); } else { - return new Promise<mongodb.Cursor>(res => { + return new Promise<mongodb.FindCursor>(res => { this.onConnect.push(() => res(this.query(query, projection, collectionName))); }); } @@ -274,22 +290,36 @@ public updateMany(query: any, update: any, collectionName = DocumentsCollection) { if (this.db) { const db = this.db; - return new Promise<mongodb.WriteOpResult>(res => db.collection(collectionName).update(query, update, (_, result) => res(result))); + return new Promise<mongodb.UpdateResult>(res => + db + .collection(collectionName) + .updateMany(query, update) + .then(result => res(result)) + .catch(error => { + console.log('Mongo UPDATE MANY ERROR:', error); + }) + ); } else { - return new Promise<mongodb.WriteOpResult>(res => { - this.onConnect.push(() => this.updateMany(query, update, collectionName).then(res)); + return new Promise<mongodb.UpdateResult>(res => { + this.onConnect.push(() => + this.updateMany(query, update, collectionName) + .then(res) + .catch(error => { + console.log('Mongo UPDATE MANY ERROR: ', error); + }) + ); }); } } public print() { - console.log("db says hi!"); + console.log('db says hi!'); } } function getDatabase() { switch (process.env.DB) { - case "MEM": + case 'MEM': return new MemoryDatabase(); default: return new Database(); @@ -304,13 +334,12 @@ * or Dash-internal user data.
*/ export namespace Auxiliary { - /** * All the auxiliary MongoDB collections (schemas) */ export enum AuxiliaryCollections { - GooglePhotosUploadHistory = "uploadedFromGooglePhotos", - GoogleAccess = "googleAuthentication", + GooglePhotosUploadHistory = 'uploadedFromGooglePhotos', + GoogleAccess = 'googleAuthentication', } /** @@ -322,16 +351,18 @@ export namespace Database { const cursor = await Instance.query(query, undefined, collection); const results = await cursor.toArray(); const slice = results.slice(0, Math.min(cap, results.length)); - return removeId ? slice.map(result => { - delete result._id; - return result; - }) : slice; + return removeId + ? slice.map((result: any) => { + delete result._id; + return result; + }) + : slice; }; /** * Searches for the @param query in the specified @param collection, * and returns at most the first result. If @param removeId is true, - * as it is by default, each object will be stripped of its database id. + * as it is by default, each object will be stripped of its database id. * Worth the special case since it converts the Array return type to a single * object of the specified type. */ @@ -341,7 +372,7 @@ export namespace Database { }; /** - * Checks to see if an image with the given @param contentSize + * Checks to see if an image with the given @param contentSize * already exists in the aux database, i.e. has already been downloaded from Google Photos. */ export const QueryUploadHistory = async (contentSize: number) => { @@ -355,7 +386,7 @@ export namespace Database { export const LogUpload = async (information: Upload.ImageInformation) => { const bundle = { _id: Utils.GenerateDeterministicGuid(String(information.contentSize)), - ...information + ...information, }; return Instance.insert(bundle, AuxiliaryCollections.GooglePhotosUploadHistory); }; @@ -365,7 +396,6 @@ export namespace Database { * facilitates interactions with all their APIs for a given account. */ export namespace GoogleAccessToken { - /** * Format stored in database. */ @@ -373,7 +403,7 @@ export namespace Database { /** * Retrieves the credentials associated with @param userId - * and optionally removes their database id according to @param removeId. + * and optionally removes their database id according to @param removeId. */ export const Fetch = async (userId: string, removeId = true): Promise<Opt<StoredCredentials>> => { return SanitizedSingletonQuery<StoredCredentials>({ userId }, AuxiliaryCollections.GoogleAccess, removeId); @@ -381,7 +411,7 @@ export namespace Database { }; /** * Writes the @param enrichedCredentials to the database, associated - * with @param userId for later retrieval and updating. + * with @param userId for later retrieval and updating. */ export const Write = async (userId: string, enrichedCredentials: GoogleApiServerUtils.EnrichedCredentials) => { return Instance.insert({ userId, canAccess: [], ...enrichedCredentials }, AuxiliaryCollections.GoogleAccess); @@ -400,7 +430,7 @@ export namespace Database { }; /** - * Revokes the credentials associated with @param userId. + * Revokes the credentials associated with @param userId.
*/ export const Revoke = async (userId: string) => { const entry = await Fetch(userId, false); @@ -408,9 +438,6 @@ export namespace Database { Instance.delete({ _id: entry._id }, AuxiliaryCollections.GoogleAccess); } }; - } - } - } diff --git a/src/server/downsize.ts b/src/server/downsize.ts deleted file mode 100644 index 382994e2d..000000000 --- a/src/server/downsize.ts +++ /dev/null @@ -1,40 +0,0 @@ -import * as fs from 'fs'; -import * as sharp from 'sharp'; - -const folder = "./src/server/public/files/"; -const pngTypes = ["png", "PNG"]; -const jpgTypes = ["jpg", "JPG", "jpeg", "JPEG"]; -const smallResizer = sharp().resize(100); -fs.readdir(folder, async (err, files) => { - if (err) { - console.log("readdir:" + err); - return; - } - // files.forEach(file => { - // if (file.includes("_s") || file.includes("_m") || file.includes("_l")) { - // fs.unlink(folder + file, () => { }); - // } - // }); - for (const file of files) { - const filesplit = file.split("."); - const resizers = [ - { resizer: sharp().resize(100, undefined, { withoutEnlargement: true }), suffix: "_s" }, - { resizer: sharp().resize(400, undefined, { withoutEnlargement: true }), suffix: "_m" }, - { resizer: sharp().resize(900, undefined, { withoutEnlargement: true }), suffix: "_l" }, - ]; - if (pngTypes.some(type => file.endsWith(type))) { - resizers.forEach(element => { - element.resizer = element.resizer.png(); - }); - } else if (jpgTypes.some(type => file.endsWith(type))) { - resizers.forEach(element => { - element.resizer = element.resizer.jpeg(); - }); - } else { - continue; - } - resizers.forEach(resizer => { - fs.createReadStream(folder + file).pipe(resizer.resizer).pipe(fs.createWriteStream(folder + filesplit[0] + resizer.suffix + "." + filesplit[1])); - }); - } -});
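The deleted downsize.ts above batch-generated three renditions of every image (widths 100/400/900 with suffixes _s/_m/_l) by piping read streams through sharp resizers. For comparison, a promise-based sketch of the same idea using sharp's toFile(); the folder layout and extension filtering are assumptions carried over from the deleted script, not part of this diff's code:

import * as fs from 'fs';
import * as path from 'path';
import * as sharp from 'sharp';

// Same rendition table as the deleted script: small/medium/large widths and suffixes.
const renditions = [
    { width: 100, suffix: '_s' },
    { width: 400, suffix: '_m' },
    { width: 900, suffix: '_l' },
];

async function downsize(folder: string): Promise<void> {
    const files = await fs.promises.readdir(folder);
    for (const file of files) {
        const ext = path.extname(file).toLowerCase();
        if (!['.png', '.jpg', '.jpeg'].includes(ext)) continue;
        const base = path.basename(file, ext);
        await Promise.all(
            renditions.map(({ width, suffix }) =>
                sharp(path.join(folder, file))
                    // withoutEnlargement preserves aspect ratio and leaves images
                    // already narrower than the target untouched, matching the
                    // options the deleted script passed to its stream resizers.
                    .resize(width, undefined, { withoutEnlargement: true })
                    .toFile(path.join(folder, `${base}${suffix}${ext}`))
            )
        );
    }
}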
\ No newline at end of file diff --git a/src/server/index.ts b/src/server/index.ts index 8b2e18847..47c37c9f0 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -1,4 +1,4 @@ -require('dotenv').config(); +import * as dotenv from 'dotenv'; import { yellow } from 'colors'; import * as mobileDetect from 'mobile-detect'; import * as path from 'path'; @@ -8,8 +8,7 @@ import DataVizManager from './ApiManagers/DataVizManager'; import DeleteManager from './ApiManagers/DeleteManager'; import DownloadManager from './ApiManagers/DownloadManager'; import GeneralGoogleManager from './ApiManagers/GeneralGoogleManager'; -import GooglePhotosManager from './ApiManagers/GooglePhotosManager'; -import PDFManager from './ApiManagers/PDFManager'; +//import GooglePhotosManager from './ApiManagers/GooglePhotosManager'; import { SearchManager } from './ApiManagers/SearchManager'; import SessionManager from './ApiManagers/SessionManager'; import UploadManager from './ApiManagers/UploadManager'; @@ -26,7 +25,7 @@ import { Logger } from './ProcessFactory'; import RouteManager, { Method, PublicHandler } from './RouteManager'; import RouteSubscriber from './RouteSubscriber'; import initializeServer, { resolvedPorts } from './server_Initialization'; - +dotenv.config(); export const AdminPriviliges: Map<string, boolean> = new Map(); export const onWindows = process.platform === 'win32'; export let sessionAgent: AppliedSessionAgent; @@ -64,19 +63,7 @@ async function preliminaryFunctions() { * with the server */ function routeSetter({ isRelease, addSupervisedRoute, logRegistrationOutcome }: RouteManager) { - const managers = [ - new SessionManager(), - new UserManager(), - new UploadManager(), - new DownloadManager(), - new SearchManager(), - new PDFManager(), - new DeleteManager(), - new UtilManager(), - new GeneralGoogleManager(), - new GooglePhotosManager(), - new DataVizManager(), - ]; + const managers = [new SessionManager(), new UserManager(), new UploadManager(), new DownloadManager(), new SearchManager(), new DeleteManager(), new UtilManager(), new GeneralGoogleManager(), /* new GooglePhotosManager(),*/ new DataVizManager()]; // initialize API Managers console.log(yellow('\nregistering server routes...')); diff --git a/src/server/server_Initialization.ts b/src/server/server_Initialization.ts index ccb709453..afc6231e5 100644 --- a/src/server/server_Initialization.ts +++ b/src/server/server_Initialization.ts @@ -1,13 +1,9 @@ import * as bodyParser from 'body-parser'; import { blue, yellow } from 'colors'; -import * as cookieParser from 'cookie-parser'; import * as cors from 'cors'; import * as express from 'express'; import * as session from 'express-session'; -import * as expressValidator from 'express-validator'; -import * as fs from 'fs'; -import { Server as HttpServer } from 'http'; -import { createServer, Server as HttpsServer } from 'https'; +import { createServer } from 'https'; import * as passport from 'passport'; import * as request from 'request'; import * as webpack from 'webpack'; @@ -22,12 +18,11 @@ import { Database } from './database'; import RouteManager from './RouteManager'; import RouteSubscriber from './RouteSubscriber'; import { WebSocket } from './websocket'; -import brotli = require('brotli'); -import expressFlash = require('express-flash'); -import flash = require('connect-flash'); -const MongoStore = require('connect-mongo')(session); -const config = require('../../webpack.config'); -const compiler = webpack(config); +import * as expressFlash from 'express-flash'; +import * as 
flash from 'connect-flash'; +import * as brotli from 'brotli'; +import * as MongoStoreConnect from 'connect-mongo'; +import * as config from '../../webpack.config'; /* RouteSetter is a wrapper around the server that prevents the server from being exposed. */ @@ -40,32 +35,23 @@ export let resolvedServerUrl: string; export default async function InitializeServer(routeSetter: RouteSetter) { const isRelease = determineEnvironment(); const app = buildWithMiddleware(express()); - - const compiler = webpack(config); - - app.use( - require('webpack-dev-middleware')(compiler, { - publicPath: config.output.publicPath, - }) - ); - - app.use(require('webpack-hot-middleware')(compiler)); + const compiler = webpack(config as any); // route table managed by express. routes are tested sequentially against each of these map rules. when a match is found, the handler is called to process the request + app.use(wdm(compiler, { publicPath: config.output.publicPath })); + app.use(whm(compiler)); app.get(new RegExp(/^\/+$/), (req, res) => res.redirect(req.user ? '/home' : '/login')); // target urls that consist of one or more '/'s with nothing in between app.use(express.static(publicDirectory, { setHeaders: res => res.setHeader('Access-Control-Allow-Origin', '*') })); //all urls that start with dash's public directory: /files/ (e.g., /files/images, /files/audio, etc) app.use(cors({ origin: (_origin: any, callback: any) => callback(null, true) })); - app.use(wdm(compiler, { publicPath: config.output.publicPath })); - app.use(whm(compiler)); registerAuthenticationRoutes(app); // this adds routes to authenticate a user (login, etc) registerCorsProxy(app); // this adds a /corsProxy/ route to allow clients to get to urls that would otherwise be blocked by cors policies isRelease && !SSL.Loaded && SSL.exit(); routeSetter(new RouteManager(app, isRelease)); // this sets up all the regular supervised routes (things like /home, download/upload api's, pdf, search, session, etc) registerEmbeddedBrowseRelativePathHandler(app); // this allows rendered web pages which internally have relative paths to find their content - let server: HttpServer | HttpsServer; isRelease && process.env.serverPort && (resolvedPorts.server = Number(process.env.serverPort)); - await new Promise<void>(resolve => (server = isRelease ? createServer(SSL.Credentials, app).listen(resolvedPorts.server, resolve) : app.listen(resolvedPorts.server, resolve))); + const server = isRelease ? createServer(SSL.Credentials, app) : app; + await new Promise<void>(resolve => server.listen(resolvedPorts.server, resolve)); logPort('server', resolvedPorts.server); resolvedServerUrl = `${isRelease && process.env.serverName ? `https://${process.env.serverName}.com` : 'http://localhost'}:${resolvedPorts.server}`; @@ -80,26 +66,26 @@ export default async function InitializeServer(routeSetter: RouteSetter) { const week = 7 * 24 * 60 * 60 * 1000; const secret = '64d6866242d3b5a5503c675b32c9605e4e90478e9b77bcf2bc'; +const store = process.env.DB === 'MEM' ? new session.MemoryStore() : MongoStoreConnect.create({ mongoUrl: Database.url }); function buildWithMiddleware(server: express.Express) { [ - cookieParser(), session({ secret, - resave: true, + resave: false, cookie: { maxAge: week }, saveUninitialized: true, - store: process.env.DB === 'MEM' ?
new session.MemoryStore() : new MongoStore({ url: Database.url }), + store, }), flash(), expressFlash(), bodyParser.json({ limit: '10mb' }), bodyParser.urlencoded({ extended: true }), - expressValidator(), passport.initialize(), passport.session(), (req: express.Request, res: express.Response, next: express.NextFunction) => { res.locals.user = req.user; + // console.log('HEADER:' + req.originalUrl + ' path = ' + req.path); if ((req.originalUrl.endsWith('.png') || req.originalUrl.endsWith('.jpg') || (process.env.RELEASE === 'true' && req.originalUrl.endsWith('.js'))) && req.method === 'GET') { const period = 30000; res.set('Cache-control', `public, max-age=${period}`); @@ -110,6 +96,7 @@ function buildWithMiddleware(server: express.Express) { next(); }, ].forEach(next => server.use(next)); + return server; } diff --git a/src/server/websocket.ts b/src/server/websocket.ts index be5cdb202..a26b81bdf 100644 --- a/src/server/websocket.ts +++ b/src/server/websocket.ts @@ -1,10 +1,8 @@ import { blue } from 'colors'; import * as express from 'express'; -import { createServer, Server } from 'https'; +import { createServer } from 'https'; +import { Server, Socket } from '../../node_modules/socket.io/dist/index'; import { networkInterfaces } from 'os'; -import * as sio from 'socket.io'; -import { Socket } from 'socket.io'; -import { Opt } from '../fields/Doc'; import { Utils } from '../Utils'; import { logPort } from './ActionUtilities'; import { timeMap } from './ApiManagers/UserManager'; @@ -18,31 +16,31 @@ import { DocumentsCollection } from './IDatabase'; import { Diff, GestureContent, MessageStore, MobileDocumentUploadContent, MobileInkOverlayContent, Transferable, Types, UpdateMobileInkOverlayPositionContent, YoutubeQueryInput, YoutubeQueryTypes } from './Message'; import { Search } from './Search'; import { resolvedPorts } from './server_Initialization'; -var _ = require('lodash'); +import * as _ from 'lodash'; export namespace WebSocket { export let _socket: Socket; export const clients: { [key: string]: Client } = {}; - export const socketMap = new Map<SocketIO.Socket, string>(); + export const socketMap = new Map<Socket, string>(); export const userOperations = new Map<string, number>(); export let disconnect: Function; export async function initialize(isRelease: boolean, app: express.Express) { - let io: sio.Server; + let io: Server; if (isRelease) { const { socketPort } = process.env; if (socketPort) { resolvedPorts.socket = Number(socketPort); } - let socketEndpoint: Opt<Server>; - await new Promise<void>(resolve => (socketEndpoint = createServer(SSL.Credentials, app).listen(resolvedPorts.socket, resolve))); - io = sio(socketEndpoint!, SSL.Credentials as any); + io = new Server(createServer(SSL.Credentials, app), SSL.Credentials as any); + io.listen(resolvedPorts.socket); } else { - io = sio().listen(resolvedPorts.socket); + io = new Server(); + io.listen(resolvedPorts.socket); } logPort('websocket', resolvedPorts.socket); - io.on('connection', function (socket: Socket) { + io.on('connection', socket => { _socket = socket; socket.use((_packet, next) => { const userEmail = socketMap.get(socket); @@ -67,8 +65,8 @@ export namespace WebSocket { socket.on('create or join', function (room) { console.log('Received request to create or join room ' + room); - const clientsInRoom = socket.adapter.rooms[room]; - const numClients = clientsInRoom ? Object.keys(clientsInRoom.sockets).length : 0; + const clientsInRoom = socket.rooms.has(room); + const numClients = clientsInRoom ? 
Object.keys(room.sockets).length : 0; console.log('Room ' + room + ' now has ' + numClients + ' client(s)'); if (numClients === 0) { @@ -90,7 +88,7 @@ export namespace WebSocket { socket.on('ipaddr', function () { const ifaces = networkInterfaces(); for (const dev in ifaces) { - ifaces[dev].forEach(function (details) { + ifaces[dev]?.forEach(function (details) { if (details.family === 'IPv4' && details.address !== '127.0.0.1') { socket.emit('ipaddr', details.address); } @@ -191,7 +189,7 @@ export namespace WebSocket { initializeGuest(); } - function barReceived(socket: SocketIO.Socket, userEmail: string) { + function barReceived(socket: Socket, userEmail: string) { clients[userEmail] = new Client(userEmail.toString()); const currentdate = new Date(); const datetime = currentdate.getDate() + '/' + (currentdate.getMonth() + 1) + '/' + currentdate.getFullYear() + ' @ ' + currentdate.getHours() + ':' + currentdate.getMinutes() + ':' + currentdate.getSeconds(); @@ -308,9 +306,9 @@ export namespace WebSocket { if (sendBack) { console.log('Warning: list modified during update. Composite list is being returned.'); const id = socket.id; - socket.id = ''; + (socket as any).id = ''; socket.broadcast.emit(MessageStore.UpdateField.Message, diff); - socket.id = id; + (socket as any).id = id; } else socket.broadcast.emit(MessageStore.UpdateField.Message, diff); dispatchNextOp(diff.id); }, @@ -401,9 +399,9 @@ export namespace WebSocket { // the two copies are different, so the server sends its copy. console.log('SEND BACK'); const id = socket.id; - socket.id = ''; + (socket as any).id = ''; socket.broadcast.emit(MessageStore.UpdateField.Message, diff); - socket.id = id; + (socket as any).id = id; } else socket.broadcast.emit(MessageStore.UpdateField.Message, diff); dispatchNextOp(diff.id); }, |
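The 'create or join' handler above ports the old socket.adapter.rooms[room] lookup to socket.io v4, where socket.rooms is a Set of the rooms this particular socket has joined; testing socket.rooms.has(room) therefore reports the caller's membership rather than the room's occupancy, and room itself is a plain string with no .sockets property. Counting everyone in a room goes through the adapter, which in v4 tracks rooms as a Map of room names to Sets of socket ids. A sketch of that idiom, assuming io is the v4 Server instance created in initialize():

import { Server } from 'socket.io';

// Room occupancy in socket.io v4: the adapter keeps Map<Room, Set<SocketId>>,
// and a missing entry simply means the room has no members yet.
function countClientsInRoom(io: Server, room: string): number {
    return io.sockets.adapter.rooms.get(room)?.size ?? 0;
}

// Typical use inside the connection handler (wiring is illustrative):
// socket.on('create or join', room => {
//     const numClients = countClientsInRoom(io, room);
//     if (numClients === 0) socket.join(room); // first participant creates the room
// });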