Diffstat (limited to 'src/server')
-rw-r--r-- | src/server/ActionUtilities.ts | 10
-rw-r--r-- | src/server/ApiManagers/DeleteManager.ts | 2
-rw-r--r-- | src/server/ApiManagers/UploadManager.ts | 32
-rw-r--r-- | src/server/ApiManagers/UserManager.ts | 4
-rw-r--r-- | src/server/DashSession/DashSessionAgent.ts | 2
-rw-r--r-- | src/server/DashSession/Session/agents/applied_session_agent.ts | 9
-rw-r--r-- | src/server/DashSession/Session/agents/monitor.ts | 15
-rw-r--r-- | src/server/DashSession/Session/agents/server_worker.ts | 4
-rw-r--r-- | src/server/DashUploadUtils.ts | 56
-rw-r--r-- | src/server/GarbageCollector.ts | 4
-rw-r--r-- | src/server/IDatabase.ts | 12
-rw-r--r-- | src/server/MemoryDatabase.ts | 14
-rw-r--r-- | src/server/ProcessFactory.ts | 6
-rw-r--r-- | src/server/authentication/AuthenticationManager.ts | 9
-rw-r--r-- | src/server/database.ts | 63
-rw-r--r-- | src/server/server_Initialization.ts | 19
-rw-r--r-- | src/server/websocket.ts | 7
17 files changed, 138 insertions, 130 deletions
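Most of the changes below track major-version API changes in the server's dependencies — rimraf, formidable, mongoose, the MongoDB driver, connect-mongo, and Node's cluster module — with callbacks generally giving way to promises. A minimal sketch of the promise pattern, assuming rimraf 4+ and the MongoDB 4.x driver (the helper names are illustrative, not part of the diff):

import { rimraf } from 'rimraf';
import { MongoClient, ObjectId, UpdateResult } from 'mongodb';

// rimraf 4+ drops the callback overload; awaiting the promise and returning
// true mirrors what Prune() in ActionUtilities.ts now does.
async function prune(rootDirectory: string): Promise<boolean> {
    await rimraf(rootDirectory);
    return true;
}

// The 4.x MongoDB driver returns promises from updateOne/deleteMany/findOne and
// renames UpdateWriteOpResult/DeleteWriteOpResultObject/Cursor to
// UpdateResult/DeleteResult/FindCursor; database.ts also wraps string ids in ObjectId.
async function upsertDoc(client: MongoClient, id: string, value: Record<string, unknown>): Promise<UpdateResult> {
    const documents = client.db().collection('documents');
    return documents.updateOne({ _id: new ObjectId(id) }, { $set: value }, { upsert: true });
}

// mongoose likewise moves Document#save() to promises, which is why
// user.save(err => ...) becomes user.save().then(...) throughout the diff.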
diff --git a/src/server/ActionUtilities.ts b/src/server/ActionUtilities.ts index bc8fd6f87..55b50cc12 100644 --- a/src/server/ActionUtilities.ts +++ b/src/server/ActionUtilities.ts @@ -4,9 +4,9 @@ import { createWriteStream, exists, mkdir, readFile, unlink, writeFile } from 'f import * as nodemailer from "nodemailer"; import { MailOptions } from "nodemailer/lib/json-transport"; import * as path from 'path'; -import * as rimraf from "rimraf"; +import { rimraf } from "rimraf"; import { ExecOptions } from 'shelljs'; -import Mail = require('nodemailer/lib/mailer'); +import * as Mail from 'nodemailer/lib/mailer'; const projectRoot = path.resolve(__dirname, "../../"); export function pathFromRoot(relative?: string) { @@ -103,8 +103,10 @@ export const createIfNotExists = async (path: string) => { }; export async function Prune(rootDirectory: string): Promise<boolean> { - const error = await new Promise<Error>(resolve => rimraf(rootDirectory, resolve)); - return error === null; + // const error = await new Promise<Error>(resolve => rimraf(rootDirectory).then(resolve)); + await new Promise<void>(resolve => rimraf(rootDirectory).then(() => resolve())); + // return error === null; + return true; } export const Destroy = (mediaPath: string) => new Promise<boolean>(resolve => unlink(mediaPath, error => resolve(error === null))); diff --git a/src/server/ApiManagers/DeleteManager.ts b/src/server/ApiManagers/DeleteManager.ts index 46c0d8a8a..56d8aff60 100644 --- a/src/server/ApiManagers/DeleteManager.ts +++ b/src/server/ApiManagers/DeleteManager.ts @@ -2,7 +2,7 @@ import ApiManager, { Registration } from "./ApiManager"; import { Method, _permission_denied } from "../RouteManager"; import { WebSocket } from "../websocket"; import { Database } from "../database"; -import rimraf = require("rimraf"); +import { rimraf } from "rimraf"; import { filesDirectory } from ".."; import { DashUploadUtils } from "../DashUploadUtils"; import { mkdirSync } from "fs"; diff --git a/src/server/ApiManagers/UploadManager.ts b/src/server/ApiManagers/UploadManager.ts index 391f67bbb..42b674ad1 100644 --- a/src/server/ApiManagers/UploadManager.ts +++ b/src/server/ApiManagers/UploadManager.ts @@ -11,7 +11,7 @@ import RouteSubscriber from '../RouteSubscriber'; import { AcceptableMedia, Upload } from '../SharedMediaTypes'; import ApiManager, { Registration } from './ApiManager'; import { SolrManager } from './SearchManager'; -import v4 = require('uuid/v4'); +import * as v4 from 'uuid/v4'; import { DashVersion } from '../../fields/DocSymbols'; const AdmZip = require('adm-zip'); const imageDataUri = require('image-data-uri'); @@ -63,7 +63,7 @@ export default class UploadManager extends ApiManager { method: Method.POST, subscription: '/uploadFormData', secureHandler: async ({ req, res }) => { - const form = new formidable.IncomingForm(); + const form = new formidable.IncomingForm({ keepExtensions: true, uploadDir: pathToDirectory(Directory.parsed_files) }); let fileguids = ''; let filesize = ''; form.on('field', (e: string, value: string) => { @@ -77,19 +77,19 @@ export default class UploadManager extends ApiManager { fileguids.split(';').map(guid => DashUploadUtils.uploadProgress.set(guid, `upload starting`)); form.on('progress', e => fileguids.split(';').map(guid => DashUploadUtils.uploadProgress.set(guid, `read:(${Math.round((100 * +e) / +filesize)}%) ${e} of ${filesize}`))); - form.keepExtensions = true; - form.uploadDir = pathToDirectory(Directory.parsed_files); return new Promise<void>(resolve => { form.parse(req, async (_err, 
_fields, files) => { const results: Upload.FileResponse[] = []; if (_err?.message) { results.push({ source: { + filepath: '', + originalFilename: 'none', + newFilename: 'none', + mimetype: 'text', size: 0, - path: 'none', - name: 'none', - type: 'none', - toJSON: () => ({ name: 'none', path: '' }), + hashAlgorithm: 'md5', + toJSON: () => ({ name: 'none', size: 0, length: 0, mtime: new Date(), filepath: '', originalFilename: 'none', newFilename: 'none', mimetype: 'text' }), }, result: { name: 'failed upload', message: `${_err.message}` }, }); @@ -98,8 +98,8 @@ export default class UploadManager extends ApiManager { for (const key in files) { const f = files[key]; - if (!Array.isArray(f)) { - const result = await DashUploadUtils.upload(f, key); // key is the guid used by the client to track upload progress. + if (f) { + const result = await DashUploadUtils.upload(f[0], key); // key is the guid used by the client to track upload progress. result && !(result.result instanceof Error) && results.push(result); } } @@ -197,8 +197,7 @@ export default class UploadManager extends ApiManager { method: Method.POST, subscription: '/uploadDoc', secureHandler: ({ req, res }) => { - const form = new formidable.IncomingForm(); - form.keepExtensions = true; + const form = new formidable.IncomingForm({ keepExtensions: true }); // let path = req.body.path; const ids: { [id: string]: string } = {}; let remap = true; @@ -245,15 +244,16 @@ export default class UploadManager extends ApiManager { }; return new Promise<void>(resolve => { form.parse(req, async (_err, fields, files) => { - remap = fields.remap !== 'false'; + remap = Object.keys(fields).some(key => key === 'remap' && !fields.remap?.includes('false')); //.remap !== 'false'; // bcz: looking to see if the field 'remap' is set to 'false' let id: string = ''; let docids: string[] = []; let linkids: string[] = []; try { for (const name in files) { const f = files[name]; - const path_2 = Array.isArray(f) ? '' : f.path; - const zip = new AdmZip(path_2); + if (!f) continue; + const path_2 = f[0]; // what about the rest of the array? are we guaranteed only one value is set? 
+ const zip = new AdmZip(path_2.filepath); zip.getEntries().forEach((entry: any) => { let entryName = entry.entryName.replace(/%%%/g, '/'); if (!entryName.startsWith('files/')) { @@ -299,7 +299,7 @@ export default class UploadManager extends ApiManager { } catch (e) { console.log(e); } - unlink(path_2, () => {}); + unlink(path_2.filepath, () => {}); } SolrManager.update(); res.send(JSON.stringify({ id, docids, linkids } || 'error')); diff --git a/src/server/ApiManagers/UserManager.ts b/src/server/ApiManagers/UserManager.ts index 8b7994eac..9252202b0 100644 --- a/src/server/ApiManagers/UserManager.ts +++ b/src/server/ApiManagers/UserManager.ts @@ -32,7 +32,7 @@ export default class UserManager extends ApiManager { secureHandler: async ({ user, req, res }) => { const result: any = {}; user.cacheDocumentIds = req.body.cacheDocumentIds; - user.save(err => { + user.save().then(undefined, err => { if (err) { result.error = [{ msg: 'Error while caching documents' }]; } @@ -125,7 +125,7 @@ export default class UserManager extends ApiManager { user.passwordResetExpires = undefined; } - user.save(err => { + user.save().then(undefined, err => { if (err) { result.error = [{ msg: 'Error while saving new password' }]; } diff --git a/src/server/DashSession/DashSessionAgent.ts b/src/server/DashSession/DashSessionAgent.ts index 2450798aa..1ef7a131d 100644 --- a/src/server/DashSession/DashSessionAgent.ts +++ b/src/server/DashSession/DashSessionAgent.ts @@ -8,7 +8,7 @@ import { launchServer, onWindows } from '..'; import { readdirSync, statSync, createWriteStream, readFileSync, unlinkSync } from 'fs'; import * as Archiver from 'archiver'; import { resolve } from 'path'; -import * as rimraf from 'rimraf'; +import { rimraf } from 'rimraf'; import { AppliedSessionAgent, ExitHandler } from './Session/agents/applied_session_agent'; import { ServerWorker } from './Session/agents/server_worker'; import { Monitor } from './Session/agents/monitor'; diff --git a/src/server/DashSession/Session/agents/applied_session_agent.ts b/src/server/DashSession/Session/agents/applied_session_agent.ts index 8339a06dc..2037e93e5 100644 --- a/src/server/DashSession/Session/agents/applied_session_agent.ts +++ b/src/server/DashSession/Session/agents/applied_session_agent.ts @@ -1,7 +1,8 @@ -import { isMaster } from "cluster"; +import * as _cluster from "cluster"; import { Monitor } from "./monitor"; import { ServerWorker } from "./server_worker"; -import { Utilities } from "../utilities/utilities"; +const cluster = _cluster as any; +const isMaster = cluster.isPrimary; export type ExitHandler = (reason: Error | boolean) => void | Promise<void>; @@ -15,13 +16,13 @@ export abstract class AppliedSessionAgent { private launched = false; public killSession = (reason: string, graceful = true, errorCode = 0) => { - const target = isMaster ? this.sessionMonitor : this.serverWorker; + const target = cluster.default.isPrimary ? 
this.sessionMonitor : this.serverWorker; target.killSession(reason, graceful, errorCode); } private sessionMonitorRef: Monitor | undefined; public get sessionMonitor(): Monitor { - if (!isMaster) { + if (!cluster.default.isPrimary) { this.serverWorker.emit("kill", { graceful: false, reason: "Cannot access the session monitor directly from the server worker thread.", diff --git a/src/server/DashSession/Session/agents/monitor.ts b/src/server/DashSession/Session/agents/monitor.ts index 9cb5ab576..0f469285e 100644 --- a/src/server/DashSession/Session/agents/monitor.ts +++ b/src/server/DashSession/Session/agents/monitor.ts @@ -1,7 +1,8 @@ import { ExitHandler } from "./applied_session_agent"; import { Configuration, configurationSchema, defaultConfig, Identifiers, colorMapping } from "../utilities/session_config"; import Repl, { ReplAction } from "../utilities/repl"; -import { isWorker, setupMaster, on, Worker, fork } from "cluster"; +import * as _cluster from "cluster"; +import { Worker } from "cluster"; import { manage, MessageHandler, ErrorLike } from "./promisified_ipc_manager"; import { red, cyan, white, yellow, blue } from "colors"; import { exec, ExecOptions } from "child_process"; @@ -10,6 +11,11 @@ import { Utilities } from "../utilities/utilities"; import { readFileSync } from "fs"; import IPCMessageReceiver from "./process_message_router"; import { ServerWorker } from "./server_worker"; +const cluster = _cluster as any; +const isWorker = cluster.isWorker; +const setupMaster = cluster.setupPrimary; +const on = cluster.on; +const fork = cluster.fork; /** * Validates and reads the configuration file, accordingly builds a child process factory @@ -20,7 +26,7 @@ export class Monitor extends IPCMessageReceiver { private finalized = false; private exitHandlers: ExitHandler[] = []; private readonly config: Configuration; - private activeWorker: Worker | undefined; + private activeWorker: Worker| undefined; private key: string | undefined; private repl: Repl; @@ -281,8 +287,11 @@ export class Monitor extends IPCMessageReceiver { pollingIntervalSeconds: polling.intervalSeconds, session_key: key }); - Monitor.IPCManager = manage(this.activeWorker.process, this.handlers); + if (this.activeWorker) { + Monitor.IPCManager = manage(this.activeWorker.process, this.handlers); + } this.mainLog(cyan(`spawned new server worker with process id ${this.activeWorker?.process.pid}`)); + } } diff --git a/src/server/DashSession/Session/agents/server_worker.ts b/src/server/DashSession/Session/agents/server_worker.ts index 634b0113d..d8b3ee80b 100644 --- a/src/server/DashSession/Session/agents/server_worker.ts +++ b/src/server/DashSession/Session/agents/server_worker.ts @@ -1,4 +1,4 @@ -import { isMaster } from "cluster"; +import cluster from "cluster"; import { green, red, white, yellow } from "colors"; import { get } from "request-promise"; import { ExitHandler } from "./applied_session_agent"; @@ -22,7 +22,7 @@ export class ServerWorker extends IPCMessageReceiver { private serverPort: number; private isInitialized = false; public static Create(work: Function) { - if (isMaster) { + if (cluster.isPrimary) { console.error(red("cannot create a worker on the monitor process.")); process.exit(1); } else if (++ServerWorker.count > 1) { diff --git a/src/server/DashUploadUtils.ts b/src/server/DashUploadUtils.ts index fa29cb7c1..643626ae9 100644 --- a/src/server/DashUploadUtils.ts +++ b/src/server/DashUploadUtils.ts @@ -119,14 +119,14 @@ export namespace DashUploadUtils { }; } - function resolveExistingFile(name: 
string, pat: string, directory: Directory, type?: string, duration?: number, rawText?: string) { - const data = { size: 0, path: path.basename(pat), name, type: type ?? '' }; - const file = { ...data, toJSON: () => ({ ...data, filename: data.path.replace(/.*\//, ''), mtime: duration?.toString(), mime: '', toJson: () => undefined as any }) }; + function resolveExistingFile(name: string, pat: string, directory: Directory, type?: string | null, duration?: number, rawText?: string): Upload.FileResponse<Upload.FileInformation> { + const data = { size: 0, filepath: path.basename(pat), name, type: type ?? '', originalFilename: name, newFilename: name, mimetype: '', hashAlgorithm: 'md5' as 'md5' }; + const file = { ...data, toJSON: () => ({ ...data, length: 0, filename: data.filepath.replace(/.*\//, ''), mtime: new Date(), mime: '', toJson: () => undefined as any }) }; return { source: file, result: { accessPaths: { - agnostic: getAccessPaths(directory, data.path), + agnostic: getAccessPaths(directory, data.filepath), }, rawText, duration, @@ -144,8 +144,8 @@ export namespace DashUploadUtils { export function uploadYoutube(videoId: string, overwriteId: string): Promise<Upload.FileResponse> { return new Promise<Upload.FileResponse<Upload.FileInformation>>((res, rej) => { const name = videoId; - const path = name.replace(/^-/, '__') + '.mp4'; - const finalPath = serverPathToFile(Directory.videos, path); + const filepath = name.replace(/^-/, '__') + '.mp4'; + const finalPath = serverPathToFile(Directory.videos, filepath); if (existsSync(finalPath)) { uploadProgress.set(overwriteId, 'computing duration'); exec(`yt-dlp -o ${finalPath} "https://www.youtube.com/watch?v=${videoId}" --get-duration`, (error: any, stdout: any, stderr: any) => { @@ -155,7 +155,7 @@ export namespace DashUploadUtils { }); } else { uploadProgress.set(overwriteId, 'starting download'); - const ytdlp = spawn(`yt-dlp`, ['-o', path, `https://www.youtube.com/watch?v=${videoId}`, '--max-filesize', '100M', '-f', 'mp4']); + const ytdlp = spawn(`yt-dlp`, ['-o', filepath, `https://www.youtube.com/watch?v=${videoId}`, '--max-filesize', '100M', '-f', 'mp4']); ytdlp.stdout.on('data', (data: any) => uploadProgress.set(overwriteId, data.toString())); @@ -170,20 +170,20 @@ export namespace DashUploadUtils { res({ source: { size: 0, - path, - name, + path: filepath, + newFilename: name, type: '', - toJSON: () => ({ name, path }), + toJSON: () => ({ newFilename: name, filepath }), }, result: { name: 'failed youtube query', message: `Could not archive video. ${code ? errors : uploadProgress.get(videoId)}` }, }); } else { uploadProgress.set(overwriteId, 'computing duration'); - exec(`yt-dlp-o ${path} "https://www.youtube.com/watch?v=${videoId}" --get-duration`, (error: any, stdout: any, stderr: any) => { + exec(`yt-dlp-o ${filepath} "https://www.youtube.com/watch?v=${videoId}" --get-duration`, (error: any, stdout: any, stderr: any) => { const time = Array.from(stdout.trim().split(':')).reverse(); const duration = (time.length > 2 ? Number(time[2]) * 1000 * 60 : 0) + (time.length > 1 ? Number(time[1]) * 60 : 0) + (time.length > 0 ? 
Number(time[0]) : 0); - const data = { size: 0, path, name, type: 'video/mp4' }; - const file = { ...data, toJSON: () => ({ ...data, filename: data.path.replace(/.*\//, ''), mtime: duration.toString(), mime: '', toJson: () => undefined as any }) }; + const data = { size: 0, filepath, name, mimetype: '', originalFilename: name, newFilename: name, hashAlgorithm: 'md5' as 'md5', type: 'video/mp4' }; + const file = { ...data, toJSON: () => ({ ...data, length: 0, filename: data.filepath.replace(/.*\//, ''), mtime: new Date(), mime: '', hashAlgorithm: 'md5' as 'md5', toJson: () => undefined as any }) }; res(MoveParsedFile(file, Directory.videos)); }); } @@ -194,7 +194,7 @@ export namespace DashUploadUtils { export async function upload(file: File, overwriteGuid?: string): Promise<Upload.FileResponse> { const isAzureOn = usingAzure(); - const { type, path, name } = file; + const { mimetype: type, filepath, originalFilename } = file; const types = type?.split('/') ?? []; // uploadProgress.set(overwriteGuid ?? name, 'uploading'); // If the client sent a guid it uses to track upload progress, use that guid. Otherwise, use the file's name. @@ -205,7 +205,7 @@ export namespace DashUploadUtils { switch (category) { case 'image': if (imageFormats.includes(format)) { - const result = await UploadImage(path, basename(path)); + const result = await UploadImage(filepath, basename(filepath)); return { source: file, result }; } fs.unlink(path, () => {}); @@ -214,19 +214,19 @@ export namespace DashUploadUtils { if (format.includes('x-matroska')) { console.log('case video'); await new Promise(res => - ffmpeg(file.path) + ffmpeg(file.filepath) .videoCodec('copy') // this will copy the data instead of reencode it - .save(file.path.replace('.mkv', '.mp4')) + .save(file.filepath.replace('.mkv', '.mp4')) .on('end', res) .on('error', (e: any) => console.log(e)) ); - file.path = file.path.replace('.mkv', '.mp4'); + file.filepath = file.filepath.replace('.mkv', '.mp4'); format = '.mp4'; } if (format.includes('quicktime')) { let abort = false; await new Promise<void>(res => - ffmpeg.ffprobe(file.path, (err: any, metadata: any) => { + ffmpeg.ffprobe(file.filepath, (err: any, metadata: any) => { if (metadata.streams.some((stream: any) => stream.codec_name === 'hevc')) { abort = true; } @@ -280,18 +280,18 @@ export namespace DashUploadUtils { } async function UploadPdf(file: File) { - const fileKey = (await md5File(file.path)) + '.pdf'; + const fileKey = (await md5File(file.filepath)) + '.pdf'; const textFilename = `${fileKey.substring(0, fileKey.length - 4)}.txt`; if (fExists(fileKey, Directory.pdfs) && fExists(textFilename, Directory.text)) { - fs.unlink(file.path, () => {}); + fs.unlink(file.filepath, () => {}); return new Promise<Upload.FileResponse>(res => { const textFilename = `${fileKey.substring(0, fileKey.length - 4)}.txt`; const readStream = createReadStream(serverPathToFile(Directory.text, textFilename)); var rawText = ''; - readStream.on('data', chunk => (rawText += chunk.toString())).on('end', () => res(resolveExistingFile(file.name, fileKey, Directory.pdfs, file.type, undefined, rawText))); + readStream.on('data', chunk => (rawText += chunk.toString())).on('end', () => res(resolveExistingFile(file.originalFilename ?? 
'', fileKey, Directory.pdfs, file.mimetype, undefined, rawText))); }); } - const dataBuffer = readFileSync(file.path); + const dataBuffer = readFileSync(file.filepath); const result: ParsedPDF | any = await parse(dataBuffer).catch((e: any) => e); if (!result.code) { await new Promise<void>((resolve, reject) => { @@ -300,11 +300,11 @@ export namespace DashUploadUtils { }); return MoveParsedFile(file, Directory.pdfs, undefined, result?.text, undefined, fileKey); } - return { source: file, result: { name: 'faile pdf pupload', message: `Could not upload (${file.name}).${result.message}` } }; + return { source: file, result: { name: 'faile pdf pupload', message: `Could not upload (${file.originalFilename}).${result.message}` } }; } async function UploadCsv(file: File) { - const { path: sourcePath } = file; + const { filepath: sourcePath } = file; // read the file as a string const data = readFileSync(sourcePath, 'utf8'); // split the string into an array of lines @@ -462,12 +462,12 @@ export namespace DashUploadUtils { * to appear in the new location */ export async function MoveParsedFile(file: formidable.File, destination: Directory, suffix: string | undefined = undefined, text?: string, duration?: number, targetName?: string): Promise<Upload.FileResponse> { - const { path: sourcePath } = file; - let name = targetName ?? path.basename(sourcePath); + const { filepath } = file; + let name = targetName ?? path.basename(filepath); suffix && (name += suffix); return new Promise(resolve => { const destinationPath = serverPathToFile(destination, name); - rename(sourcePath, destinationPath, error => { + rename(filepath, destinationPath, error => { resolve({ source: file, result: error diff --git a/src/server/GarbageCollector.ts b/src/server/GarbageCollector.ts index c60880882..423c719c2 100644 --- a/src/server/GarbageCollector.ts +++ b/src/server/GarbageCollector.ts @@ -65,7 +65,7 @@ async function GarbageCollect(full: boolean = true) { // await new Promise(res => setTimeout(res, 3000)); const cursor = await Database.Instance.query({}, { userDocumentId: 1 }, 'users'); const users = await cursor.toArray(); - const ids: string[] = [...users.map(user => user.userDocumentId), ...users.map(user => user.sharingDocumentId), ...users.map(user => user.linkDatabaseId)]; + const ids: string[] = [...users.map((user:any) => user.userDocumentId), ...users.map((user:any) => user.sharingDocumentId), ...users.map((user:any) => user.linkDatabaseId)]; const visited = new Set<string>(); const files: { [name: string]: string[] } = {}; @@ -95,7 +95,7 @@ async function GarbageCollect(full: boolean = true) { const notToDelete = Array.from(visited); const toDeleteCursor = await Database.Instance.query({ _id: { $nin: notToDelete } }, { _id: 1 }); - const toDelete: string[] = (await toDeleteCursor.toArray()).map(doc => doc._id); + const toDelete: string[] = (await toDeleteCursor.toArray()).map((doc:any) => doc._id); toDeleteCursor.close(); if (!full) { await Database.Instance.updateMany({ _id: { $nin: notToDelete } }, { $set: { "deleted": true } }); diff --git a/src/server/IDatabase.ts b/src/server/IDatabase.ts index dd4968579..2274792b3 100644 --- a/src/server/IDatabase.ts +++ b/src/server/IDatabase.ts @@ -3,13 +3,13 @@ import { Transferable } from './Message'; export const DocumentsCollection = 'documents'; export interface IDatabase { - update(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateWriteOpResult) => void, upsert?: boolean, collectionName?: string): Promise<void>; - updateMany(query: 
any, update: any, collectionName?: string): Promise<mongodb.WriteOpResult>; + update(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateResult) => void, upsert?: boolean, collectionName?: string): Promise<void>; + updateMany(query: any, update: any, collectionName?: string): Promise<mongodb.UpdateResult>; - replace(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateWriteOpResult) => void, upsert?: boolean, collectionName?: string): void; + replace(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateResult) => void, upsert?: boolean, collectionName?: string): void; - delete(query: any, collectionName?: string): Promise<mongodb.DeleteWriteOpResultObject>; - delete(id: string, collectionName?: string): Promise<mongodb.DeleteWriteOpResultObject>; + delete(query: any, collectionName?: string): Promise<mongodb.DeleteResult>; + delete(id: string, collectionName?: string): Promise<mongodb.DeleteResult>; dropSchema(...schemaNames: string[]): Promise<any>; @@ -20,5 +20,5 @@ export interface IDatabase { getCollectionNames(): Promise<string[]>; visit(ids: string[], fn: (result: any) => string[] | Promise<string[]>, collectionName?: string): Promise<void>; - query(query: { [key: string]: any }, projection?: { [key: string]: 0 | 1 }, collectionName?: string): Promise<mongodb.Cursor>; + query(query: { [key: string]: any }, projection?: { [key: string]: 0 | 1 }, collectionName?: string): Promise<mongodb.FindCursor>; } diff --git a/src/server/MemoryDatabase.ts b/src/server/MemoryDatabase.ts index d2d8bb3b3..b74332bf5 100644 --- a/src/server/MemoryDatabase.ts +++ b/src/server/MemoryDatabase.ts @@ -19,7 +19,7 @@ export class MemoryDatabase implements IDatabase { return Promise.resolve(Object.keys(this.db)); } - public update(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateWriteOpResult) => void, _upsert?: boolean, collectionName = DocumentsCollection): Promise<void> { + public update(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateResult) => void, _upsert?: boolean, collectionName = DocumentsCollection): Promise<void> { const collection = this.getCollection(collectionName); const set = "$set"; if (set in value) { @@ -45,17 +45,17 @@ export class MemoryDatabase implements IDatabase { return Promise.resolve(undefined); } - public updateMany(query: any, update: any, collectionName = DocumentsCollection): Promise<mongodb.WriteOpResult> { + public updateMany(query: any, update: any, collectionName = DocumentsCollection): Promise<mongodb.UpdateResult> { throw new Error("Can't updateMany a MemoryDatabase"); } - public replace(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateWriteOpResult) => void, upsert?: boolean, collectionName = DocumentsCollection): void { + public replace(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateResult) => void, upsert?: boolean, collectionName = DocumentsCollection): void { this.update(id, value, callback, upsert, collectionName); } - public delete(query: any, collectionName?: string): Promise<mongodb.DeleteWriteOpResultObject>; - public delete(id: string, collectionName?: string): Promise<mongodb.DeleteWriteOpResultObject>; - public delete(id: any, collectionName = DocumentsCollection): Promise<mongodb.DeleteWriteOpResultObject> { + public delete(query: any, collectionName?: string): Promise<mongodb.DeleteResult>; + public delete(id: string, collectionName?: string): 
Promise<mongodb.DeleteResult>; + public delete(id: any, collectionName = DocumentsCollection): Promise<mongodb.DeleteResult> { const i = id.id ?? id; delete this.getCollection(collectionName)[i]; @@ -105,7 +105,7 @@ export class MemoryDatabase implements IDatabase { } } - public query(): Promise<mongodb.Cursor> { + public query(): Promise<mongodb.FindCursor> { throw new Error("Can't query a MemoryDatabase"); } } diff --git a/src/server/ProcessFactory.ts b/src/server/ProcessFactory.ts index 63682368f..f69eda4c3 100644 --- a/src/server/ProcessFactory.ts +++ b/src/server/ProcessFactory.ts @@ -2,7 +2,7 @@ import { ChildProcess, spawn, StdioOptions } from "child_process"; import { existsSync, mkdirSync } from "fs"; import { Stream } from "stream"; import { fileDescriptorFromStream, pathFromRoot } from './ActionUtilities'; -import rimraf = require("rimraf"); +import { rimraf } from "rimraf"; export namespace ProcessFactory { @@ -13,7 +13,7 @@ export namespace ProcessFactory { const log_fd = await Logger.create(command, args); stdio = ["ignore", log_fd, log_fd]; } - const child = spawn(command, args, { detached, stdio }); + const child = spawn(command, args ?? [], { detached, stdio }); child.unref(); return child; } @@ -27,7 +27,7 @@ export namespace Logger { export async function initialize() { if (existsSync(logPath)) { if (!process.env.SPAWNED) { - await new Promise<any>(resolve => rimraf(logPath, resolve)); + await new Promise<any>(resolve => rimraf(logPath).then(resolve)); } } mkdirSync(logPath); diff --git a/src/server/authentication/AuthenticationManager.ts b/src/server/authentication/AuthenticationManager.ts index 52d876e95..74d8d2523 100644 --- a/src/server/authentication/AuthenticationManager.ts +++ b/src/server/authentication/AuthenticationManager.ts @@ -3,10 +3,9 @@ import { Request, Response, NextFunction } from 'express'; import * as passport from 'passport'; import { IVerifyOptions } from 'passport-local'; import './Passport'; -import flash = require('express-flash'); import * as async from 'async'; import * as nodemailer from 'nodemailer'; -import c = require('crypto'); +import * as c from 'crypto'; import { emptyFunction, Utils } from '../../Utils'; import { MailOptions } from 'nodemailer/lib/stream-transport'; @@ -62,7 +61,7 @@ export let postSignup = (req: Request, res: Response, next: NextFunction) => { if (existingUser) { return res.redirect('/login'); } - user.save((err: any) => { + user.save().then(undefined, (err: any) => { if (err) { return next(err); } @@ -187,7 +186,7 @@ export let postForgot = function (req: Request, res: Response, next: NextFunctio } user.passwordResetToken = token; user.passwordResetExpires = new Date(Date.now() + 3600000); // 1 HOUR - user.save(function (err: any) { + user.save().then(undefined, (err: any) => { done(null, token, user); }); }); @@ -259,7 +258,7 @@ export let postReset = function (req: Request, res: Response) { user.passwordResetToken = undefined; user.passwordResetExpires = undefined; - user.save(function (err) { + user.save().then(undefined, (err:any) => { if (err) { res.redirect('/login'); return; diff --git a/src/server/database.ts b/src/server/database.ts index 725b66836..37bc00a85 100644 --- a/src/server/database.ts +++ b/src/server/database.ts @@ -7,6 +7,7 @@ import { DocumentsCollection, IDatabase } from './IDatabase'; import { MemoryDatabase } from './MemoryDatabase'; import { Transferable } from './Message'; import { Upload } from './SharedMediaTypes'; +import { ObjectId } from 'mongodb'; export namespace Database { @@ -26,7 
+27,7 @@ export namespace Database { export async function tryInitializeConnection() { try { const { connection } = mongoose; - disconnect = async () => new Promise<any>(resolve => connection.close(resolve)); + disconnect = async () => new Promise<any>(resolve => connection.close().then(resolve)); if (connection.readyState === ConnectionStates.disconnected) { await new Promise<void>((resolve, reject) => { connection.on('error', reject); @@ -35,8 +36,7 @@ export namespace Database { resolve(); }); mongoose.connect(url, { - useNewUrlParser: true, - useUnifiedTopology: true, + //useNewUrlParser: true, dbName: schema, // reconnectTries: Number.MAX_VALUE, // reconnectInterval: 1000, @@ -60,11 +60,10 @@ export namespace Database { async doConnect() { console.error(`\nConnecting to Mongo with URL : ${url}\n`); return new Promise<void>(resolve => { - this.MongoClient.connect(url, { connectTimeoutMS: 30000, socketTimeoutMS: 30000, useUnifiedTopology: true }, (_err, client) => { + this.MongoClient.connect(url, { connectTimeoutMS: 30000, socketTimeoutMS: 30000, }).then(client => { console.error("mongo connect response\n"); if (!client) { console.error("\nMongo connect failed with the error:\n"); - console.log(_err); process.exit(0); } this.db = client.db(); @@ -74,20 +73,19 @@ export namespace Database { }); } - public async update(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateWriteOpResult) => void, upsert = true, collectionName = DocumentsCollection) { + public async update(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateResult) => void, upsert = true, collectionName = DocumentsCollection) { if (this.db) { const collection = this.db.collection(collectionName); const prom = this.currentWrites[id]; let newProm: Promise<void>; const run = (): Promise<void> => { return new Promise<void>(resolve => { - collection.updateOne({ _id: id }, value, { upsert } - , (err, res) => { + collection.updateOne({ _id: new ObjectId(id) }, value, { upsert }).then(res => { if (this.currentWrites[id] === newProm) { delete this.currentWrites[id]; } resolve(); - callback(err, res); + callback(undefined as any, res); }); }); }; @@ -99,20 +97,19 @@ export namespace Database { } } - public replace(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateWriteOpResult) => void, upsert = true, collectionName = DocumentsCollection) { + public replace(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateResult<mongodb.Document>) => void, upsert = true, collectionName = DocumentsCollection) { if (this.db) { const collection = this.db.collection(collectionName); const prom = this.currentWrites[id]; let newProm: Promise<void>; const run = (): Promise<void> => { return new Promise<void>(resolve => { - collection.replaceOne({ _id: id }, value, { upsert } - , (err, res) => { + collection.replaceOne({ _id: new ObjectId(id)}, value, { upsert }).then( res => { if (this.currentWrites[id] === newProm) { delete this.currentWrites[id]; } resolve(); - callback(err, res); + callback(undefined as any, res as any); }); }); }; @@ -135,15 +132,15 @@ export namespace Database { return collectionNames; } - public delete(query: any, collectionName?: string): Promise<mongodb.DeleteWriteOpResultObject>; - public delete(id: string, collectionName?: string): Promise<mongodb.DeleteWriteOpResultObject>; + public delete(query: any, collectionName?: string): Promise<mongodb.DeleteResult>; + public delete(id: string, collectionName?: string): 
Promise<mongodb.DeleteResult>; public delete(id: any, collectionName = DocumentsCollection) { if (typeof id === "string") { - id = { _id: id }; + id = { _id: new ObjectId(id) }; } if (this.db) { const db = this.db; - return new Promise(res => db.collection(collectionName).deleteMany(id, (err, result) => res(result))); + return new Promise(res => db.collection(collectionName).deleteMany(id).then(result => res(result))); } else { return new Promise(res => this.onConnect.push(() => res(this.delete(id, collectionName)))); } @@ -180,7 +177,7 @@ export namespace Database { let newProm: Promise<void>; const run = (): Promise<void> => { return new Promise<void>(resolve => { - collection.insertOne(value, (err, res) => { + collection.insertOne(value).then(res => { if (this.currentWrites[id] === newProm) { delete this.currentWrites[id]; } @@ -198,11 +195,11 @@ export namespace Database { public getDocument(id: string, fn: (result?: Transferable) => void, collectionName = DocumentsCollection) { if (this.db) { - this.db.collection(collectionName).findOne({ _id: id }, (err, result) => { + this.db.collection(collectionName).findOne({ _id: new ObjectId(id) }).then(result => { if (result) { result.id = result._id; - delete result._id; - fn(result); + //delete result._id; + fn(result.id); } else { fn(undefined); } @@ -214,12 +211,12 @@ export namespace Database { public getDocuments(ids: string[], fn: (result: Transferable[]) => void, collectionName = DocumentsCollection) { if (this.db) { - this.db.collection(collectionName).find({ _id: { "$in": ids } }).toArray((err, docs) => { - if (err) { - console.log(err.message); - console.log(err.errmsg); - } - fn(docs.map(doc => { + this.db.collection(collectionName).find({ _id: { "$in": ids.map(id => new ObjectId(id)) } }).map(docs => { + // if (err) { + // console.log(err.message); + // console.log(err.errmsg); + // } + fn(docs.map((doc:any) => { doc.id = doc._id; delete doc._id; return doc; @@ -257,15 +254,15 @@ export namespace Database { } } - public query(query: { [key: string]: any }, projection?: { [key: string]: 0 | 1 }, collectionName = DocumentsCollection): Promise<mongodb.Cursor> { + public query(query: { [key: string]: any }, projection?: { [key: string]: 0 | 1 }, collectionName = DocumentsCollection): Promise<mongodb.FindCursor> { if (this.db) { let cursor = this.db.collection(collectionName).find(query); if (projection) { cursor = cursor.project(projection); } - return Promise.resolve<mongodb.Cursor>(cursor); + return Promise.resolve<mongodb.FindCursor>(cursor); } else { - return new Promise<mongodb.Cursor>(res => { + return new Promise<mongodb.FindCursor>(res => { this.onConnect.push(() => res(this.query(query, projection, collectionName))); }); } @@ -274,9 +271,9 @@ export namespace Database { public updateMany(query: any, update: any, collectionName = DocumentsCollection) { if (this.db) { const db = this.db; - return new Promise<mongodb.WriteOpResult>(res => db.collection(collectionName).update(query, update, (_, result) => res(result))); + return new Promise<mongodb.UpdateResult>(res => db.collection(collectionName).updateMany(query, update).then(result => res(result))); } else { - return new Promise<mongodb.WriteOpResult>(res => { + return new Promise<mongodb.UpdateResult>(res => { this.onConnect.push(() => this.updateMany(query, update, collectionName).then(res)); }); } @@ -322,7 +319,7 @@ export namespace Database { const cursor = await Instance.query(query, undefined, collection); const results = await cursor.toArray(); const slice = 
results.slice(0, Math.min(cap, results.length)); - return removeId ? slice.map(result => { + return removeId ? slice.map((result:any) => { delete result._id; return result; }) : slice; diff --git a/src/server/server_Initialization.ts b/src/server/server_Initialization.ts index ccb709453..af8b8dfdd 100644 --- a/src/server/server_Initialization.ts +++ b/src/server/server_Initialization.ts @@ -22,12 +22,11 @@ import { Database } from './database'; import RouteManager from './RouteManager'; import RouteSubscriber from './RouteSubscriber'; import { WebSocket } from './websocket'; -import brotli = require('brotli'); -import expressFlash = require('express-flash'); -import flash = require('connect-flash'); -const MongoStore = require('connect-mongo')(session); -const config = require('../../webpack.config'); -const compiler = webpack(config); +import * as brotli from 'brotli'; +import * as expressFlash from 'express-flash'; +import * as flash from 'connect-flash'; +import * as MongoStoreConnect from 'connect-mongo'; +import * as config from '../../webpack.config'; /* RouteSetter is a wrapper around the server that prevents the server from being exposed. */ @@ -40,8 +39,7 @@ export let resolvedServerUrl: string; export default async function InitializeServer(routeSetter: RouteSetter) { const isRelease = determineEnvironment(); const app = buildWithMiddleware(express()); - - const compiler = webpack(config); + const compiler = webpack(config as any); app.use( require('webpack-dev-middleware')(compiler, { @@ -89,13 +87,13 @@ function buildWithMiddleware(server: express.Express) { resave: true, cookie: { maxAge: week }, saveUninitialized: true, - store: process.env.DB === 'MEM' ? new session.MemoryStore() : new MongoStore({ url: Database.url }), + store: process.env.DB === 'MEM' ? new session.MemoryStore() : MongoStoreConnect.create({ mongoUrl: Database.url }), }), flash(), expressFlash(), bodyParser.json({ limit: '10mb' }), bodyParser.urlencoded({ extended: true }), - expressValidator(), + expressValidator.body, passport.initialize(), passport.session(), (req: express.Request, res: express.Response, next: express.NextFunction) => { @@ -110,6 +108,7 @@ function buildWithMiddleware(server: express.Express) { next(); }, ].forEach(next => server.use(next)); + return server; } diff --git a/src/server/websocket.ts b/src/server/websocket.ts index be5cdb202..4453001b0 100644 --- a/src/server/websocket.ts +++ b/src/server/websocket.ts @@ -3,7 +3,7 @@ import * as express from 'express'; import { createServer, Server } from 'https'; import { networkInterfaces } from 'os'; import * as sio from 'socket.io'; -import { Socket } from 'socket.io'; +import * as _socket from 'socket.io'; import { Opt } from '../fields/Doc'; import { Utils } from '../Utils'; import { logPort } from './ActionUtilities'; @@ -19,6 +19,7 @@ import { Diff, GestureContent, MessageStore, MobileDocumentUploadContent, Mobile import { Search } from './Search'; import { resolvedPorts } from './server_Initialization'; var _ = require('lodash'); +type Socket = typeof _socket; export namespace WebSocket { export let _socket: Socket; @@ -67,7 +68,7 @@ export namespace WebSocket { socket.on('create or join', function (room) { console.log('Received request to create or join room ' + room); - const clientsInRoom = socket.adapter.rooms[room]; + const clientsInRoom = socket.rooms.has(room); const numClients = clientsInRoom ? 
Object.keys(clientsInRoom.sockets).length : 0; console.log('Room ' + room + ' now has ' + numClients + ' client(s)'); @@ -90,7 +91,7 @@ export namespace WebSocket { socket.on('ipaddr', function () { const ifaces = networkInterfaces(); for (const dev in ifaces) { - ifaces[dev].forEach(function (details) { + ifaces[dev]?.forEach(function (details) { if (details.family === 'IPv4' && details.address !== '127.0.0.1') { socket.emit('ipaddr', details.address); } |
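For reference, the formidable v3 shape that UploadManager.ts and DashUploadUtils.ts now code against: constructor options replace the old keepExtensions/uploadDir property assignments, parsed files arrive as arrays keyed by field name, and each entry exposes filepath, originalFilename, newFilename, and mimetype in place of path, name, and type. A hedged sketch of that flow, assuming formidable v3 typings (parseUpload is an illustrative helper, not something defined in the diff):

import * as formidable from 'formidable';
import type { IncomingMessage } from 'http';

function parseUpload(req: IncomingMessage, uploadDir: string): Promise<formidable.Files> {
    // Options now go into the constructor instead of being set on the form afterwards.
    const form = new formidable.IncomingForm({ keepExtensions: true, uploadDir });
    return new Promise((resolve, reject) => {
        form.parse(req, (err, _fields, files) => {
            if (err) return reject(err);
            for (const key in files) {
                const entries = files[key]; // File[] | undefined in v3, not a single File
                const first = entries?.[0];
                if (first) {
                    // Renamed fields: path -> filepath, name -> originalFilename, type -> mimetype.
                    console.log(first.originalFilename, first.filepath, first.mimetype);
                }
            }
            resolve(files);
        });
    });
}

The same release-driven renames show up elsewhere in the diff: connect-mongo v4 builds its session store with MongoStore.create({ mongoUrl }) instead of wrapping express-session, and Node's cluster module now spells isMaster/setupMaster as isPrimary/setupPrimary.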