Diffstat (limited to 'src/server')
| Mode | File | Lines changed |
| --- | --- | --- |
| -rw-r--r-- | src/server/ApiManagers/GooglePhotosManager.ts | 6 |
| -rw-r--r-- | src/server/ApiManagers/UploadManager.ts | 14 |
| -rw-r--r-- | src/server/DashStats.ts | 1 |
| -rw-r--r-- | src/server/DashUploadUtils.ts | 65 |
| -rw-r--r-- | src/server/IDatabase.ts | 8 |
| -rw-r--r-- | src/server/MemoryDatabase.ts | 6 |
| -rw-r--r-- | src/server/Message.ts | 72 |
| -rw-r--r-- | src/server/Search.ts | 1 |
| -rw-r--r-- | src/server/SharedMediaTypes.ts | 4 |
| -rw-r--r-- | src/server/apis/google/GoogleApiServerUtils.ts | 1 |
| -rw-r--r-- | src/server/database.ts | 14 |
| -rw-r--r-- | src/server/index.ts | 1 |
| -rw-r--r-- | src/server/server_Initialization.ts | 7 |
| -rw-r--r-- | src/server/websocket.ts | 379 |
14 files changed, 176 insertions, 403 deletions
diff --git a/src/server/ApiManagers/GooglePhotosManager.ts b/src/server/ApiManagers/GooglePhotosManager.ts index 5feb25fd4..0970dee81 100644 --- a/src/server/ApiManagers/GooglePhotosManager.ts +++ b/src/server/ApiManagers/GooglePhotosManager.ts @@ -139,13 +139,13 @@ // const completed: Opt<Upload.ImageInformation>[] = []; // for (const { baseUrl } of mediaItems) { // // start by getting the content size of the remote image -// const results = await DashUploadUtils.InspectImage(baseUrl); -// if (results instanceof Error) { +// const result = await DashUploadUtils.InspectImage(baseUrl); +// if (result instanceof Error) { // // if something went wrong here, we can't hope to upload it, so just move on to the next // failed++; // continue; // } -// const { contentSize, ...attributes } = results; +// const { contentSize, ...attributes } = result; // // check to see if we have uploaded a Google user content image *specifically via this route* already // // that has this exact content size // const found: Opt<Upload.ImageInformation> = await Database.Auxiliary.QueryUploadHistory(contentSize); diff --git a/src/server/ApiManagers/UploadManager.ts b/src/server/ApiManagers/UploadManager.ts index 4cb3d8baf..b2624f654 100644 --- a/src/server/ApiManagers/UploadManager.ts +++ b/src/server/ApiManagers/UploadManager.ts @@ -144,7 +144,7 @@ export default class UploadManager extends ApiManager { ids[id] = uuid.v4(); return ids[id]; }; - const mapFn = (docIn: any) => { + const mapFn = (docIn: { id: string; fields: any[] }) => { const doc = docIn; if (doc.id) { doc.id = getId(doc.id); @@ -170,10 +170,10 @@ export default class UploadManager extends ApiManager { mapFn(field); } else if (typeof field === 'string') { const re = /("(?:dataD|d)ocumentId"\s*:\s*")([\w-]*)"/g; - doc.fields[key] = (field as any).replace(re, (match: any, p1: string, p2: string) => `${p1}${getId(p2)}"`); + doc.fields[key] = field.replace(re, (match: string, p1: string, p2: string) => `${p1}${getId(p2)}"`); } else if (field.__type === 'RichTextField') { const re = /("href"\s*:\s*")(.*?)"/g; - field.Data = field.Data.replace(re, (match: any, p1: string, p2: string) => `${p1}${getId(p2)}"`); + field.Data = field.Data.replace(re, (match: string, p1: string, p2: string) => `${p1}${getId(p2)}"`); } } }; @@ -192,7 +192,7 @@ export default class UploadManager extends ApiManager { if (!f) continue; const path2 = f[0]; // what about the rest of the array? are we guaranteed only one value is set? 
const zip = new AdmZip(path2.filepath); - zip.getEntries().forEach((entry: any) => { + zip.getEntries().forEach(entry => { const entryName = entry.entryName.replace(/%%%/g, '/'); if (!entryName.startsWith('files/')) { return; @@ -245,7 +245,7 @@ export default class UploadManager extends ApiManager { } } SolrManager.update(); - res.send(JSON.stringify({ id, docids, linkids } || 'error')); + res.send(JSON.stringify({ id, docids, linkids }) || 'error'); } catch (e) { console.log(e); } @@ -282,8 +282,8 @@ export default class UploadManager extends ApiManager { const serverPath = serverPathToFile(Directory.images, ''); const regex = new RegExp(`${deleteFiles}.*`); fs.readdirSync(serverPath) - .filter((f: any) => regex.test(f)) - .map((f: any) => fs.unlinkSync(serverPath + f)); + .filter(f => regex.test(f)) + .map(f => fs.unlinkSync(serverPath + f)); } imageDataUri.outputFile(uri, serverPathToFile(Directory.images, InjectSize(filename, origSuffix))).then((savedName: string) => { const ext = path.extname(savedName).toLowerCase(); diff --git a/src/server/DashStats.ts b/src/server/DashStats.ts index 808d2c6f2..6b9fb8971 100644 --- a/src/server/DashStats.ts +++ b/src/server/DashStats.ts @@ -9,6 +9,7 @@ import { socketMap, timeMap, userOperations } from './SocketData'; * This includes time connected, number of operations, and * the rate of their operations */ +// eslint-disable-next-line @typescript-eslint/no-namespace export namespace DashStats { export const SAMPLING_INTERVAL = 1000; // in milliseconds (ms) - Time interval to update the frontend. export const RATE_INTERVAL = 10; // in seconds (s) - Used to calculate rate diff --git a/src/server/DashUploadUtils.ts b/src/server/DashUploadUtils.ts index 08cea1de5..5e58db103 100644 --- a/src/server/DashUploadUtils.ts +++ b/src/server/DashUploadUtils.ts @@ -1,6 +1,7 @@ import axios from 'axios'; +import { spawn, exec } from 'child_process'; import { green, red } from 'colors'; -import { ExifImage } from 'exif'; +import { ExifData, ExifImage } from 'exif'; import * as exifr from 'exifr'; import * as ffmpeg from 'fluent-ffmpeg'; import * as formidable from 'formidable'; @@ -18,13 +19,11 @@ import { Duplex, Stream } from 'stream'; import { Utils } from '../Utils'; import { createIfNotExists } from './ActionUtilities'; import { AzureManager } from './ApiManagers/AzureManager'; -import { ParsedPDF } from './PdfTypes'; import { AcceptableMedia, Upload } from './SharedMediaTypes'; import { Directory, clientPathToFile, filesDirectory, pathToDirectory, publicDirectory, serverPathToFile } from './SocketData'; import { resolvedServerUrl } from './server_Initialization'; -const { spawn } = require('child_process'); -const { exec } = require('child_process'); +// eslint-disable-next-line @typescript-eslint/no-var-requires const requestImageSize = require('../client/util/request-image-size'); export enum SizeSuffix { @@ -48,6 +47,7 @@ function usingAzure() { return process.env.USE_AZURE === 'true'; } +// eslint-disable-next-line @typescript-eslint/no-namespace export namespace DashUploadUtils { export interface Size { width: number; @@ -111,7 +111,7 @@ export namespace DashUploadUtils { // .outputOptions('-c copy') // .videoCodec("copy") .save(outputFilePath) - .on('error', (err: any) => { + .on('error', err => { console.log(err); reject(); }) @@ -130,8 +130,8 @@ export namespace DashUploadUtils { } function resolveExistingFile(name: string, pat: string, directory: Directory, mimetype?: string | null, duration?: number, rawText?: string): 
Upload.FileResponse<Upload.FileInformation> { - const data = { size: 0, filepath: pat, name, type: mimetype ?? '', originalFilename: name, newFilename: path.basename(pat), mimetype: mimetype || null, hashAlgorithm: false as any }; - const file = { ...data, toJSON: () => ({ ...data, length: 0, filename: data.filepath.replace(/.*\//, ''), mtime: new Date(), mimetype: mimetype || null, toJson: () => undefined as any }) }; + const data = { size: 0, filepath: pat, name, type: mimetype ?? '', originalFilename: name, newFilename: path.basename(pat), mimetype: mimetype || null, hashAlgorithm: false as falsetype }; + const file = { ...data, toJSON: () => ({ ...data, length: 0, filename: data.filepath.replace(/.*\//, ''), mtime: new Date(), mimetype: mimetype || null }) }; return { source: file || null, result: { @@ -184,11 +184,10 @@ export namespace DashUploadUtils { const parseExifData = async (source: string) => { const image = await request.get(source, { encoding: null }); - const { /* data, */ error } = await new Promise<{ data: any; error: any }>(resolve => { + const { /* data, */ error } = await new Promise<{ data: ExifData; error: string | undefined }>(resolve => { // eslint-disable-next-line no-new new ExifImage({ image }, (exifError, data) => { - const reason = (exifError as any)?.code; - resolve({ data, error: reason }); + resolve({ data, error: exifError?.message }); }); }); return error ? { data: undefined, error } : { data: await exifr.parse(image), error }; @@ -252,11 +251,12 @@ export namespace DashUploadUtils { }; // Use the request library to parse out file level image information in the headers - const { headers } = await new Promise<any>((resolve, reject) => { - request.head(resolvedUrl, (error, res) => (error ? reject(error) : resolve(res))); + const headerResult = await new Promise<{ headers: { [key: string]: string } }>((resolve, reject) => { + request.head(resolvedUrl, (error, res) => (error ? reject(error) : resolve(res as { headers: { [key: string]: string } }))); }).catch(e => { console.log('Error processing headers: ', e); }); + const { headers } = headerResult !== null && typeof headerResult === 'object' ? headerResult : { headers: {} as { [key: string]: string } }; try { // Compute the native width and height ofthe image with an npm module @@ -272,9 +272,9 @@ export namespace DashUploadUtils { filename, ...results, }; - } catch (e: any) { + } catch (e: unknown) { console.log(e); - return e; + return new Error(e ? e.toString?.() : 'unkown error'); } }; @@ -331,7 +331,7 @@ export namespace DashUploadUtils { )); // prettier-ignore return Jimp.read(imgBuffer) - .then(async (imgIn: any) => { + .then(async imgIn => { let img = imgIn; await Promise.all( sizes.filter(({ width }) => width).map(({ width, suffix }) => { img = img.resize(width, Jimp.AUTO).write(outputPath(suffix)); @@ -339,7 +339,7 @@ export namespace DashUploadUtils { } )); // prettier-ignore return writtenFiles; }) - .catch((e: any) => { + .catch(e => { console.log('ERROR' + e); return writtenFiles; }); @@ -432,15 +432,17 @@ export namespace DashUploadUtils { * 4) the content type of the image, i.e. image/(jpeg | png | ...) 
*/ export const UploadImage = async (source: string, filename?: string, prefix: string = ''): Promise<Upload.ImageInformation | Error> => { - const metadata = await InspectImage(source); - if (metadata instanceof Error) { - return { name: metadata.name, message: metadata.message }; + const result = await InspectImage(source); + if (result instanceof Error) { + return { name: result.name, message: result.message }; } - const outputFile = filename || metadata.filename || ''; + const outputFile = filename || result.filename || ''; - return UploadInspectedImage(metadata, outputFile, prefix); + return UploadInspectedImage(result, outputFile, prefix); }; + type md5 = 'md5'; + type falsetype = false; export function uploadYoutube(videoId: string, overwriteId: string): Promise<Upload.FileResponse> { return new Promise<Upload.FileResponse<Upload.FileInformation>>(res => { const name = videoId; @@ -448,6 +450,7 @@ export namespace DashUploadUtils { const finalPath = serverPathToFile(Directory.videos, filepath); if (existsSync(finalPath)) { uploadProgress.set(overwriteId, 'computing duration'); + // eslint-disable-next-line @typescript-eslint/no-explicit-any exec(`yt-dlp -o ${finalPath} "https://www.youtube.com/watch?v=${videoId}" --get-duration`, (error: any, stdout: any /* , stderr: any */) => { const time = Array.from(stdout.trim().split(':')).reverse(); const duration = (time.length > 2 ? Number(time[2]) * 1000 * 60 : 0) + (time.length > 1 ? Number(time[1]) * 60 : 0) + (time.length > 0 ? Number(time[0]) : 0); @@ -457,14 +460,17 @@ export namespace DashUploadUtils { uploadProgress.set(overwriteId, 'starting download'); const ytdlp = spawn(`yt-dlp`, ['-o', filepath, `https://www.youtube.com/watch?v=${videoId}`, '--max-filesize', '100M', '-f', 'mp4']); + // eslint-disable-next-line @typescript-eslint/no-explicit-any ytdlp.stdout.on('data', (data: any) => uploadProgress.set(overwriteId, data.toString())); let errors = ''; + // eslint-disable-next-line @typescript-eslint/no-explicit-any ytdlp.stderr.on('data', (data: any) => { uploadProgress.set(overwriteId, 'error:' + data.toString()); errors = data.toString(); }); + // eslint-disable-next-line @typescript-eslint/no-explicit-any ytdlp.on('exit', (code: any) => { if (code) { res({ @@ -484,8 +490,8 @@ export namespace DashUploadUtils { exec(`yt-dlp-o ${filepath} "https://www.youtube.com/watch?v=${videoId}" --get-duration`, (/* error: any, stdout: any, stderr: any */) => { // const time = Array.from(stdout.trim().split(':')).reverse(); // const duration = (time.length > 2 ? Number(time[2]) * 1000 * 60 : 0) + (time.length > 1 ? Number(time[1]) * 60 : 0) + (time.length > 0 ? 
Number(time[0]) : 0); - const data = { size: 0, filepath, name, mimetype: 'video', originalFilename: name, newFilename: name, hashAlgorithm: 'md5' as 'md5', type: 'video/mp4' }; - const file = { ...data, toJSON: () => ({ ...data, length: 0, filename: data.filepath.replace(/.*\//, ''), mtime: new Date(), toJson: () => undefined as any }) }; + const data = { size: 0, filepath, name, mimetype: 'video', originalFilename: name, newFilename: name, hashAlgorithm: 'md5' as md5, type: 'video/mp4' }; + const file = { ...data, toJSON: () => ({ ...data, length: 0, filename: data.filepath.replace(/.*\//, ''), mtime: new Date() }) }; MoveParsedFile(file, Directory.videos).then(output => res(output)); }); } @@ -517,15 +523,15 @@ export namespace DashUploadUtils { }); } const dataBuffer = readFileSync(file.filepath); - const result: ParsedPDF | any = await parse(dataBuffer).catch((e: any) => e); - if (!result.code) { + const result: parse.Result = await parse(dataBuffer).catch(e => e); + if (result) { await new Promise<void>((resolve, reject) => { const writeStream = createWriteStream(serverPathToFile(Directory.text, textFilename)); writeStream.write(result?.text, error => (error ? reject(error) : resolve())); }); return MoveParsedFile(file, Directory.pdfs, undefined, result?.text, undefined, fileKey); } - return { source: file, result: { name: 'faile pdf pupload', message: `Could not upload (${file.originalFilename}).${result.message}` } }; + return { source: file, result: { name: 'faile pdf pupload', message: `Could not upload (${file.originalFilename}).${result}` } }; } async function UploadCsv(file: File) { @@ -563,7 +569,7 @@ export namespace DashUploadUtils { .videoCodec('copy') // this will copy the data instead of reencode it .save(vidFile.filepath.replace('.mkv', '.mp4')) .on('end', res) - .on('error', (e: any) => console.log(e)); + .on('error', console.log); }); vidFile.filepath = vidFile.filepath.replace('.mkv', '.mp4'); format = '.mp4'; @@ -571,8 +577,9 @@ export namespace DashUploadUtils { if (format.includes('quicktime')) { let abort = false; await new Promise<void>(res => { - ffmpeg.ffprobe(vidFile.filepath, (err: any, metadata: any) => { - if (metadata.streams.some((stream: any) => stream.codec_name === 'hevc')) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + ffmpeg.ffprobe(vidFile.filepath, (err: any, metadata: ffmpeg.FfprobeData) => { + if (metadata.streams.some(stream => stream.codec_name === 'hevc')) { abort = true; } res(); diff --git a/src/server/IDatabase.ts b/src/server/IDatabase.ts index 2274792b3..481b64d4a 100644 --- a/src/server/IDatabase.ts +++ b/src/server/IDatabase.ts @@ -1,5 +1,5 @@ import * as mongodb from 'mongodb'; -import { Transferable } from './Message'; +import { serializedDoctype } from '../fields/ObjectField'; export const DocumentsCollection = 'documents'; export interface IDatabase { @@ -13,10 +13,10 @@ export interface IDatabase { dropSchema(...schemaNames: string[]): Promise<any>; - insert(value: any, collectionName?: string): Promise<void>; + insert(value: { _id: string }, collectionName?: string): Promise<void>; - getDocument(id: string, fn: (result?: Transferable) => void, collectionName?: string): void; - getDocuments(ids: string[], fn: (result: Transferable[]) => void, collectionName?: string): void; + getDocument(id: string, fn: (result?: serializedDoctype) => void, collectionName?: string): void; + getDocuments(ids: string[], fn: (result: serializedDoctype[]) => void, collectionName?: string): void; getCollectionNames(): 
Promise<string[]>; visit(ids: string[], fn: (result: any) => string[] | Promise<string[]>, collectionName?: string): Promise<void>; diff --git a/src/server/MemoryDatabase.ts b/src/server/MemoryDatabase.ts index 1432d91c4..b838cb61b 100644 --- a/src/server/MemoryDatabase.ts +++ b/src/server/MemoryDatabase.ts @@ -1,6 +1,6 @@ import * as mongodb from 'mongodb'; +import { serializedDoctype } from '../fields/ObjectField'; import { DocumentsCollection, IDatabase } from './IDatabase'; -import { Transferable } from './Message'; export class MemoryDatabase implements IDatabase { private db: { [collectionName: string]: { [id: string]: any } } = {}; @@ -81,10 +81,10 @@ export class MemoryDatabase implements IDatabase { return Promise.resolve(); } - public getDocument(id: string, fn: (result?: Transferable) => void, collectionName = DocumentsCollection): void { + public getDocument(id: string, fn: (result?: serializedDoctype) => void, collectionName = DocumentsCollection): void { fn(this.getCollection(collectionName)[id]); } - public getDocuments(ids: string[], fn: (result: Transferable[]) => void, collectionName = DocumentsCollection): void { + public getDocuments(ids: string[], fn: (result: serializedDoctype[]) => void, collectionName = DocumentsCollection): void { fn(ids.map(id => this.getCollection(collectionName)[id])); } diff --git a/src/server/Message.ts b/src/server/Message.ts index 03150c841..b904a5ba3 100644 --- a/src/server/Message.ts +++ b/src/server/Message.ts @@ -1,5 +1,6 @@ import * as uuid from 'uuid'; import { Point } from '../pen-gestures/ndollar'; +import { serverOpType } from '../fields/ObjectField'; function GenerateDeterministicGuid(seed: string): string { return uuid.v5(seed, uuid.v5.URL); @@ -22,52 +23,12 @@ export class Message<T> { } } -export enum Types { - Number, - List, - Key, - Image, - Web, - Document, - Text, - Icon, - RichText, - DocumentReference, - Html, - Video, - Audio, - Ink, - PDF, - Tuple, - Boolean, - Script, - Templates, -} - -export interface Transferable { - readonly id: string; - readonly type: Types; - readonly data?: any; -} - -export enum YoutubeQueryTypes { - Channels, - SearchVideo, - VideoDetails, -} - -export interface YoutubeQueryInput { - readonly type: YoutubeQueryTypes; - readonly userInput?: string; - readonly videoIds?: string; -} - export interface Reference { readonly id: string; } export interface Diff extends Reference { - readonly diff: any; + readonly diff: serverOpType; } export interface GestureContent { @@ -77,48 +38,25 @@ export interface GestureContent { readonly color?: string; } -export interface MobileInkOverlayContent { - readonly enableOverlay: boolean; - readonly width?: number; - readonly height?: number; - readonly text?: string; -} - -export interface UpdateMobileInkOverlayPositionContent { - readonly dx?: number; - readonly dy?: number; - readonly dsize?: number; -} - -export interface MobileDocumentUploadContent { - readonly docId: string; -} - export interface RoomMessage { readonly message: string; readonly room: string; } +// eslint-disable-next-line @typescript-eslint/no-namespace export namespace MessageStore { export const Foo = new Message<string>('Foo'); export const Bar = new Message<string>('Bar'); - export const SetField = new Message<Transferable>('Set Field'); // send Transferable (no reply) - export const GetField = new Message<string>('Get Field'); // send string 'id' get Transferable back - export const GetFields = new Message<string[]>('Get Fields'); // send string[] of 'id' get Transferable[] back export 
const GetDocument = new Message<string>('Get Document'); - export const DeleteAll = new Message<any>('Delete All'); + export const DeleteAll = new Message<unknown>('Delete All'); export const ConnectionTerminated = new Message<string>('Connection Terminated'); export const GesturePoints = new Message<GestureContent>('Gesture Points'); - export const MobileInkOverlayTrigger = new Message<MobileInkOverlayContent>('Trigger Mobile Ink Overlay'); - export const UpdateMobileInkOverlayPosition = new Message<UpdateMobileInkOverlayPositionContent>('Update Mobile Ink Overlay Position'); - export const MobileDocumentUpload = new Message<MobileDocumentUploadContent>('Upload Document From Mobile'); export const GetRefField = new Message<string>('Get Ref Field'); export const GetRefFields = new Message<string[]>('Get Ref Fields'); export const UpdateField = new Message<Diff>('Update Ref Field'); - export const CreateField = new Message<Reference>('Create Ref Field'); - export const YoutubeApiQuery = new Message<YoutubeQueryInput>('Youtube Api Query'); + export const CreateDocField = new Message<Reference>('Create Ref Field'); export const DeleteField = new Message<string>('Delete field'); export const DeleteFields = new Message<string[]>('Delete fields'); diff --git a/src/server/Search.ts b/src/server/Search.ts index b21ee853a..06af18776 100644 --- a/src/server/Search.ts +++ b/src/server/Search.ts @@ -3,6 +3,7 @@ import * as rp from 'request-promise'; const pathTo = (relative: string) => `http://localhost:8983/solr/dash/${relative}`; +// eslint-disable-next-line @typescript-eslint/no-namespace export namespace Search { export async function updateDocument(document: any) { try { diff --git a/src/server/SharedMediaTypes.ts b/src/server/SharedMediaTypes.ts index 8ae13454e..680db9cd0 100644 --- a/src/server/SharedMediaTypes.ts +++ b/src/server/SharedMediaTypes.ts @@ -1,6 +1,7 @@ import { ExifData } from 'exif'; import { File } from 'formidable'; +// eslint-disable-next-line @typescript-eslint/no-namespace export namespace AcceptableMedia { export const gifs = ['.gif']; export const pngs = ['.png']; @@ -18,6 +19,7 @@ export enum AudioAnnoState { playing = 'playing', } +// eslint-disable-next-line @typescript-eslint/no-namespace export namespace Upload { export function isImageInformation(uploadResponse: Upload.FileInformation): uploadResponse is Upload.ImageInformation { return 'nativeWidth' in uploadResponse; @@ -36,7 +38,7 @@ export namespace Upload { duration?: number; } export interface EnrichedExifData { - data: ExifData & ExifData['gps']; + data: ExifData & ExifData['gps'] & { Orientation?: string }; error?: string; } export interface InspectionResults { diff --git a/src/server/apis/google/GoogleApiServerUtils.ts b/src/server/apis/google/GoogleApiServerUtils.ts index d3acc968b..47206f415 100644 --- a/src/server/apis/google/GoogleApiServerUtils.ts +++ b/src/server/apis/google/GoogleApiServerUtils.ts @@ -21,6 +21,7 @@ const scope = ['documents.readonly', 'documents', 'presentations', 'presentation * This namespace manages server side authentication for Google API queries, either * from the standard v1 APIs or the Google Photos REST API. 
*/ +// eslint-disable-next-line @typescript-eslint/no-namespace export namespace GoogleApiServerUtils { /** * As we expand out to more Google APIs that are accessible from diff --git a/src/server/database.ts b/src/server/database.ts index ff8584cd7..975b9eb80 100644 --- a/src/server/database.ts +++ b/src/server/database.ts @@ -1,3 +1,4 @@ +/* eslint-disable @typescript-eslint/no-namespace */ import * as mongodb from 'mongodb'; import * as mongoose from 'mongoose'; import { Opt } from '../fields/Doc'; @@ -5,11 +6,11 @@ import { emptyFunction, Utils } from '../Utils'; import { GoogleApiServerUtils } from './apis/google/GoogleApiServerUtils'; import { DocumentsCollection, IDatabase } from './IDatabase'; import { MemoryDatabase } from './MemoryDatabase'; -import { Transferable } from './Message'; import { Upload } from './SharedMediaTypes'; +import { serializedDoctype } from '../fields/ObjectField'; export namespace Database { - export let disconnect: Function; + export let disconnect: () => void; class DocSchema implements mongodb.BSON.Document { _id!: string; @@ -31,7 +32,7 @@ export namespace Database { try { const { connection } = mongoose; disconnect = async () => - new Promise<any>(resolve => { + new Promise<void>(resolve => { connection.close().then(resolve); }); if (connection.readyState === ConnectionStates.disconnected) { @@ -84,6 +85,7 @@ export namespace Database { if (this.db) { const collection = this.db.collection<DocSchema>(collectionName); const prom = this.currentWrites[id]; + // eslint-disable-next-line prefer-const let newProm: Promise<void>; const run = (): Promise<void> => new Promise<void>(resolve => { @@ -112,6 +114,7 @@ export namespace Database { if (this.db) { const collection = this.db.collection<DocSchema>(collectionName); const prom = this.currentWrites[id]; + // eslint-disable-next-line prefer-const let newProm: Promise<void>; const run = (): Promise<void> => new Promise<void>(resolve => { @@ -196,6 +199,7 @@ export namespace Database { const id = value._id; const collection = this.db.collection<DocSchema>(collectionName); const prom = this.currentWrites[id]; + // eslint-disable-next-line prefer-const let newProm: Promise<void>; const run = (): Promise<void> => new Promise<void>(resolve => { @@ -219,7 +223,7 @@ export namespace Database { return undefined; } - public getDocument(id: string, fn: (result?: Transferable) => void, collectionName = DocumentsCollection) { + public getDocument(id: string, fn: (result?: serializedDoctype) => void, collectionName = DocumentsCollection) { if (this.db) { const collection = this.db.collection<DocSchema>(collectionName); collection.findOne({ _id: id }).then(resultIn => { @@ -237,7 +241,7 @@ export namespace Database { } } - public async getDocuments(ids: string[], fn: (result: Transferable[]) => void, collectionName = DocumentsCollection) { + public async getDocuments(ids: string[], fn: (result: serializedDoctype[]) => void, collectionName = DocumentsCollection) { if (this.db) { const found = await this.db .collection<DocSchema>(collectionName) diff --git a/src/server/index.ts b/src/server/index.ts index 3151c2975..3e0d86814 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -29,7 +29,6 @@ import initializeServer from './server_Initialization'; dotenv.config(); export const onWindows = process.platform === 'win32'; -// eslint-disable-next-line import/no-mutable-exports export let sessionAgent: AppliedSessionAgent; /** diff --git a/src/server/server_Initialization.ts b/src/server/server_Initialization.ts index 
9183688c6..2190e27c7 100644 --- a/src/server/server_Initialization.ts +++ b/src/server/server_Initialization.ts @@ -29,7 +29,6 @@ import { WebSocket } from './websocket'; export type RouteSetter = (server: RouteManager) => void; // export let disconnect: Function; -// eslint-disable-next-line import/no-mutable-exports export let resolvedServerUrl: string; const week = 7 * 24 * 60 * 60 * 1000; @@ -115,12 +114,12 @@ function registerEmbeddedBrowseRelativePathHandler(server: express.Express) { } function proxyServe(req: any, requrl: string, response: any) { - // eslint-disable-next-line global-require + // eslint-disable-next-line global-require, @typescript-eslint/no-var-requires const htmlBodyMemoryStream = new (require('memorystream'))(); let wasinBrFormat = false; const sendModifiedBody = () => { const header = response.headers['content-encoding']; - const refToCors = (match: any, tag: string, sym: string, href: string) => `${tag}=${sym + resolvedServerUrl}/corsProxy/${href + sym}`; + const refToCors = (match: string, tag: string, sym: string, href: string) => `${tag}=${sym + resolvedServerUrl}/corsProxy/${href + sym}`; // const relpathToCors = (match: any, href: string, offset: any, string: any) => `="${resolvedServerUrl + '/corsProxy/' + decodeURIComponent(req.originalUrl.split('/corsProxy/')[1].match(/https?:\/\/[^\/]*/)?.[0] ?? '') + '/' + href}"`; if (header) { try { @@ -238,7 +237,7 @@ function registerAuthenticationRoutes(server: express.Express) { export default async function InitializeServer(routeSetter: RouteSetter) { const isRelease = determineEnvironment(); const app = buildWithMiddleware(express()); - const compiler = webpack(config as any); + const compiler = webpack(config as webpack.Configuration); // route table managed by express. routes are tested sequentially against each of these map rules. 
when a match is found, the handler is called to process the request app.use(wdm(compiler, { publicPath: config.output.publicPath })); diff --git a/src/server/websocket.ts b/src/server/websocket.ts index cece8a1b7..ccbcb1c5f 100644 --- a/src/server/websocket.ts +++ b/src/server/websocket.ts @@ -3,66 +3,33 @@ import { createServer } from 'https'; import * as _ from 'lodash'; import { networkInterfaces } from 'os'; import { Server, Socket } from 'socket.io'; +import { SecureContextOptions } from 'tls'; import { ServerUtils } from '../ServerUtils'; +import { serializedDoctype, serializedFieldsType } from '../fields/ObjectField'; import { logPort } from './ActionUtilities'; import { Client } from './Client'; import { DashStats } from './DashStats'; import { DocumentsCollection } from './IDatabase'; -import { Diff, GestureContent, MessageStore, MobileDocumentUploadContent, MobileInkOverlayContent, Transferable, Types, UpdateMobileInkOverlayPositionContent, YoutubeQueryInput, YoutubeQueryTypes } from './Message'; -import { Search } from './Search'; +import { Diff, GestureContent, MessageStore } from './Message'; import { resolvedPorts, socketMap, timeMap, userOperations } from './SocketData'; -import { GoogleCredentialsLoader } from './apis/google/CredentialsLoader'; -import YoutubeApi from './apis/youtube/youtubeApiSample'; import { initializeGuest } from './authentication/DashUserModel'; import { Database } from './database'; +// eslint-disable-next-line @typescript-eslint/no-namespace export namespace WebSocket { let CurUser: string | undefined; - // eslint-disable-next-line import/no-mutable-exports export let _socket: Socket; - // eslint-disable-next-line import/no-mutable-exports - export let _disconnect: Function; + export let _disconnect: () => void; export const clients: { [key: string]: Client } = {}; function processGesturePoints(socket: Socket, content: GestureContent) { socket.broadcast.emit('receiveGesturePoints', content); } - function processOverlayTrigger(socket: Socket, content: MobileInkOverlayContent) { - socket.broadcast.emit('receiveOverlayTrigger', content); - } - - function processUpdateOverlayPosition(socket: Socket, content: UpdateMobileInkOverlayPositionContent) { - socket.broadcast.emit('receiveUpdateOverlayPosition', content); - } - - function processMobileDocumentUpload(socket: Socket, content: MobileDocumentUploadContent) { - socket.broadcast.emit('receiveMobileDocumentUpload', content); - } - - function HandleYoutubeQuery([query, callback]: [YoutubeQueryInput, (result?: any[]) => void]) { - const { ProjectCredentials } = GoogleCredentialsLoader; - switch (query.type) { - case YoutubeQueryTypes.Channels: - YoutubeApi.authorizedGetChannel(ProjectCredentials); - break; - case YoutubeQueryTypes.SearchVideo: - YoutubeApi.authorizedGetVideos(ProjectCredentials, query.userInput, callback); - break; - case YoutubeQueryTypes.VideoDetails: - YoutubeApi.authorizedGetVideoDetails(ProjectCredentials, query.videoIds, callback); - break; - default: - } - } - export async function doDelete(onlyFields = true) { const target: string[] = []; onlyFields && target.push(DocumentsCollection); await Database.Instance.dropSchema(...target); - if (process.env.DISABLE_SEARCH !== 'true') { - await Search.clear(); - } initializeGuest(); } @@ -82,137 +49,59 @@ export namespace WebSocket { DashStats.logUserLogin(userEmail); } - function getField([id, callback]: [string, (result?: Transferable) => void]) { - Database.Instance.getDocument(id, (result?: Transferable) => callback(result)); - } - - 
function getFields([ids, callback]: [string[], (result: Transferable[]) => void]) { - Database.Instance.getDocuments(ids, callback); - } - - function setField(socket: Socket, newValue: Transferable) { - Database.Instance.update(newValue.id, newValue, () => socket.broadcast.emit(MessageStore.SetField.Message, newValue)); // broadcast set value to all other clients - if (newValue.type === Types.Text) { - // if the newValue has sring type, then it's suitable for searching -- pass it to SOLR - Search.updateDocument({ id: newValue.id, data: { set: (newValue as any).data } }); - } - } - - function GetRefFieldLocal([id, callback]: [string, (result?: Transferable) => void]) { + function GetRefFieldLocal(id: string, callback: (result?: serializedDoctype | undefined) => void) { return Database.Instance.getDocument(id, callback); } - function GetRefField([id, callback]: [string, (result?: Transferable) => void]) { + function GetRefField([id, callback]: [string, (result?: serializedDoctype) => void]) { process.stdout.write(`+`); - GetRefFieldLocal([id, callback]); + GetRefFieldLocal(id, callback); } - function GetRefFields([ids, callback]: [string[], (result?: Transferable[]) => void]) { + function GetRefFields([ids, callback]: [string[], (result?: serializedDoctype[]) => void]) { process.stdout.write(`${ids.length}…`); Database.Instance.getDocuments(ids, callback); } - const suffixMap: { [type: string]: string | [string, string | ((json: any) => any)] } = { - number: '_n', - string: '_t', - boolean: '_b', - image: ['_t', 'url'], - video: ['_t', 'url'], - pdf: ['_t', 'url'], - audio: ['_t', 'url'], - web: ['_t', 'url'], - map: ['_t', 'url'], - script: ['_t', value => value.script.originalScript], - RichTextField: ['_t', value => value.Text], - date: ['_d', value => new Date(value.date).toISOString()], - proxy: ['_i', 'fieldId'], - list: [ - '_l', - list => { - const results: any[] = []; - // eslint-disable-next-line no-use-before-define - list.fields.forEach((value: any) => ToSearchTerm(value) && results.push(ToSearchTerm(value)!.value)); - return results.length ? results : null; - }, - ], - }; - - function ToSearchTerm(valIn: any): { suffix: string; value: any } | undefined { - let val = valIn; - if (val === null || val === undefined) { - return undefined; - } - const type = val.__type || typeof val; - - let suffix = suffixMap[type]; - if (!suffix) { - return undefined; - } - if (Array.isArray(suffix)) { - const accessor = suffix[1]; - if (typeof accessor === 'function') { - val = accessor(val); - } else { - val = val[accessor]; - } - [suffix] = suffix; - } - return { suffix, value: val }; - } - - function getSuffix(value: string | [string, any]): string { - return typeof value === 'string' ? 
value : value[0]; - } const pendingOps = new Map<string, { diff: Diff; socket: Socket }[]>(); - function dispatchNextOp(id: string) { - const next = pendingOps.get(id)!.shift(); + function dispatchNextOp(id: string): unknown { + const next = pendingOps.get(id)?.shift(); + // eslint-disable-next-line @typescript-eslint/no-unused-vars + const nextOp = (res: boolean) => dispatchNextOp(id); if (next) { const { diff, socket } = next; - if (diff.diff.$addToSet) { - // eslint-disable-next-line no-use-before-define - return GetRefFieldLocal([diff.id, (result?: Transferable) => addToListField(socket, diff, result)]); // would prefer to have Mongo handle list additions direclty, but for now handle it on our own + // ideally, we'd call the Database update method for all actions, but for now we handle list insertion/removal on our own + switch (diff.diff.$addToSet ? 'add' : diff.diff.$remFromSet ? 'rem' : 'set') { + case 'add': return GetRefFieldLocal(id, (result) => addToListField(socket, diff, result, nextOp)); // prettier-ignore + case 'rem': return GetRefFieldLocal(id, (result) => remFromListField(socket, diff, result, nextOp)); // prettier-ignore + default: return Database.Instance.update(id, diff.diff, + () => nextOp(socket.broadcast.emit(MessageStore.UpdateField.Message, diff)), + false + ); // prettier-ignore } - if (diff.diff.$remFromSet) { - // eslint-disable-next-line no-use-before-define - return GetRefFieldLocal([diff.id, (result?: Transferable) => remFromListField(socket, diff, result)]); // would prefer to have Mongo handle list additions direclty, but for now handle it on our own - } - // eslint-disable-next-line no-use-before-define - return SetField(socket, diff); } - return !pendingOps.get(id)!.length && pendingOps.delete(id); + return !pendingOps.get(id)?.length && pendingOps.delete(id); } - function addToListField(socket: Socket, diffIn: Diff, curListItems?: Transferable): void { - const diff = diffIn; - diff.diff.$set = diff.diff.$addToSet; - delete diff.diff.$addToSet; // convert add to set to a query of the current fields, and then a set of the composition of the new fields with the old ones - const updatefield = Array.from(Object.keys(diff.diff.$set))[0]; - const newListItems = diff.diff.$set[updatefield]?.fields; - if (!newListItems) { - console.log('Error: addToListField - no new list items'); - return; - } - const curList = (curListItems as any)?.fields?.[updatefield.replace('fields.', '')]?.fields.filter((item: any) => item !== undefined) || []; - diff.diff.$set[updatefield].fields = [...curList, ...newListItems]; // , ...newListItems.filter((newItem: any) => newItem === null || !curList.some((curItem: any) => curItem.fieldId ? curItem.fieldId === newItem.fieldId : curItem.heading ? curItem.heading === newItem.heading : curItem === newItem))]; - const sendBack = diff.diff.length !== diff.diff.$set[updatefield].fields.length; - delete diff.diff.length; - Database.Instance.update( - diff.id, - diff.diff, - () => { - if (sendBack) { - console.log('Warning: list modified during update. Composite list is being returned.'); - const { id } = socket; - (socket as any).id = ''; // bcz: HACK. this prevents the update message from going back to the client that made the change. 
- socket.broadcast.emit(MessageStore.UpdateField.Message, diff); - (socket as any).id = id; - } else { - socket.broadcast.emit(MessageStore.UpdateField.Message, diff); - } - dispatchNextOp(diff.id); - }, - false - ); + function addToListField(socket: Socket, diff: Diff, listDoc: serializedDoctype | undefined, cb: (res: boolean) => void): void { + const $addToSet = diff.diff.$addToSet as serializedFieldsType; + const updatefield = Array.from(Object.keys($addToSet ?? {}))[0]; + const newListItems = $addToSet?.[updatefield]?.fields; + + if (newListItems) { + const length = diff.diff.$addToSet?.length; + diff.diff.$set = $addToSet; // convert add to set to a query of the current fields, and then a set of the composition of the new fields with the old ones + delete diff.diff.$addToSet; // can't pass $set to Mongo, or it will do that insetead of $addToSet + const listItems = listDoc?.fields?.[updatefield.replace('fields.', '')]?.fields.filter(item => item) ?? []; + diff.diff.$set[updatefield]!.fields = [...listItems, ...newListItems]; // , ...newListItems.filter((newItem: any) => newItem === null || !curList.some((curItem: any) => curItem.fieldId ? curItem.fieldId === newItem.fieldId : curItem.heading ? curItem.heading === newItem.heading : curItem === newItem))]; + + // if the client's list length is not the same as what we're writing to the server, + // then we need to send the server's version back to the client so that they are in synch. + // this could happen if another client made a change before the server receives the update from the first client + const target = length !== diff.diff.$set[updatefield].fields.length ? socket : socket.broadcast; + target === socket && console.log('Warning: SEND BACK: list modified during add update. Composite list is being returned.'); + Database.Instance.update(diff.id, diff.diff, () => cb(target.emit(MessageStore.UpdateField.Message, diff)), false); + } else cb(false); } /** @@ -227,7 +116,7 @@ export namespace WebSocket { * the data * @returns the closest index with the same value or -1 if the element was not found. 
*/ - function findClosestIndex(list: any, indexesToDelete: number[], value: any, hintIndex: number) { + function findClosestIndex(list: { fieldId: string; __type: string }[], indexesToDelete: number[], value: { fieldId: string; __type: string }, hintIndex: number) { let closestIndex = -1; for (let i = 0; i < list.length; i++) { if (list[i] === value && !indexesToDelete.includes(i)) { @@ -251,140 +140,81 @@ export namespace WebSocket { * items to delete) * @param curListItems the server's current copy of the data */ - function remFromListField(socket: Socket, diffIn: Diff, curListItems?: Transferable): void { - const diff = diffIn; - diff.diff.$set = diff.diff.$remFromSet; - delete diff.diff.$remFromSet; - const updatefield = Array.from(Object.keys(diff.diff.$set))[0]; - const remListItems = diff.diff.$set[updatefield].fields; - const curList = (curListItems as any)?.fields?.[updatefield.replace('fields.', '')]?.fields.filter((f: any) => f !== null) || []; - const { hint } = diff.diff.$set; - - if (hint) { - // indexesToRemove stores the indexes that we mark for deletion, which is later used to filter the list (delete the elements) - const indexesToRemove: number[] = []; - for (let i = 0; i < hint.deleteCount; i++) { - if (curList.length > i + hint.start && _.isEqual(curList[i + hint.start], remListItems[i])) { - indexesToRemove.push(i + hint.start); - } else { - const closestIndex = findClosestIndex(curList, indexesToRemove, remListItems[i], i + hint.start); - if (closestIndex !== -1) { - indexesToRemove.push(closestIndex); + function remFromListField(socket: Socket, diff: Diff, curListItems: serializedDoctype | undefined, cb: (res: boolean) => void): void { + const $remFromSet = diff.diff.$remFromSet as serializedFieldsType; + const updatefield = Array.from(Object.keys($remFromSet ?? {}))[0]; + const remListItems = $remFromSet?.[updatefield]?.fields; + + if (remListItems) { + const hint = diff.diff.$remFromSet?.hint; + const length = diff.diff.$remFromSet?.length; + diff.diff.$set = $remFromSet; // convert rem from set to a query of the current fields, and then a set of the old fields minus the removed ones + delete diff.diff.$remFromSet; // can't pass $set to Mongo, or it will do that insetead of $remFromSet + const curList = curListItems?.fields?.[updatefield.replace('fields.', '')]?.fields.filter(f => f) ?? []; + + if (hint) { + // indexesToRemove stores the indexes that we mark for deletion, which is later used to filter the list (delete the elements) + const indexesToRemove: number[] = []; + for (let i = 0; i < hint.deleteCount; i++) { + if (curList.length > i + hint.start && _.isEqual(curList[i + hint.start], remListItems[i])) { + indexesToRemove.push(i + hint.start); } else { - console.log('Item to delete was not found - index = -1'); + const closestIndex = findClosestIndex(curList, indexesToRemove, remListItems[i], i + hint.start); + if (closestIndex !== -1) { + indexesToRemove.push(closestIndex); + } else { + console.log('Item to delete was not found'); + } } } + diff.diff.$set[updatefield]!.fields = curList.filter((curItem, index) => !indexesToRemove.includes(index)); + } else { + // if we didn't get a hint, remove all matching items from the list + diff.diff.$set[updatefield]!.fields = curList?.filter(curItem => !remListItems.some(remItem => (remItem.fieldId ? remItem.fieldId === curItem.fieldId : remItem.heading ? 
remItem.heading === curItem.heading : remItem === curItem))); } - diff.diff.$set[updatefield].fields = curList?.filter((curItem: any, index: number) => !indexesToRemove.includes(index)); - } else { - // go back to the original way to delete if we didn't receive - // a hint from the client - diff.diff.$set[updatefield].fields = curList?.filter( - (curItem: any) => !remListItems.some((remItem: any) => (remItem.fieldId ? remItem.fieldId === curItem.fieldId : remItem.heading ? remItem.heading === curItem.heading : remItem === curItem)) - ); - } - - // if the client and server have different versions of the data after - // deletion, they will have different lengths and the server will - // send its version of the data to the client - const sendBack = diff.diff.length !== diff.diff.$set[updatefield].fields.length; - delete diff.diff.length; - Database.Instance.update( - diff.id, - diff.diff, - () => { - if (sendBack) { - // the two copies are different, so the server sends its copy. - console.log('SEND BACK'); - const { id } = socket; - (socket as any).id = ''; // bcz: HACK. this prevents the update message from going back to the client that made the change. - socket.broadcast.emit(MessageStore.UpdateField.Message, diff); - (socket as any).id = id; - } else { - socket.broadcast.emit(MessageStore.UpdateField.Message, diff); - } - dispatchNextOp(diff.id); - }, - false - ); + // if the client's list length is not the same as what we're writing to the server, + // then we need to send the server's version back to the client so that they are in synch. + // this could happen if another client made a change before the server receives the update from the first client + const target = length !== diff.diff.$set[updatefield].fields.length ? socket : socket.broadcast; + target === socket && console.log('Warning: SEND BACK: list modified during remove update. Composite list is being returned.'); + Database.Instance.update(diff.id, diff.diff, () => cb(target.emit(MessageStore.UpdateField.Message, diff)), false); + } else cb(false); } function UpdateField(socket: Socket, diff: Diff) { const curUser = socketMap.get(socket); - if (!curUser) return false; - const currentUsername = curUser.split(' ')[0]; - userOperations.set(currentUsername, userOperations.get(currentUsername) !== undefined ? userOperations.get(currentUsername)! + 1 : 0); + if (curUser) { + const currentUsername = curUser.split(' ')[0]; + userOperations.set(currentUsername, userOperations.get(currentUsername) !== undefined ? userOperations.get(currentUsername)! 
+ 1 : 0); - if (CurUser !== socketMap.get(socket)) { - CurUser = socketMap.get(socket); - console.log('Switch User: ' + CurUser); - } - if (pendingOps.has(diff.id)) { - pendingOps.get(diff.id)!.push({ diff, socket }); - return true; - } - pendingOps.set(diff.id, [{ diff, socket }]); - if (diff.diff.$addToSet) { - return GetRefFieldLocal([diff.id, (result?: Transferable) => addToListField(socket, diff, result)]); // would prefer to have Mongo handle list additions direclty, but for now handle it on our own - } - if (diff.diff.$remFromSet) { - return GetRefFieldLocal([diff.id, (result?: Transferable) => remFromListField(socket, diff, result)]); // would prefer to have Mongo handle list additions direclty, but for now handle it on our own - } - // eslint-disable-next-line no-use-before-define - return SetField(socket, diff); - } - function SetField(socket: Socket, diff: Diff /* , curListItems?: Transferable */) { - Database.Instance.update(diff.id, diff.diff, () => socket.broadcast.emit(MessageStore.UpdateField.Message, diff), false); - const docfield = diff.diff.$set || diff.diff.$unset; - if (docfield) { - const update: any = { id: diff.id }; - let dynfield = false; - // eslint-disable-next-line no-restricted-syntax - for (let key in docfield) { - // eslint-disable-next-line no-continue - if (!key.startsWith('fields.')) continue; - dynfield = true; - const val = docfield[key]; - key = key.substring(7); - Object.values(suffixMap).forEach(suf => { - update[key + getSuffix(suf)] = { set: null }; - }); - const term = ToSearchTerm(val); - if (term !== undefined) { - const { suffix, value } = term; - update[key + suffix] = { set: value }; - if (key.endsWith('modificationDate')) { - update['modificationDate' + suffix] = value; - } - } + if (CurUser !== socketMap.get(socket)) { + CurUser = socketMap.get(socket); + console.log('Switch User: ' + CurUser); } - if (dynfield) { - Search.updateDocument(update); + if (pendingOps.has(diff.id)) { + pendingOps.get(diff.id)!.push({ diff, socket }); + return true; } + pendingOps.set(diff.id, [{ diff, socket }]); + return dispatchNextOp(diff.id); } - dispatchNextOp(diff.id); + return false; } function DeleteField(socket: Socket, id: string) { - Database.Instance.delete({ _id: id }).then(() => { - socket.broadcast.emit(MessageStore.DeleteField.Message, id); - }); - - Search.deleteDocuments([id]); + Database.Instance.delete({ _id: id }).then(() => socket.broadcast.emit(MessageStore.DeleteField.Message, id)); } function DeleteFields(socket: Socket, ids: string[]) { - Database.Instance.delete({ _id: { $in: ids } }).then(() => { - socket.broadcast.emit(MessageStore.DeleteFields.Message, ids); - }); - Search.deleteDocuments(ids); + Database.Instance.delete({ _id: { $in: ids } }).then(() => socket.broadcast.emit(MessageStore.DeleteFields.Message, ids)); } - function CreateField(newValue: any) { + function CreateDocField(newValue: serializedDoctype) { Database.Instance.insert(newValue); } - export async function initialize(isRelease: boolean, credentials: any) { + + export async function initialize(isRelease: boolean, credentials: SecureContextOptions) { let io: Server; if (isRelease) { const { socketPort } = process.env; @@ -417,21 +247,19 @@ export namespace WebSocket { socket.in(room).emit('message', message); }); - socket.on('ipaddr', () => { + socket.on('ipaddr', () => networkInterfaces().keys?.forEach(dev => { if (dev.family === 'IPv4' && dev.address !== '127.0.0.1') { socket.emit('ipaddr', dev.address); } - }); - }); + }) + ); - socket.on('bye', () => { - 
console.log('received bye'); - }); + socket.on('bye', () => console.log('received bye')); socket.on('disconnect', () => { const currentUser = socketMap.get(socket); - if (!(currentUser === undefined)) { + if (currentUser !== undefined) { const currentUsername = currentUser.split(' ')[0]; DashStats.logUserLogout(currentUsername); delete timeMap[currentUsername]; @@ -441,22 +269,15 @@ export namespace WebSocket { ServerUtils.Emit(socket, MessageStore.Foo, 'handshooken'); ServerUtils.AddServerHandler(socket, MessageStore.Bar, guid => barReceived(socket, guid)); - ServerUtils.AddServerHandler(socket, MessageStore.SetField, args => setField(socket, args)); - ServerUtils.AddServerHandlerCallback(socket, MessageStore.GetField, getField); - ServerUtils.AddServerHandlerCallback(socket, MessageStore.GetFields, getFields); if (isRelease) { ServerUtils.AddServerHandler(socket, MessageStore.DeleteAll, () => doDelete(false)); } - ServerUtils.AddServerHandler(socket, MessageStore.CreateField, CreateField); - ServerUtils.AddServerHandlerCallback(socket, MessageStore.YoutubeApiQuery, HandleYoutubeQuery); + ServerUtils.AddServerHandler(socket, MessageStore.CreateDocField, CreateDocField); ServerUtils.AddServerHandler(socket, MessageStore.UpdateField, diff => UpdateField(socket, diff)); ServerUtils.AddServerHandler(socket, MessageStore.DeleteField, id => DeleteField(socket, id)); ServerUtils.AddServerHandler(socket, MessageStore.DeleteFields, ids => DeleteFields(socket, ids)); ServerUtils.AddServerHandler(socket, MessageStore.GesturePoints, content => processGesturePoints(socket, content)); - ServerUtils.AddServerHandler(socket, MessageStore.MobileInkOverlayTrigger, content => processOverlayTrigger(socket, content)); - ServerUtils.AddServerHandler(socket, MessageStore.UpdateMobileInkOverlayPosition, content => processUpdateOverlayPosition(socket, content)); - ServerUtils.AddServerHandler(socket, MessageStore.MobileDocumentUpload, content => processMobileDocumentUpload(socket, content)); ServerUtils.AddServerHandlerCallback(socket, MessageStore.GetRefField, GetRefField); ServerUtils.AddServerHandlerCallback(socket, MessageStore.GetRefFields, GetRefFields); |
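
The sketches below restate, in simplified standalone TypeScript, a few of the patterns this commit touches; every module, type, and function name in them is illustrative rather than an actual export of this repository. First, the id-remapping pass the upload route applies when importing a zipped document tree: every old id maps to one fresh uuid, and serialized references embedded in string fields are rewritten through the same table so cross-document links stay intact (the regex is the one visible in the UploadManager hunk).

```ts
// Minimal sketch of the id-remapping pass applied when importing a zipped document tree.
// `remapIds`, `SerializedDoc`, and the field layout are illustrative, not the repo's API;
// the regex is the one the upload route uses to find embedded document references.
import * as uuid from 'uuid';

interface SerializedDoc {
    id: string;
    fields: Record<string, unknown>;
}

export function remapIds(docs: SerializedDoc[]): SerializedDoc[] {
    const ids: Record<string, string> = {};
    // each old id maps to exactly one fresh uuid, so cross-references stay consistent
    const getId = (old: string) => (ids[old] ??= uuid.v4());

    const refRe = /("(?:dataD|d)ocumentId"\s*:\s*")([\w-]*)"/g;
    return docs.map(doc => ({
        id: getId(doc.id),
        fields: Object.fromEntries(
            Object.entries(doc.fields).map(([key, value]) => [
                key,
                // string fields may contain serialized references to other documents;
                // rewrite those references through the same id table
                typeof value === 'string' ? value.replace(refRe, (_m: string, p1: string, p2: string) => `${p1}${getId(p2)}"`) : value,
            ])
        ),
    }));
}
```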
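`DashUploadUtils.InspectImage` and `UploadImage` return an `Error` value instead of throwing, and this commit tightens that pattern (`result instanceof Error` checks, wrapping caught values in `new Error(...)` rather than returning the raw exception). A minimal sketch of the convention, with `inspect` and `upload` as hypothetical stand-ins:

```ts
// Sketch of the error-as-return-value convention around image inspection/upload.
// `inspect` and `upload` are hypothetical stand-ins for DashUploadUtils.InspectImage
// and DashUploadUtils.UploadImage; the shape of ImageInfo is simplified.
interface ImageInfo {
    filename: string;
    contentSize: number;
    contentType: string;
}

async function inspect(source: string): Promise<ImageInfo | Error> {
    try {
        // ...probe headers and dimensions here...
        return { filename: source.split('/').pop() ?? source, contentSize: 0, contentType: 'image/jpeg' };
    } catch (e: unknown) {
        // normalize unknown throwables into a real Error so callers only need one check
        return e instanceof Error ? e : new Error(String(e));
    }
}

async function upload(source: string): Promise<string | Error> {
    const result = await inspect(source);
    if (result instanceof Error) {
        return result; // propagate without throwing; the route handler decides how to report it
    }
    return `uploaded ${result.filename} (${result.contentSize} bytes, ${result.contentType})`;
}
```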
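The largest change is in `websocket.ts`, where field updates for a given document id are funneled through the `pendingOps` queue and `dispatchNextOp`, so `$addToSet`, `$remFromSet`, and plain `$set` operations on the same document apply strictly in arrival order. A stripped-down sketch of that queueing discipline, with a placeholder `apply` callback in place of the `Database.Instance.update` and list-merge calls:

```ts
// Stripped-down sketch of the per-document operation queue (pendingOps/dispatchNextOp):
// ops for the same id run strictly in arrival order; ops for different ids interleave.
// `Op.apply` is a placeholder for the Database.Instance.update / list-merge calls.
type Op = { id: string; apply: () => Promise<void> };

const pending = new Map<string, Op[]>();

function enqueue(op: Op): void {
    const queue = pending.get(op.id);
    if (queue) {
        queue.push(op); // an op for this id is already in flight; wait behind it
        return;
    }
    pending.set(op.id, [op]);
    void dispatchNext(op.id);
}

async function dispatchNext(id: string): Promise<void> {
    const next = pending.get(id)?.shift();
    if (!next) {
        pending.delete(id); // queue drained; new ops for this id can start immediately
        return;
    }
    await next.apply();
    return dispatchNext(id); // chain to the next queued op for this document
}
```

In the diff itself the chaining runs through the `nextOp` callback handed to the update routines rather than `await`, but the per-id ordering guarantee is the same.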
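`addToListField` converts a client's `$addToSet` into a read-modify-write: the server re-reads its copy of the list, appends the new items, writes the composite back with `$set`, and compares the resulting length against the length the client expected. On a mismatch (another client changed the list first) the merged list is returned to the originating socket so it can resynchronize; otherwise the update is simply broadcast to the other clients. A simplified sketch with plain arrays and callback stand-ins for the socket.io emits:

```ts
// Simplified sketch of the add-to-list merge and its "send back" decision.
// Plain arrays and callbacks stand in for the serialized Dash fields and socket.io emits.
interface ListUpdate<T> {
    items: T[]; // items the client asked to append
    expectedLength: number; // length the client expects the list to have afterwards
}

function mergeAdd<T>(
    serverList: T[],
    update: ListUpdate<T>,
    emitToSender: (merged: T[]) => void, // stale client: return the composite list to it
    broadcastToOthers: (merged: T[]) => void // normal case: tell everyone else about the change
): T[] {
    const merged = [...serverList, ...update.items];
    if (merged.length !== update.expectedLength) {
        // another client changed the list first; the sender must resynchronize
        emitToSender(merged);
    } else {
        broadcastToOthers(merged);
    }
    return merged;
}
```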
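`remFromListField` uses the client-supplied hint (`{ start, deleteCount }`) to decide which server-side indexes to drop, falling back to `findClosestIndex` when the list has shifted since the client looked at it. The sketch below mirrors that flow but uses lodash's `isEqual` for both comparisons, where the original mixes deep and reference equality:

```ts
// Sketch of hint-based removal: trust the client's { start, deleteCount } hint while the
// values still line up, and search outward from the hinted index when the list has drifted.
// Deep equality (lodash isEqual) is used for both comparisons here for simplicity.
import { isEqual } from 'lodash';

interface RemovalHint {
    start: number;
    deleteCount: number;
}

function findClosestIndex<T>(list: T[], taken: number[], value: T, hintIndex: number): number {
    let best = -1;
    for (let i = 0; i < list.length; i++) {
        if (isEqual(list[i], value) && !taken.includes(i) && (best === -1 || Math.abs(i - hintIndex) < Math.abs(best - hintIndex))) {
            best = i; // matching slot closest to where the client said the item was
        }
    }
    return best;
}

function indexesToRemove<T>(serverList: T[], removed: T[], hint: RemovalHint): number[] {
    const marked: number[] = [];
    for (let i = 0; i < hint.deleteCount; i++) {
        const hinted = hint.start + i;
        if (hinted < serverList.length && isEqual(serverList[hinted], removed[i])) {
            marked.push(hinted); // the hinted slot still holds the expected value
        } else {
            const closest = findClosestIndex(serverList, marked, removed[i], hinted);
            if (closest !== -1) marked.push(closest); // list shifted; take the nearest match instead
        }
    }
    return marked;
}
```

When no hint is present, the diff falls back to filtering out every element that matches one of the removed items.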
