From 1bc1ed8c5c31a0d3126d27434c690b8d744971ba Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Wed, 8 Jan 2020 02:42:42 -0500 Subject: final session changes --- src/server/DashSession.ts | 61 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 61 insertions(+) create mode 100644 src/server/DashSession.ts (limited to 'src/server/DashSession.ts') diff --git a/src/server/DashSession.ts b/src/server/DashSession.ts new file mode 100644 index 000000000..9c36fa17f --- /dev/null +++ b/src/server/DashSession.ts @@ -0,0 +1,61 @@ +import { Session } from "./Session/session"; +import { Email } from "./ActionUtilities"; +import { red, yellow } from "colors"; +import { SolrManager } from "./ApiManagers/SearchManager"; +import { execSync } from "child_process"; +import { isMaster } from "cluster"; +import { Utils } from "../Utils"; +import { WebSocket } from "./Websocket/Websocket"; +import { MessageStore } from "./Message"; +import { launchServer } from "."; + +const notificationRecipients = ["samuel_wilkins@brown.edu"]; +const signature = "-Dash Server Session Manager"; + +const monitorHooks: Session.MonitorNotifierHooks = { + key: async (key, masterLog) => { + const content = `The key for this session (started @ ${new Date().toUTCString()}) is ${key}.\n\n${signature}`; + const failures = await Email.dispatchAll(notificationRecipients, "Server Termination Key", content); + if (failures) { + failures.map(({ recipient, error: { message } }) => masterLog(red(`dispatch failure @ ${recipient} (${yellow(message)})`))); + return false; + } + return true; + }, + crash: async ({ name, message, stack }, masterLog) => { + const body = [ + "You, as a Dash Administrator, are being notified of a server crash event. Here's what we know:", + `name:\n${name}`, + `message:\n${message}`, + `stack:\n${stack}`, + "The server is already restarting itself, but if you're concerned, use the Remote Desktop Connection to monitor progress.", + ].join("\n\n"); + const content = `${body}\n\n${signature}`; + const failures = await Email.dispatchAll(notificationRecipients, "Dash Web Server Crash", content); + if (failures) { + failures.map(({ recipient, error: { message } }) => masterLog(red(`dispatch failure @ ${recipient} (${yellow(message)})`))); + return false; + } + return true; + } +}; + +export class DashSessionAgent extends Session.AppliedSessionAgent { + + /** + * If we're the monitor (master) thread, we should launch the monitor logic for the session. + * Otherwise, we must be on a worker thread that was spawned *by* the monitor (master) thread, and thus + * our job should be to run the server. 
+ */ + protected async launchImplementation() { + if (isMaster) { + this.sessionMonitor = await Session.initializeMonitorThread(monitorHooks); + this.sessionMonitor.addReplCommand("pull", [], () => execSync("git pull", { stdio: ["ignore", "inherit", "inherit"] })); + this.sessionMonitor.addReplCommand("solr", [/start|stop/g], args => SolrManager.SetRunning(args[0] === "start")); + } else { + this.serverWorker = await Session.initializeWorkerThread(launchServer); // server initialization delegated to worker + this.serverWorker.addExitHandler(() => Utils.Emit(WebSocket._socket, MessageStore.ConnectionTerminated, "Manual")); + } + } + +} \ No newline at end of file -- cgit v1.2.3-70-g09d2 From 7378b5d063d9da34d485c8384efa71ba83272a61 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Wed, 8 Jan 2020 03:42:14 -0500 Subject: session cleanup --- src/client/DocServer.ts | 4 +- src/server/DashSession.ts | 89 +++++++++++++++++++++---------------------- src/server/Session/session.ts | 69 +++++++++++++++++++++------------ src/server/index.ts | 2 +- 4 files changed, 92 insertions(+), 72 deletions(-) (limited to 'src/server/DashSession.ts') diff --git a/src/client/DocServer.ts b/src/client/DocServer.ts index 47c63bfb7..ed7fbd7ba 100644 --- a/src/client/DocServer.ts +++ b/src/client/DocServer.ts @@ -82,7 +82,9 @@ export namespace DocServer { Utils.AddServerHandler(_socket, MessageStore.UpdateField, respondToUpdate); Utils.AddServerHandler(_socket, MessageStore.DeleteField, respondToDelete); Utils.AddServerHandler(_socket, MessageStore.DeleteFields, respondToDelete); - Utils.AddServerHandler(_socket, MessageStore.ConnectionTerminated, () => alert("Your connection to the server has been terminated.")); + Utils.AddServerHandler(_socket, MessageStore.ConnectionTerminated, () => { + alert("Your connection to the server has been terminated."); + }); } function errorFunc(): never { diff --git a/src/server/DashSession.ts b/src/server/DashSession.ts index 9c36fa17f..c0ebc9687 100644 --- a/src/server/DashSession.ts +++ b/src/server/DashSession.ts @@ -3,59 +3,58 @@ import { Email } from "./ActionUtilities"; import { red, yellow } from "colors"; import { SolrManager } from "./ApiManagers/SearchManager"; import { execSync } from "child_process"; -import { isMaster } from "cluster"; import { Utils } from "../Utils"; import { WebSocket } from "./Websocket/Websocket"; import { MessageStore } from "./Message"; import { launchServer } from "."; -const notificationRecipients = ["samuel_wilkins@brown.edu"]; -const signature = "-Dash Server Session Manager"; +/** +* If we're the monitor (master) thread, we should launch the monitor logic for the session. +* Otherwise, we must be on a worker thread that was spawned *by* the monitor (master) thread, and thus +* our job should be to run the server. +*/ +export class DashSessionAgent extends Session.AppliedSessionAgent { -const monitorHooks: Session.MonitorNotifierHooks = { - key: async (key, masterLog) => { - const content = `The key for this session (started @ ${new Date().toUTCString()}) is ${key}.\n\n${signature}`; - const failures = await Email.dispatchAll(notificationRecipients, "Server Termination Key", content); - if (failures) { - failures.map(({ recipient, error: { message } }) => masterLog(red(`dispatch failure @ ${recipient} (${yellow(message)})`))); - return false; - } - return true; - }, - crash: async ({ name, message, stack }, masterLog) => { - const body = [ - "You, as a Dash Administrator, are being notified of a server crash event. 
Here's what we know:", - `name:\n${name}`, - `message:\n${message}`, - `stack:\n${stack}`, - "The server is already restarting itself, but if you're concerned, use the Remote Desktop Connection to monitor progress.", - ].join("\n\n"); - const content = `${body}\n\n${signature}`; - const failures = await Email.dispatchAll(notificationRecipients, "Dash Web Server Crash", content); - if (failures) { - failures.map(({ recipient, error: { message } }) => masterLog(red(`dispatch failure @ ${recipient} (${yellow(message)})`))); - return false; - } - return true; - } -}; + private readonly notificationRecipients = ["samuel_wilkins@brown.edu"]; + private readonly signature = "-Dash Server Session Manager"; -export class DashSessionAgent extends Session.AppliedSessionAgent { + protected async launchMonitor() { + const monitor = await Session.initializeMonitorThread({ + key: async (key, masterLog) => { + const content = `The key for this session (started @ ${new Date().toUTCString()}) is ${key}.\n\n${this.signature}`; + const failures = await Email.dispatchAll(this.notificationRecipients, "Server Termination Key", content); + if (failures) { + failures.map(({ recipient, error: { message } }) => masterLog(red(`dispatch failure @ ${recipient} (${yellow(message)})`))); + return false; + } + return true; + }, + crash: async ({ name, message, stack }, masterLog) => { + const body = [ + "You, as a Dash Administrator, are being notified of a server crash event. Here's what we know:", + `name:\n${name}`, + `message:\n${message}`, + `stack:\n${stack}`, + "The server is already restarting itself, but if you're concerned, use the Remote Desktop Connection to monitor progress.", + ].join("\n\n"); + const content = `${body}\n\n${this.signature}`; + const failures = await Email.dispatchAll(this.notificationRecipients, "Dash Web Server Crash", content); + if (failures) { + failures.map(({ recipient, error: { message } }) => masterLog(red(`dispatch failure @ ${recipient} (${yellow(message)})`))); + return false; + } + return true; + } + }); + monitor.addReplCommand("pull", [], () => execSync("git pull", { stdio: ["ignore", "inherit", "inherit"] })); + monitor.addReplCommand("solr", [/start|stop/g], args => SolrManager.SetRunning(args[0] === "start")); + return monitor; + } - /** - * If we're the monitor (master) thread, we should launch the monitor logic for the session. - * Otherwise, we must be on a worker thread that was spawned *by* the monitor (master) thread, and thus - * our job should be to run the server. 
- */ - protected async launchImplementation() { - if (isMaster) { - this.sessionMonitor = await Session.initializeMonitorThread(monitorHooks); - this.sessionMonitor.addReplCommand("pull", [], () => execSync("git pull", { stdio: ["ignore", "inherit", "inherit"] })); - this.sessionMonitor.addReplCommand("solr", [/start|stop/g], args => SolrManager.SetRunning(args[0] === "start")); - } else { - this.serverWorker = await Session.initializeWorkerThread(launchServer); // server initialization delegated to worker - this.serverWorker.addExitHandler(() => Utils.Emit(WebSocket._socket, MessageStore.ConnectionTerminated, "Manual")); - } + protected async launchServerWorker() { + const worker = await Session.initializeWorkerThread(launchServer); // server initialization delegated to worker + worker.addExitHandler(() => Utils.Emit(WebSocket._socket, MessageStore.ConnectionTerminated, "Manual")); + return worker; } } \ No newline at end of file diff --git a/src/server/Session/session.ts b/src/server/Session/session.ts index b22b6404d..cc9e7dd1a 100644 --- a/src/server/Session/session.ts +++ b/src/server/Session/session.ts @@ -22,46 +22,44 @@ export namespace Session { export abstract class AppliedSessionAgent { + public killSession(graceful = true) { + const target = isMaster ? this.sessionMonitor : this.serverWorker; + target.killSession(graceful); + } + private launched = false; - protected sessionMonitorRef: Session.Monitor | undefined; + private sessionMonitorRef: Session.Monitor | undefined; public get sessionMonitor(): Session.Monitor { if (!isMaster) { throw new Error("Cannot access the session monitor directly from the server worker thread"); } return this.sessionMonitorRef!; } - public set sessionMonitor(monitor: Session.Monitor) { - if (!isMaster) { - throw new Error("Cannot set the session monitor directly from the server worker thread"); - } - this.sessionMonitorRef = monitor; - } - protected serverWorkerRef: Session.ServerWorker | undefined; + private serverWorkerRef: Session.ServerWorker | undefined; public get serverWorker(): Session.ServerWorker { if (isMaster) { throw new Error("Cannot access the server worker directly from the session monitor thread"); } return this.serverWorkerRef!; } - public set serverWorker(worker: Session.ServerWorker) { - if (isMaster) { - throw new Error("Cannot set the server worker directly from the session monitor thread"); - } - this.serverWorkerRef = worker; - } public async launch(): Promise { if (!this.launched) { this.launched = true; - await this.launchImplementation(); + if (isMaster) { + this.sessionMonitorRef = await this.launchMonitor(); + } else { + this.serverWorkerRef = await this.launchServerWorker(); + } } else { throw new Error("Cannot launch a session thread more than once per process."); } } - protected abstract async launchImplementation(): Promise; + protected abstract async launchMonitor(): Promise; + protected abstract async launchServerWorker(): Promise; } @@ -92,11 +90,14 @@ export namespace Session { setPort: (port: "server" | "socket" | string, value: number, immediateRestart: boolean) => void; killSession: (graceful?: boolean) => never; addReplCommand: (basename: string, argPatterns: (RegExp | string)[], action: ReplAction) => void; - addChildMessageHandler: (message: string, handler: ActionHandler) => void; + addServerMessageListener: (message: string, handler: ActionHandler) => void; + removeServerMessageListener: (message: string, handler: ActionHandler) => void; + clearServerMessageListeners: (message: string) => void; } export 
interface ServerWorker { - killSession: () => void; + killSession: (graceful?: boolean) => void; + sendSessionAction: (message: string, args?: any) => void; addExitHandler: (handler: ExitHandler) => void; } @@ -176,7 +177,7 @@ export namespace Session { export async function initializeMonitorThread(notifiers?: MonitorNotifierHooks): Promise { console.log(timestamp(), cyan("initializing session...")); let activeWorker: Worker; - const childMessageHandlers: { [message: string]: ActionHandler } = {}; + const onMessage: { [message: string]: ActionHandler[] | undefined } = {}; // read in configuration .json file only once, in the master thread // pass down any variables the pertinent to the child processes as environment variables @@ -275,7 +276,7 @@ export namespace Session { switch (message) { case "kill": log(red("an authorized user has manually ended the server session")); - killSession(); + killSession(args.graceful); case "notify_crash": if (notifiers && notifiers.crash) { const { error } = args; @@ -287,9 +288,9 @@ export namespace Session { const { port, value, immediateRestart } = args; setPort(port, value, immediateRestart); default: - const handler = childMessageHandlers[message]; - if (handler) { - handler({ message, args }); + const handlers = onMessage[message]; + if (handlers) { + handlers.forEach(handler => handler({ message, args })); } } } else if (lifecycle) { @@ -333,7 +334,24 @@ export namespace Session { // returned to allow the caller to add custom commands return { addReplCommand: repl.registerCommand, - addChildMessageHandler: (message: string, handler: ActionHandler) => { childMessageHandlers[message] = handler; }, + addServerMessageListener: (message: string, handler: ActionHandler) => { + const handlers = onMessage[message]; + if (handlers) { + handlers.push(handler); + } else { + onMessage[message] = [handler]; + } + }, + removeServerMessageListener: (message: string, handler: ActionHandler) => { + const handlers = onMessage[message]; + if (handlers) { + const index = handlers.indexOf(handler); + if (index > -1) { + handlers.splice(index, 1); + } + } + }, + clearServerMessageListeners: (message: string) => onMessage[message] = undefined, restartServer, killSession, setPort, @@ -431,7 +449,8 @@ export namespace Session { return { addExitHandler: (handler: ExitHandler) => exitHandlers.push(handler), - killSession: () => process.send!({ action: { message: "kill" } }) + killSession: (graceful = true) => process.send!({ action: { message: "kill", args: { graceful } } }), + sendSessionAction: (message: string, args?: any) => process.send!({ action: { message, args } }) }; } diff --git a/src/server/index.ts b/src/server/index.ts index f18e9abb6..ffab0f380 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -92,7 +92,7 @@ function routeSetter({ isRelease, addSupervisedRoute, logRegistrationOutcome }: secureHandler: ({ req, res }) => { if (req.params.key === process.env.session_key) { res.send(""); - sessionAgent.serverWorker.killSession(); + sessionAgent.killSession(); } else { res.redirect("/home"); } -- cgit v1.2.3-70-g09d2 From 78bedabbbe0682d089c343ad94d90d0311bdfe0e Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Wed, 8 Jan 2020 06:30:51 -0500 Subject: graceful exiting --- src/server/DashSession.ts | 14 +- src/server/Session/session.ts | 696 ++++++++++++++++++++++++------------------ src/server/index.ts | 4 +- 3 files changed, 402 insertions(+), 312 deletions(-) (limited to 'src/server/DashSession.ts') diff --git a/src/server/DashSession.ts 
b/src/server/DashSession.ts index c0ebc9687..7c2cfaf8d 100644 --- a/src/server/DashSession.ts +++ b/src/server/DashSession.ts @@ -19,17 +19,19 @@ export class DashSessionAgent extends Session.AppliedSessionAgent { private readonly signature = "-Dash Server Session Manager"; protected async launchMonitor() { - const monitor = await Session.initializeMonitorThread({ - key: async (key, masterLog) => { + const monitor = new Session.Monitor({ + key: async key => { + // this sends a pseudorandomly generated guid to the configuration's recipients, allowing them alone + // to kill the server via the /kill/:key route const content = `The key for this session (started @ ${new Date().toUTCString()}) is ${key}.\n\n${this.signature}`; const failures = await Email.dispatchAll(this.notificationRecipients, "Server Termination Key", content); if (failures) { - failures.map(({ recipient, error: { message } }) => masterLog(red(`dispatch failure @ ${recipient} (${yellow(message)})`))); + failures.map(({ recipient, error: { message } }) => monitor.log(red(`dispatch failure @ ${recipient} (${yellow(message)})`))); return false; } return true; }, - crash: async ({ name, message, stack }, masterLog) => { + crash: async ({ name, message, stack }) => { const body = [ "You, as a Dash Administrator, are being notified of a server crash event. Here's what we know:", `name:\n${name}`, @@ -40,7 +42,7 @@ export class DashSessionAgent extends Session.AppliedSessionAgent { const content = `${body}\n\n${this.signature}`; const failures = await Email.dispatchAll(this.notificationRecipients, "Dash Web Server Crash", content); if (failures) { - failures.map(({ recipient, error: { message } }) => masterLog(red(`dispatch failure @ ${recipient} (${yellow(message)})`))); + failures.map(({ recipient, error: { message } }) => monitor.log(red(`dispatch failure @ ${recipient} (${yellow(message)})`))); return false; } return true; @@ -52,7 +54,7 @@ export class DashSessionAgent extends Session.AppliedSessionAgent { } protected async launchServerWorker() { - const worker = await Session.initializeWorkerThread(launchServer); // server initialization delegated to worker + const worker = new Session.ServerWorker(launchServer); // server initialization delegated to worker worker.addExitHandler(() => Utils.Emit(WebSocket._socket, MessageStore.ConnectionTerminated, "Manual")); return worker; } diff --git a/src/server/Session/session.ts b/src/server/Session/session.ts index cc9e7dd1a..8bee99f41 100644 --- a/src/server/Session/session.ts +++ b/src/server/Session/session.ts @@ -1,4 +1,4 @@ -import { red, cyan, green, yellow, magenta, blue } from "colors"; +import { red, cyan, green, yellow, magenta, blue, white } from "colors"; import { on, fork, setupMaster, Worker, isMaster } from "cluster"; import { get } from "request-promise"; import { Utils } from "../../Utils"; @@ -8,26 +8,32 @@ import { validate, ValidationError } from "jsonschema"; import { configurationSchema } from "./session_config_schema"; /** - * This namespace relies on NodeJS's cluster module, which allows a parent (master) process to share - * code with its children (workers). A simple `isMaster` flag indicates who is trying to access - * the code, and thus determines the functionality that actually gets invoked (checked by the caller, not internally). - * - * Think of the master thread as a factory, and the workers as the helpers that actually run the server. 
- * - * So, when we run `npm start`, given the appropriate check, initializeMaster() is called in the parent process - * This will spawn off its own child process (by default, mirrors the execution path of its parent), - * in which initializeWorker() is invoked. - */ + * This namespace relies on NodeJS's cluster module, which allows a parent (master) process to share + * code with its children (workers). A simple `isMaster` flag indicates who is trying to access + * the code, and thus determines the functionality that actually gets invoked (checked by the caller, not internally). + * + * Think of the master thread as a factory, and the workers as the helpers that actually run the server. + * + * So, when we run `npm start`, given the appropriate check, initializeMaster() is called in the parent process + * This will spawn off its own child process (by default, mirrors the execution path of its parent), + * in which initializeWorker() is invoked. + */ export namespace Session { export abstract class AppliedSessionAgent { + // the following two methods allow the developer to create a custom + // session and use the built in customization options for each thread + protected abstract async launchMonitor(): Promise; + protected abstract async launchServerWorker(): Promise; + + private launched = false; + public killSession(graceful = true) { const target = isMaster ? this.sessionMonitor : this.serverWorker; target.killSession(graceful); } - private launched = false; private sessionMonitorRef: Session.Monitor | undefined; public get sessionMonitor(): Session.Monitor { @@ -58,9 +64,6 @@ export namespace Session { } } - protected abstract async launchMonitor(): Promise; - protected abstract async launchServerWorker(): Promise; - } interface Configuration { @@ -81,377 +84,462 @@ export namespace Session { ports: { server: 3000 }, pollingRoute: "/", pollingIntervalSeconds: 30, - pollingFailureTolerance: 1 + pollingFailureTolerance: 0 }; - export interface Monitor { - log: (...optionalParams: any[]) => void; - restartServer: () => void; - setPort: (port: "server" | "socket" | string, value: number, immediateRestart: boolean) => void; - killSession: (graceful?: boolean) => never; - addReplCommand: (basename: string, argPatterns: (RegExp | string)[], action: ReplAction) => void; - addServerMessageListener: (message: string, handler: ActionHandler) => void; - removeServerMessageListener: (message: string, handler: ActionHandler) => void; - clearServerMessageListeners: (message: string) => void; - } + export type ExitHandler = (reason: Error | null) => void | Promise; - export interface ServerWorker { - killSession: (graceful?: boolean) => void; - sendSessionAction: (message: string, args?: any) => void; - addExitHandler: (handler: ExitHandler) => void; - } + export namespace Monitor { - export interface MonitorNotifierHooks { - key?: (key: string, masterLog: (...optionalParams: any[]) => void) => boolean | Promise; - crash?: (error: Error, masterLog: (...optionalParams: any[]) => void) => boolean | Promise; - } + export interface NotifierHooks { + key?: (key: string) => (boolean | Promise); + crash?: (error: Error) => (boolean | Promise); + } - export interface SessionAction { - message: string; - args: any; - } + export interface Action { + message: string; + args: any; + } + + export type ServerMessageHandler = (action: Action) => void | Promise; - export type ExitHandler = (reason: Error | null) => void | Promise; - export type ActionHandler = (action: SessionAction) => void | Promise; - export 
interface EmailTemplate { - subject: string; - body: string; } - function loadAndValidateConfiguration(): Configuration { - try { - console.log(timestamp(), cyan("validating configuration...")); - const configuration: Configuration = JSON.parse(readFileSync('./session.config.json', 'utf8')); - const options = { - throwError: true, - allowUnknownAttributes: false - }; - // ensure all necessary and no excess information is specified by the configuration file - validate(configuration, configurationSchema, options); - let formatMaster = true; - let formatWorker = true; - Object.keys(defaultConfiguration).forEach(property => { - if (!configuration[property]) { - if (property === "masterIdentifier") { - formatMaster = false; - } else if (property === "workerIdentifier") { - formatWorker = false; + /** + * Validates and reads the configuration file, accordingly builds a child process factory + * and spawns off an initial process that will respawn as predecessors die. + */ + export class Monitor { + + private exitHandlers: ExitHandler[] = []; + private readonly notifiers: Monitor.NotifierHooks | undefined; + private readonly configuration: Configuration; + private onMessage: { [message: string]: Monitor.ServerMessageHandler[] | undefined } = {}; + private activeWorker: Worker | undefined; + private key: string | undefined; + private repl: Repl; + + /** + * Kill this session and its active child + * server process, either gracefully (may wait + * indefinitely, but at least allows active networking + * requests to complete) or immediately. + */ + public killSession = async (graceful = true): Promise => { + this.log(cyan(`exiting session ${graceful ? "clean" : "immediate"}ly`)); + this.tryKillActiveWorker(graceful); + process.exit(0); + } + + /** + * Execute the list of functions registered to be called + * whenever the process exits. + */ + public addExitHandler = (handler: ExitHandler) => this.exitHandlers.push(handler); + + /** + * Extend the default repl by adding in custom commands + * that can invoke application logic external to this module + */ + public addReplCommand = (basename: string, argPatterns: (RegExp | string)[], action: ReplAction) => { + this.repl.registerCommand(basename, argPatterns, action); + } + + /** + * Add a listener at this message. When the monitor process + * receives a message, it will invoke all registered functions. + */ + public addServerMessageListener = (message: string, handler: Monitor.ServerMessageHandler) => { + const handlers = this.onMessage[message]; + if (handlers) { + handlers.push(handler); + } else { + this.onMessage[message] = [handler]; + } + } + + /** + * Unregister a given listener at this message. + */ + public removeServerMessageListener = (message: string, handler: Monitor.ServerMessageHandler) => { + const handlers = this.onMessage[message]; + if (handlers) { + const index = handlers.indexOf(handler); + if (index > -1) { + handlers.splice(index, 1); + } + } + } + + /** + * Unregister all listeners at this message. 
+ */ + public clearServerMessageListeners = (message: string) => this.onMessage[message] = undefined; + + constructor(notifiers?: Monitor.NotifierHooks) { + this.notifiers = notifiers; + + console.log(this.timestamp(), cyan("initializing session...")); + + this.configuration = this.loadAndValidateConfiguration(); + this.initializeSessionKey(); + // determines whether or not we see the compilation / initialization / runtime output of each child server process + setupMaster({ silent: !this.configuration.showServerOutput }); + + // handle exceptions in the master thread - there shouldn't be many of these + // the IPC (inter process communication) channel closed exception can't seem + // to be caught in a try catch, and is inconsequential, so it is ignored + process.on("uncaughtException", ({ message, stack }): void => { + if (message !== "Channel closed") { + this.log(red(message)); + if (stack) { + this.log(`uncaught exception\n${red(stack)}`); } - configuration[property] = defaultConfiguration[property]; } }); - if (formatMaster) { - configuration.masterIdentifier = yellow(configuration.masterIdentifier + ":"); - } - if (formatWorker) { - configuration.workerIdentifier = magenta(configuration.workerIdentifier + ":"); - } - return configuration; - } catch (error) { - if (error instanceof ValidationError) { - console.log(red("\nSession configuration failed.")); - console.log("The given session.config.json configuration file is invalid."); - console.log(`${error.instance}: ${error.stack}`); - process.exit(0); - } else if (error.code === "ENOENT" && error.path === "./session.config.json") { - console.log(cyan("Loading default session parameters...")); - console.log("Consider including a session.config.json configuration file in your project root for customization."); - return defaultConfiguration; - } else { - console.log(red("\nSession configuration failed.")); - console.log("The following unknown error occurred during configuration."); - console.log(error.stack); - process.exit(0); - } + + // a helpful cluster event called on the master thread each time a child process exits + on("exit", ({ process: { pid } }, code, signal) => { + const prompt = `server worker with process id ${pid} has exited with code ${code}${signal === null ? "" : `, having encountered signal ${signal}`}.`; + this.log(cyan(prompt)); + // to make this a robust, continuous session, every time a child process dies, we immediately spawn a new one + this.spawn(); + }); + + this.repl = this.initializeRepl(); + this.spawn(); } - } - function timestamp() { - return blue(`[${new Date().toUTCString()}]`); - } - /** - * Validates and reads the configuration file, accordingly builds a child process factory - * and spawns off an initial process that will respawn as predecessors die. 
- */ - export async function initializeMonitorThread(notifiers?: MonitorNotifierHooks): Promise { - console.log(timestamp(), cyan("initializing session...")); - let activeWorker: Worker; - const onMessage: { [message: string]: ActionHandler[] | undefined } = {}; - - // read in configuration .json file only once, in the master thread - // pass down any variables the pertinent to the child processes as environment variables - const configuration = loadAndValidateConfiguration(); - const { - masterIdentifier, - workerIdentifier, - ports, - pollingRoute, - showServerOutput, - pollingFailureTolerance - } = configuration; - let { pollingIntervalSeconds } = configuration; - - const log = (...optionalParams: any[]) => console.log(timestamp(), masterIdentifier, ...optionalParams); - - // this sends a pseudorandomly generated guid to the configuration's recipients, allowing them alone - // to kill the server via the /kill/:key route - let key: string | undefined; - if (notifiers && notifiers.key) { - key = Utils.GenerateGuid(); - const success = await notifiers.key(key, log); - const statement = success ? green("distributed session key to recipients") : red("distribution of session key failed"); - log(statement); + /** + * Generates a blue UTC string associated with the time + * of invocation. + */ + private timestamp = () => blue(`[${new Date().toUTCString()}]`); + + /** + * A formatted, identified and timestamped log in color + */ + public log = (...optionalParams: any[]) => { + console.log(this.timestamp(), this.configuration.masterIdentifier, ...optionalParams); + } + + /** + * If the caller has indicated an interest + * in being notified of this feature, creates + * a GUID for this session that can, for example, + * be used as authentication for killing the server + * (checked externally). + */ + private initializeSessionKey = async (): Promise => { + if (this.notifiers?.key) { + this.key = Utils.GenerateGuid(); + const success = await this.notifiers.key(this.key); + const statement = success ? green("distributed session key to recipients") : red("distribution of session key failed"); + this.log(statement); + } } - // handle exceptions in the master thread - there shouldn't be many of these - // the IPC (inter process communication) channel closed exception can't seem - // to be caught in a try catch, and is inconsequential, so it is ignored - process.on("uncaughtException", ({ message, stack }): void => { - if (message !== "Channel closed") { - log(red(message)); - if (stack) { - log(`uncaught exception\n${red(stack)}`); + /** + * Builds the repl that allows the following commands to be typed into stdin of the master thread. 
+ */ + private initializeRepl = (): Repl => { + const repl = new Repl({ identifier: () => `${this.timestamp()} ${this.configuration.masterIdentifier}` }); + const boolean = /true|false/; + const number = /\d+/; + const letters = /[a-zA-Z]+/; + repl.registerCommand("exit", [/clean|force/], args => this.killSession(args[0] === "clean")); + repl.registerCommand("restart", [/clean|force/], args => this.tryKillActiveWorker(args[0] === "clean")); + repl.registerCommand("set", [letters, "port", number, boolean], args => this.setPort(args[0], Number(args[2]), args[3] === "true")); + repl.registerCommand("set", [/polling/, number, boolean], args => { + const newPollingIntervalSeconds = Math.floor(Number(args[2])); + if (newPollingIntervalSeconds < 0) { + this.log(red("the polling interval must be a non-negative integer")); + } else { + if (newPollingIntervalSeconds !== this.configuration.pollingIntervalSeconds) { + this.configuration.pollingIntervalSeconds = newPollingIntervalSeconds; + if (args[3] === "true") { + this.activeWorker?.send({ newPollingIntervalSeconds }); + } + } + } + }); + return repl; + } + + /** + * Reads in configuration .json file only once, in the master thread + * and pass down any variables the pertinent to the child processes as environment variables. + */ + private loadAndValidateConfiguration = (): Configuration => { + try { + console.log(this.timestamp(), cyan("validating configuration...")); + const configuration: Configuration = JSON.parse(readFileSync('./session.config.json', 'utf8')); + const options = { + throwError: true, + allowUnknownAttributes: false + }; + // ensure all necessary and no excess information is specified by the configuration file + validate(configuration, configurationSchema, options); + let formatMaster = true; + let formatWorker = true; + Object.keys(defaultConfiguration).forEach(property => { + if (!configuration[property]) { + if (property === "masterIdentifier") { + formatMaster = false; + } else if (property === "workerIdentifier") { + formatWorker = false; + } + configuration[property] = defaultConfiguration[property]; + } + }); + if (formatMaster) { + configuration.masterIdentifier = yellow(configuration.masterIdentifier + ":"); + } + if (formatWorker) { + configuration.workerIdentifier = magenta(configuration.workerIdentifier + ":"); + } + return configuration; + } catch (error) { + if (error instanceof ValidationError) { + console.log(red("\nSession configuration failed.")); + console.log("The given session.config.json configuration file is invalid."); + console.log(`${error.instance}: ${error.stack}`); + process.exit(0); + } else if (error.code === "ENOENT" && error.path === "./session.config.json") { + console.log(cyan("Loading default session parameters...")); + console.log("Consider including a session.config.json configuration file in your project root for customization."); + return defaultConfiguration; + } else { + console.log(red("\nSession configuration failed.")); + console.log("The following unknown error occurred during configuration."); + console.log(error.stack); + process.exit(0); } } - }); + } - // determines whether or not we see the compilation / initialization / runtime output of each child server process - setupMaster({ silent: !showServerOutput }); - // attempts to kills the active worker ungracefully, unless otherwise specified - const tryKillActiveWorker = (graceful = false): boolean => { - if (activeWorker && !activeWorker.isDead()) { + private executeExitHandlers = async (reason: Error | null) => 
Promise.all(this.exitHandlers.map(handler => handler(reason))); + + /** + * Attempts to kill the active worker gracefully, unless otherwise specified. + */ + private tryKillActiveWorker = (graceful = true): boolean => { + if (!this.activeWorker?.isDead()) { if (graceful) { - activeWorker.kill(); + this.activeWorker?.send({ manualExit: true }); } else { - activeWorker.process.kill(); + this.activeWorker?.process.kill(); } return true; } return false; - }; - - const restartServer = (): void => { - // indicate to the worker that we are 'expecting' this restart - activeWorker.send({ setResponsiveness: false }); - tryKillActiveWorker(true); - }; - - const killSession = (graceful = true): never => { - log(cyan(`exiting session ${graceful ? "clean" : "immediate"}ly`)); - tryKillActiveWorker(graceful); - process.exit(0); - }; + } - const setPort = (port: "server" | "socket" | string, value: number, immediateRestart: boolean): void => { + /** + * Allows the caller to set the port at which the target (be it the server, + * the websocket, some other custom port) is listening. If an immediate restart + * is specified, this monitor will kill the active child and re-launch the server + * at the port. Otherwise, the updated port won't be used until / unless the child + * dies on its own and triggers a restart. + */ + private setPort = (port: "server" | "socket" | string, value: number, immediateRestart: boolean): void => { if (value > 1023 && value < 65536) { - ports[port] = value; + this.configuration.ports[port] = value; if (immediateRestart) { - restartServer(); + this.tryKillActiveWorker(); } } else { - log(red(`${port} is an invalid port number`)); + this.log(red(`${port} is an invalid port number`)); } - }; + } - // kills the current active worker and proceeds to spawn a new worker, - // feeding in configuration information as environment variables - const spawn = (): void => { - tryKillActiveWorker(); - activeWorker = fork({ + /** + * Kills the current active worker and proceeds to spawn a new worker, + * feeding in configuration information as environment variables. 
+ */ + private spawn = (): void => { + const { + pollingRoute, + pollingFailureTolerance, + pollingIntervalSeconds, + ports + } = this.configuration; + this.tryKillActiveWorker(); + this.activeWorker = fork({ pollingRoute, pollingFailureTolerance, serverPort: ports.server, socketPort: ports.socket, pollingIntervalSeconds, - session_key: key + session_key: this.key }); - log(cyan(`spawned new server worker with process id ${activeWorker.process.pid}`)); + this.log(cyan(`spawned new server worker with process id ${this.activeWorker.process.pid}`)); // an IPC message handler that executes actions on the master thread when prompted by the active worker - activeWorker.on("message", async ({ lifecycle, action }) => { + this.activeWorker.on("message", async ({ lifecycle, action }) => { if (action) { - const { message, args } = action as SessionAction; - console.log(timestamp(), `${workerIdentifier} action requested (${cyan(message)})`); + const { message, args } = action as Monitor.Action; + console.log(this.timestamp(), `${this.configuration.workerIdentifier} action requested (${cyan(message)})`); switch (message) { case "kill": - log(red("an authorized user has manually ended the server session")); - killSession(args.graceful); + this.log(red("an authorized user has manually ended the server session")); + this.killSession(args.graceful); case "notify_crash": - if (notifiers && notifiers.crash) { + if (this.notifiers?.crash) { const { error } = args; - const success = await notifiers.crash(error, log); + const success = await this.notifiers.crash(error); const statement = success ? green("distributed crash notification to recipients") : red("distribution of crash notification failed"); - log(statement); + this.log(statement); } case "set_port": const { port, value, immediateRestart } = args; - setPort(port, value, immediateRestart); - default: - const handlers = onMessage[message]; - if (handlers) { - handlers.forEach(handler => handler({ message, args })); - } + this.setPort(port, value, immediateRestart); + } + const handlers = this.onMessage[message]; + if (handlers) { + handlers.forEach(handler => handler({ message, args })); } } else if (lifecycle) { - console.log(timestamp(), `${workerIdentifier} lifecycle phase (${lifecycle})`); + console.log(this.timestamp(), `${this.configuration.workerIdentifier} lifecycle phase (${lifecycle})`); } }); - }; - - // a helpful cluster event called on the master thread each time a child process exits - on("exit", ({ process: { pid } }, code, signal) => { - const prompt = `server worker with process id ${pid} has exited with code ${code}${signal === null ? 
"" : `, having encountered signal ${signal}`}.`; - log(cyan(prompt)); - // to make this a robust, continuous session, every time a child process dies, we immediately spawn a new one - spawn(); - }); - - // builds the repl that allows the following commands to be typed into stdin of the master thread - const repl = new Repl({ identifier: () => `${timestamp()} ${masterIdentifier}` }); - const boolean = /true|false/; - const number = /\d+/; - const letters = /[a-zA-Z]+/; - repl.registerCommand("exit", [/clean|force/], args => killSession(args[0] === "clean")); - repl.registerCommand("restart", [], restartServer); - repl.registerCommand("set", [letters, "port", number, boolean], args => setPort(args[0], Number(args[2]), args[3] === "true")); - repl.registerCommand("set", [/polling/, number, boolean], args => { - const newPollingIntervalSeconds = Math.floor(Number(args[2])); - if (newPollingIntervalSeconds < 0) { - log(red("the polling interval must be a non-negative integer")); - } else { - if (newPollingIntervalSeconds !== pollingIntervalSeconds) { - pollingIntervalSeconds = newPollingIntervalSeconds; - if (args[3] === "true") { - activeWorker.send({ newPollingIntervalSeconds }); - } - } - } - }); - // finally, set things in motion by spawning off the first child (server) process - spawn(); - - // returned to allow the caller to add custom commands - return { - addReplCommand: repl.registerCommand, - addServerMessageListener: (message: string, handler: ActionHandler) => { - const handlers = onMessage[message]; - if (handlers) { - handlers.push(handler); - } else { - onMessage[message] = [handler]; - } - }, - removeServerMessageListener: (message: string, handler: ActionHandler) => { - const handlers = onMessage[message]; - if (handlers) { - const index = handlers.indexOf(handler); - if (index > -1) { - handlers.splice(index, 1); - } - } - }, - clearServerMessageListeners: (message: string) => onMessage[message] = undefined, - restartServer, - killSession, - setPort, - log - }; + } + } + + /** * Effectively, each worker repairs the connection to the server by reintroducing a consistent state * if its predecessor has died. It itself also polls the server heartbeat, and exits with a notification * email if the server encounters an uncaught exception or if the server cannot be reached. - * @param work the function specifying the work to be done by each worker thread */ - export async function initializeWorkerThread(work: Function): Promise { - let shouldServerBeResponsive = false; - const exitHandlers: ExitHandler[] = []; - let pollingFailureCount = 0; - - const lifecycleNotification = (lifecycle: string) => process.send?.({ lifecycle }); - - // notify master thread (which will log update in the console) of initialization via IPC - lifecycleNotification(green("compiling and initializing...")); + export class ServerWorker { + + private shouldServerBeResponsive = false; + private exitHandlers: ExitHandler[] = []; + private pollingFailureCount = 0; + private pollingIntervalSeconds: number; + private pollingFailureTolerance: number; + private pollTarget: string; + private serverPort: number; + + /** + * Allows developers to invoke application specific logic + * by hooking into the exiting of the server process. + */ + public addExitHandler = (handler: ExitHandler) => this.exitHandlers.push(handler); + + /** + * Kill the session monitor (parent process) from this + * server worker (child process). This will also kill + * this process (child process). 
+ */ + public killSession = (graceful = true) => this.sendMonitorAction("kill", { graceful }); + + /** + * A convenience wrapper to tell the session monitor (parent process) + * to carry out the action with the specified message and arguments. + */ + public sendMonitorAction = (message: string, args?: any) => process.send!({ action: { message, args } }); + + constructor(work: Function) { + this.lifecycleNotification(green(`initializing process... (${white(`${process.execPath} ${process.execArgv.join(" ")}`)})`)); + + const { pollingRoute, serverPort, pollingIntervalSeconds, pollingFailureTolerance } = process.env; + this.serverPort = Number(serverPort); + this.pollingIntervalSeconds = Number(pollingIntervalSeconds); + this.pollingFailureTolerance = Number(pollingFailureTolerance); + this.pollTarget = `http://localhost:${serverPort}${pollingRoute}`; + + this.configureProcess(); + work(); + this.pollServer(); + } - // updates the local value of listening to the value sent from master - process.on("message", ({ setResponsiveness, newPollingIntervalSeconds }) => { - if (setResponsiveness) { - shouldServerBeResponsive = setResponsiveness; - } - if (newPollingIntervalSeconds) { - pollingIntervalSeconds = newPollingIntervalSeconds; - } - }); + /** + * Set up message and uncaught exception handlers for this + * server process. + */ + private configureProcess = () => { + // updates the local values of variables to the those sent from master + process.on("message", async ({ setResponsiveness, newPollingIntervalSeconds, manualExit }) => { + if (setResponsiveness !== undefined) { + this.shouldServerBeResponsive = setResponsiveness; + } + if (newPollingIntervalSeconds !== undefined) { + this.pollingIntervalSeconds = newPollingIntervalSeconds; + } + if (manualExit !== undefined) { + await this.executeExitHandlers(null); + process.exit(0); + } + }); - const executeExitHandlers = async (reason: Error | null) => Promise.all(exitHandlers.map(handler => handler(reason))); + // one reason to exit, as the process might be in an inconsistent state after such an exception + process.on('uncaughtException', this.proactiveUnplannedExit); + } - // called whenever the process has a reason to terminate, either through an uncaught exception - // in the process (potentially inconsistent state) or the server cannot be reached - const activeExit = async (error: Error): Promise => { - shouldServerBeResponsive = false; + /** + * Execute the list of functions registered to be called + * whenever the process exits. + */ + private executeExitHandlers = async (reason: Error | null) => Promise.all(this.exitHandlers.map(handler => handler(reason))); + + /** + * Notify master thread (which will log update in the console) of initialization via IPC. + */ + private lifecycleNotification = (event: string) => process.send?.({ lifecycle: event }); + + /** + * Called whenever the process has a reason to terminate, either through an uncaught exception + * in the process (potentially inconsistent state) or the server cannot be reached. 
+ */ + private proactiveUnplannedExit = async (error: Error): Promise => { + this.shouldServerBeResponsive = false; // communicates via IPC to the master thread that it should dispatch a crash notification email - process.send?.({ - action: { - message: "notify_crash", - args: { error } - } - }); - await executeExitHandlers(error); + this.sendMonitorAction("notify_crash", { error }); + await this.executeExitHandlers(error); // notify master thread (which will log update in the console) of crash event via IPC - lifecycleNotification(red(`crash event detected @ ${new Date().toUTCString()}`)); - lifecycleNotification(red(error.message)); + this.lifecycleNotification(red(`crash event detected @ ${new Date().toUTCString()}`)); + this.lifecycleNotification(red(error.message)); process.exit(1); - }; - - // one reason to exit, as the process might be in an inconsistent state after such an exception - process.on('uncaughtException', activeExit); - - const { env } = process; - const { pollingRoute, serverPort } = env; - let pollingIntervalSeconds = Number(env.pollingIntervalSeconds); - const pollingFailureTolerance = Number(env.pollingFailureTolerance); - // this monitors the health of the server by submitting a get request to whatever port / route specified - // by the configuration every n seconds, where n is also given by the configuration. - const pollTarget = `http://localhost:${serverPort}${pollingRoute}`; - const pollServer = async (): Promise => { + } + + /** + * This monitors the health of the server by submitting a get request to whatever port / route specified + * by the configuration every n seconds, where n is also given by the configuration. + */ + private pollServer = async (): Promise => { await new Promise(resolve => { setTimeout(async () => { try { - await get(pollTarget); - if (!shouldServerBeResponsive) { - // notify master thread (which will log update in the console) via IPC that the server is up and running - process.send?.({ lifecycle: green(`listening on ${serverPort}...`) }); + await get(this.pollTarget); + if (!this.shouldServerBeResponsive) { + // notify monitor thread that the server is up and running + this.lifecycleNotification(green(`listening on ${this.serverPort}...`)); } - shouldServerBeResponsive = true; + this.shouldServerBeResponsive = true; resolve(); } catch (error) { // if we expect the server to be unavailable, i.e. 
during compilation, // the listening variable is false, activeExit will return early and the child // process will continue - if (shouldServerBeResponsive) { - if (++pollingFailureCount > pollingFailureTolerance) { - activeExit(error); + if (this.shouldServerBeResponsive) { + if (++this.pollingFailureCount > this.pollingFailureTolerance) { + this.proactiveUnplannedExit(error); } else { - lifecycleNotification(yellow(`the server has encountered ${pollingFailureCount} of ${pollingFailureTolerance} tolerable failures`)); + this.lifecycleNotification(yellow(`the server has encountered ${this.pollingFailureCount} of ${this.pollingFailureTolerance} tolerable failures`)); } } } - }, 1000 * pollingIntervalSeconds); + }, 1000 * this.pollingIntervalSeconds); }); // controlled, asynchronous infinite recursion achieves a persistent poll that does not submit a new request until the previous has completed - pollServer(); - }; - - work(); - pollServer(); // begin polling + this.pollServer(); + } - return { - addExitHandler: (handler: ExitHandler) => exitHandlers.push(handler), - killSession: (graceful = true) => process.send!({ action: { message: "kill", args: { graceful } } }), - sendSessionAction: (message: string, args?: any) => process.send!({ action: { message, args } }) - }; } } \ No newline at end of file diff --git a/src/server/index.ts b/src/server/index.ts index ffab0f380..8e0ddc206 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -25,6 +25,7 @@ import { yellow, red } from "colors"; import { Session } from "./Session/session"; import { DashSessionAgent } from "./DashSession"; +export let sessionAgent: Session.AppliedSessionAgent; export const publicDirectory = path.resolve(__dirname, "public"); export const filesDirectory = path.resolve(publicDirectory, "files"); @@ -141,7 +142,6 @@ export async function launchServer() { await initializeServer(routeSetter); } -export const sessionAgent = new DashSessionAgent(); /** * If you're in development mode, you won't need to run a session. * The session spawns off new server processes each time an error is encountered, and doesn't @@ -149,7 +149,7 @@ export const sessionAgent = new DashSessionAgent(); * So, the 'else' clause is exactly what we've always run when executing npm start. 
*/ if (process.env.RELEASE) { - sessionAgent.launch(); + (sessionAgent = new DashSessionAgent()).launch(); } else { launchServer(); } \ No newline at end of file -- cgit v1.2.3-70-g09d2 From 252a8ea3483852b29171b459c65217aa593c25f1 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Wed, 8 Jan 2020 07:14:00 -0500 Subject: factory --- src/server/DashSession.ts | 4 ++-- src/server/Session/session.ts | 30 +++++++++++++++++++++++++----- src/server/repl.ts | 4 ++-- 3 files changed, 29 insertions(+), 9 deletions(-) (limited to 'src/server/DashSession.ts') diff --git a/src/server/DashSession.ts b/src/server/DashSession.ts index 7c2cfaf8d..22bcbadc9 100644 --- a/src/server/DashSession.ts +++ b/src/server/DashSession.ts @@ -19,7 +19,7 @@ export class DashSessionAgent extends Session.AppliedSessionAgent { private readonly signature = "-Dash Server Session Manager"; protected async launchMonitor() { - const monitor = new Session.Monitor({ + const monitor = Session.Monitor.Create({ key: async key => { // this sends a pseudorandomly generated guid to the configuration's recipients, allowing them alone // to kill the server via the /kill/:key route @@ -54,7 +54,7 @@ export class DashSessionAgent extends Session.AppliedSessionAgent { } protected async launchServerWorker() { - const worker = new Session.ServerWorker(launchServer); // server initialization delegated to worker + const worker = Session.ServerWorker.Create(launchServer); // server initialization delegated to worker worker.addExitHandler(() => Utils.Emit(WebSocket._socket, MessageStore.ConnectionTerminated, "Manual")); return worker; } diff --git a/src/server/Session/session.ts b/src/server/Session/session.ts index 3b2be9d6a..144d50c52 100644 --- a/src/server/Session/session.ts +++ b/src/server/Session/session.ts @@ -111,6 +111,7 @@ export namespace Session { */ export class Monitor { + private static count = 0; private exitHandlers: ExitHandler[] = []; private readonly notifiers: Monitor.NotifierHooks | undefined; private readonly configuration: Configuration; @@ -119,14 +120,23 @@ export namespace Session { private key: string | undefined; private repl: Repl; + public static Create(notifiers: Monitor.NotifierHooks) { + if (++Monitor.count > 1) { + throw new Error("Cannot create more than one monitor"); + } else { + return new Monitor(notifiers); + } + } + /** * Kill this session and its active child * server process, either gracefully (may wait * indefinitely, but at least allows active networking * requests to complete) or immediately. */ - public killSession = async (graceful = true): Promise => { + public killSession = async (graceful = true) => { this.log(cyan(`exiting session ${graceful ? "clean" : "immediate"}ly`)); + await this.executeExitHandlers(null); this.tryKillActiveWorker(graceful); process.exit(0); } @@ -176,7 +186,7 @@ export namespace Session { */ public clearServerMessageListeners = (message: string) => this.onMessage[message] = undefined; - constructor(notifiers?: Monitor.NotifierHooks) { + private constructor(notifiers?: Monitor.NotifierHooks) { this.notifiers = notifiers; console.log(this.timestamp(), cyan("initializing session...")); @@ -385,6 +395,7 @@ export namespace Session { case "kill": this.log(red("an authorized user has manually ended the server session")); this.killSession(args.graceful); + break; case "notify_crash": if (this.notifiers?.crash) { const { error } = args; @@ -392,9 +403,11 @@ export namespace Session { const statement = success ? 
green("distributed crash notification to recipients") : red("distribution of crash notification failed"); this.log(statement); } + break; case "set_port": const { port, value, immediateRestart } = args; this.setPort(port, value, immediateRestart); + break; } const handlers = this.onMessage[message]; if (handlers) { @@ -408,8 +421,6 @@ export namespace Session { } - - /** * Effectively, each worker repairs the connection to the server by reintroducing a consistent state * if its predecessor has died. It itself also polls the server heartbeat, and exits with a notification @@ -417,6 +428,7 @@ export namespace Session { */ export class ServerWorker { + private static count = 0; private shouldServerBeResponsive = false; private exitHandlers: ExitHandler[] = []; private pollingFailureCount = 0; @@ -425,6 +437,14 @@ export namespace Session { private pollTarget: string; private serverPort: number; + public static Create(work: Function) { + if (++ServerWorker.count > 1) { + throw new Error("Cannot create more than one worker per thread"); + } else { + return new ServerWorker(work); + } + } + /** * Allows developers to invoke application specific logic * by hooking into the exiting of the server process. @@ -444,7 +464,7 @@ export namespace Session { */ public sendMonitorAction = (message: string, args?: any) => process.send!({ action: { message, args } }); - constructor(work: Function) { + private constructor(work: Function) { this.lifecycleNotification(green(`initializing process... (${white(`${process.execPath} ${process.execArgv.join(" ")}`)})`)); const { pollingRoute, serverPort, pollingIntervalSeconds, pollingFailureTolerance } = process.env; diff --git a/src/server/repl.ts b/src/server/repl.ts index faf1eab15..c4526528e 100644 --- a/src/server/repl.ts +++ b/src/server/repl.ts @@ -97,16 +97,16 @@ export default class Repl { const candidates = registered.filter(({ argPatterns: { length: count } }) => count === length); for (const { argPatterns, action } of candidates) { const parsed: string[] = []; - let matched = false; + let matched = true; if (length) { for (let i = 0; i < length; i++) { let matches: RegExpExecArray | null; if ((matches = argPatterns[i].exec(args[i])) === null) { + matched = false; break; } parsed.push(matches[0]); } - matched = true; } if (!length || matched) { await action(parsed); -- cgit v1.2.3-70-g09d2 From e08598d32fdfdcbd12532facf05b7119bd780a19 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Wed, 8 Jan 2020 12:31:01 -0500 Subject: solr manager fixes, regex --- solr-8.3.1/server/solr/dash/data/index/_hs_1.liv | Bin 187 -> 0 bytes solr-8.3.1/server/solr/dash/data/index/_hs_2.liv | Bin 0 -> 187 bytes solr-8.3.1/server/solr/dash/data/index/_j7_1.liv | Bin 75 -> 0 bytes solr-8.3.1/server/solr/dash/data/index/_j7_2.liv | Bin 0 -> 75 bytes solr-8.3.1/server/solr/dash/data/index/_jo.dii | Bin 72 -> 0 bytes solr-8.3.1/server/solr/dash/data/index/_jo.dim | Bin 218 -> 0 bytes solr-8.3.1/server/solr/dash/data/index/_jo.fdt | Bin 651 -> 0 bytes solr-8.3.1/server/solr/dash/data/index/_jo.fdx | Bin 84 -> 0 bytes solr-8.3.1/server/solr/dash/data/index/_jo.fnm | Bin 2347 -> 0 bytes solr-8.3.1/server/solr/dash/data/index/_jo.nvd | Bin 353 -> 0 bytes solr-8.3.1/server/solr/dash/data/index/_jo.nvm | Bin 643 -> 0 bytes solr-8.3.1/server/solr/dash/data/index/_jo.si | Bin 531 -> 0 bytes solr-8.3.1/server/solr/dash/data/index/_jo_1.liv | Bin 67 -> 0 bytes .../server/solr/dash/data/index/_jo_Lucene50_0.doc | Bin 2526 -> 0 bytes .../server/solr/dash/data/index/_jo_Lucene50_0.pos | Bin 2723 
-> 0 bytes .../server/solr/dash/data/index/_jo_Lucene50_0.tim | Bin 9422 -> 0 bytes .../server/solr/dash/data/index/_jo_Lucene50_0.tip | Bin 657 -> 0 bytes .../server/solr/dash/data/index/_jo_Lucene80_0.dvd | Bin 205 -> 0 bytes .../server/solr/dash/data/index/_jo_Lucene80_0.dvm | Bin 249 -> 0 bytes solr-8.3.1/server/solr/dash/data/index/_jp.dii | Bin 72 -> 0 bytes solr-8.3.1/server/solr/dash/data/index/_jp.dim | Bin 229 -> 0 bytes solr-8.3.1/server/solr/dash/data/index/_jp.fdt | Bin 398 -> 0 bytes solr-8.3.1/server/solr/dash/data/index/_jp.fdx | Bin 84 -> 0 bytes solr-8.3.1/server/solr/dash/data/index/_jp.fnm | Bin 1081 -> 0 bytes solr-8.3.1/server/solr/dash/data/index/_jp.nvd | Bin 235 -> 0 bytes solr-8.3.1/server/solr/dash/data/index/_jp.nvm | Bin 355 -> 0 bytes solr-8.3.1/server/solr/dash/data/index/_jp.si | Bin 531 -> 0 bytes solr-8.3.1/server/solr/dash/data/index/_jp_1.liv | Bin 67 -> 0 bytes .../server/solr/dash/data/index/_jp_Lucene50_0.doc | Bin 2635 -> 0 bytes .../server/solr/dash/data/index/_jp_Lucene50_0.pos | Bin 2579 -> 0 bytes .../server/solr/dash/data/index/_jp_Lucene50_0.tim | Bin 3558 -> 0 bytes .../server/solr/dash/data/index/_jp_Lucene50_0.tip | Bin 304 -> 0 bytes .../server/solr/dash/data/index/_jp_Lucene80_0.dvd | Bin 121 -> 0 bytes .../server/solr/dash/data/index/_jp_Lucene80_0.dvm | Bin 249 -> 0 bytes solr-8.3.1/server/solr/dash/data/index/_jq.dii | Bin 78 -> 0 bytes solr-8.3.1/server/solr/dash/data/index/_jq.dim | Bin 291 -> 0 bytes solr-8.3.1/server/solr/dash/data/index/_jq.fdt | Bin 719 -> 0 bytes solr-8.3.1/server/solr/dash/data/index/_jq.fdx | Bin 84 -> 0 bytes solr-8.3.1/server/solr/dash/data/index/_jq.fnm | Bin 1282 -> 0 bytes solr-8.3.1/server/solr/dash/data/index/_jq.nvd | Bin 131 -> 0 bytes solr-8.3.1/server/solr/dash/data/index/_jq.nvm | Bin 283 -> 0 bytes solr-8.3.1/server/solr/dash/data/index/_jq.si | Bin 531 -> 0 bytes solr-8.3.1/server/solr/dash/data/index/_jq_2.liv | Bin 67 -> 0 bytes .../server/solr/dash/data/index/_jq_Lucene50_0.doc | Bin 116 -> 0 bytes .../server/solr/dash/data/index/_jq_Lucene50_0.pos | Bin 458 -> 0 bytes .../server/solr/dash/data/index/_jq_Lucene50_0.tim | Bin 5813 -> 0 bytes .../server/solr/dash/data/index/_jq_Lucene50_0.tip | Bin 366 -> 0 bytes .../server/solr/dash/data/index/_jq_Lucene80_0.dvd | Bin 190 -> 0 bytes .../server/solr/dash/data/index/_jq_Lucene80_0.dvm | Bin 249 -> 0 bytes solr-8.3.1/server/solr/dash/data/index/_jr.dii | Bin 69 -> 0 bytes solr-8.3.1/server/solr/dash/data/index/_jr.dim | Bin 124 -> 0 bytes solr-8.3.1/server/solr/dash/data/index/_jr.fdt | Bin 226 -> 0 bytes solr-8.3.1/server/solr/dash/data/index/_jr.fdx | Bin 84 -> 0 bytes solr-8.3.1/server/solr/dash/data/index/_jr.fnm | Bin 445 -> 0 bytes solr-8.3.1/server/solr/dash/data/index/_jr.si | Bin 496 -> 0 bytes solr-8.3.1/server/solr/dash/data/index/_jr_1.liv | Bin 67 -> 0 bytes .../server/solr/dash/data/index/_jr_Lucene50_0.doc | Bin 114 -> 0 bytes .../server/solr/dash/data/index/_jr_Lucene50_0.tim | Bin 520 -> 0 bytes .../server/solr/dash/data/index/_jr_Lucene50_0.tip | Bin 145 -> 0 bytes .../server/solr/dash/data/index/_jr_Lucene80_0.dvd | Bin 110 -> 0 bytes .../server/solr/dash/data/index/_jr_Lucene80_0.dvm | Bin 249 -> 0 bytes solr-8.3.1/server/solr/dash/data/index/_js.dii | Bin 0 -> 110 bytes solr-8.3.1/server/solr/dash/data/index/_js.dim | Bin 0 -> 9025 bytes solr-8.3.1/server/solr/dash/data/index/_js.fdt | Bin 0 -> 40992 bytes solr-8.3.1/server/solr/dash/data/index/_js.fdx | Bin 0 -> 103 bytes solr-8.3.1/server/solr/dash/data/index/_js.fnm | Bin 0 
-> 5723 bytes solr-8.3.1/server/solr/dash/data/index/_js.nvd | Bin 0 -> 4794 bytes solr-8.3.1/server/solr/dash/data/index/_js.nvm | Bin 0 -> 1471 bytes solr-8.3.1/server/solr/dash/data/index/_js.si | Bin 0 -> 533 bytes solr-8.3.1/server/solr/dash/data/index/_js_1.liv | Bin 0 -> 131 bytes .../server/solr/dash/data/index/_js_Lucene50_0.doc | Bin 0 -> 270044 bytes .../server/solr/dash/data/index/_js_Lucene50_0.pos | Bin 0 -> 318551 bytes .../server/solr/dash/data/index/_js_Lucene50_0.tim | Bin 0 -> 423689 bytes .../server/solr/dash/data/index/_js_Lucene50_0.tip | Bin 0 -> 11737 bytes .../server/solr/dash/data/index/_js_Lucene80_0.dvd | Bin 0 -> 2396 bytes .../server/solr/dash/data/index/_js_Lucene80_0.dvm | Bin 0 -> 249 bytes solr-8.3.1/server/solr/dash/data/index/segments_d | Bin 649 -> 0 bytes solr-8.3.1/server/solr/dash/data/index/segments_e | Bin 0 -> 451 bytes .../solr/dash/data/tlog/tlog.0000000000000000012 | Bin 196943 -> 196964 bytes .../tmp/start_5201032335657884982.properties | 11 +++++++++++ src/server/ApiManagers/SearchManager.ts | 2 +- src/server/DashSession.ts | 2 +- 82 files changed, 13 insertions(+), 2 deletions(-) delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_hs_1.liv create mode 100644 solr-8.3.1/server/solr/dash/data/index/_hs_2.liv delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_j7_1.liv create mode 100644 solr-8.3.1/server/solr/dash/data/index/_j7_2.liv delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jo.dii delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jo.dim delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jo.fdt delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jo.fdx delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jo.fnm delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jo.nvd delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jo.nvm delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jo.si delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jo_1.liv delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jo_Lucene50_0.doc delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jo_Lucene50_0.pos delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jo_Lucene50_0.tim delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jo_Lucene50_0.tip delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jo_Lucene80_0.dvd delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jo_Lucene80_0.dvm delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jp.dii delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jp.dim delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jp.fdt delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jp.fdx delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jp.fnm delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jp.nvd delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jp.nvm delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jp.si delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jp_1.liv delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jp_Lucene50_0.doc delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jp_Lucene50_0.pos delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jp_Lucene50_0.tim delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jp_Lucene50_0.tip delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jp_Lucene80_0.dvd delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jp_Lucene80_0.dvm delete mode 100644 
solr-8.3.1/server/solr/dash/data/index/_jq.dii delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jq.dim delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jq.fdt delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jq.fdx delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jq.fnm delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jq.nvd delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jq.nvm delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jq.si delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jq_2.liv delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jq_Lucene50_0.doc delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jq_Lucene50_0.pos delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jq_Lucene50_0.tim delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jq_Lucene50_0.tip delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jq_Lucene80_0.dvd delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jq_Lucene80_0.dvm delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jr.dii delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jr.dim delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jr.fdt delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jr.fdx delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jr.fnm delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jr.si delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jr_1.liv delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jr_Lucene50_0.doc delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jr_Lucene50_0.tim delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jr_Lucene50_0.tip delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jr_Lucene80_0.dvd delete mode 100644 solr-8.3.1/server/solr/dash/data/index/_jr_Lucene80_0.dvm create mode 100644 solr-8.3.1/server/solr/dash/data/index/_js.dii create mode 100644 solr-8.3.1/server/solr/dash/data/index/_js.dim create mode 100644 solr-8.3.1/server/solr/dash/data/index/_js.fdt create mode 100644 solr-8.3.1/server/solr/dash/data/index/_js.fdx create mode 100644 solr-8.3.1/server/solr/dash/data/index/_js.fnm create mode 100644 solr-8.3.1/server/solr/dash/data/index/_js.nvd create mode 100644 solr-8.3.1/server/solr/dash/data/index/_js.nvm create mode 100644 solr-8.3.1/server/solr/dash/data/index/_js.si create mode 100644 solr-8.3.1/server/solr/dash/data/index/_js_1.liv create mode 100644 solr-8.3.1/server/solr/dash/data/index/_js_Lucene50_0.doc create mode 100644 solr-8.3.1/server/solr/dash/data/index/_js_Lucene50_0.pos create mode 100644 solr-8.3.1/server/solr/dash/data/index/_js_Lucene50_0.tim create mode 100644 solr-8.3.1/server/solr/dash/data/index/_js_Lucene50_0.tip create mode 100644 solr-8.3.1/server/solr/dash/data/index/_js_Lucene80_0.dvd create mode 100644 solr-8.3.1/server/solr/dash/data/index/_js_Lucene80_0.dvm delete mode 100644 solr-8.3.1/server/solr/dash/data/index/segments_d create mode 100644 solr-8.3.1/server/solr/dash/data/index/segments_e create mode 100644 solr-8.3.1/server/tmp/start_5201032335657884982.properties (limited to 'src/server/DashSession.ts') diff --git a/solr-8.3.1/server/solr/dash/data/index/_hs_1.liv b/solr-8.3.1/server/solr/dash/data/index/_hs_1.liv deleted file mode 100644 index 9e1e90cb8..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_hs_1.liv and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_hs_2.liv 
b/solr-8.3.1/server/solr/dash/data/index/_hs_2.liv new file mode 100644 index 000000000..d2a881ab8 Binary files /dev/null and b/solr-8.3.1/server/solr/dash/data/index/_hs_2.liv differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_j7_1.liv b/solr-8.3.1/server/solr/dash/data/index/_j7_1.liv deleted file mode 100644 index 65e825f18..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_j7_1.liv and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_j7_2.liv b/solr-8.3.1/server/solr/dash/data/index/_j7_2.liv new file mode 100644 index 000000000..e064804e4 Binary files /dev/null and b/solr-8.3.1/server/solr/dash/data/index/_j7_2.liv differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jo.dii b/solr-8.3.1/server/solr/dash/data/index/_jo.dii deleted file mode 100644 index d621dc010..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jo.dii and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jo.dim b/solr-8.3.1/server/solr/dash/data/index/_jo.dim deleted file mode 100644 index a9d846c55..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jo.dim and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jo.fdt b/solr-8.3.1/server/solr/dash/data/index/_jo.fdt deleted file mode 100644 index 69d7ce59c..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jo.fdt and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jo.fdx b/solr-8.3.1/server/solr/dash/data/index/_jo.fdx deleted file mode 100644 index 71a73ac09..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jo.fdx and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jo.fnm b/solr-8.3.1/server/solr/dash/data/index/_jo.fnm deleted file mode 100644 index b02414ef0..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jo.fnm and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jo.nvd b/solr-8.3.1/server/solr/dash/data/index/_jo.nvd deleted file mode 100644 index 993fa7ddc..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jo.nvd and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jo.nvm b/solr-8.3.1/server/solr/dash/data/index/_jo.nvm deleted file mode 100644 index 4397e5533..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jo.nvm and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jo.si b/solr-8.3.1/server/solr/dash/data/index/_jo.si deleted file mode 100644 index 2f0aefd63..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jo.si and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jo_1.liv b/solr-8.3.1/server/solr/dash/data/index/_jo_1.liv deleted file mode 100644 index 46316505e..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jo_1.liv and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jo_Lucene50_0.doc b/solr-8.3.1/server/solr/dash/data/index/_jo_Lucene50_0.doc deleted file mode 100644 index 1cbf85053..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jo_Lucene50_0.doc and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jo_Lucene50_0.pos b/solr-8.3.1/server/solr/dash/data/index/_jo_Lucene50_0.pos deleted file mode 100644 index 97206ebfa..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jo_Lucene50_0.pos and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jo_Lucene50_0.tim 
b/solr-8.3.1/server/solr/dash/data/index/_jo_Lucene50_0.tim deleted file mode 100644 index d06a85746..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jo_Lucene50_0.tim and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jo_Lucene50_0.tip b/solr-8.3.1/server/solr/dash/data/index/_jo_Lucene50_0.tip deleted file mode 100644 index fd9ff3dae..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jo_Lucene50_0.tip and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jo_Lucene80_0.dvd b/solr-8.3.1/server/solr/dash/data/index/_jo_Lucene80_0.dvd deleted file mode 100644 index 602ba23e7..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jo_Lucene80_0.dvd and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jo_Lucene80_0.dvm b/solr-8.3.1/server/solr/dash/data/index/_jo_Lucene80_0.dvm deleted file mode 100644 index 1fd81041f..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jo_Lucene80_0.dvm and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jp.dii b/solr-8.3.1/server/solr/dash/data/index/_jp.dii deleted file mode 100644 index c39beb1bd..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jp.dii and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jp.dim b/solr-8.3.1/server/solr/dash/data/index/_jp.dim deleted file mode 100644 index 87a5071d7..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jp.dim and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jp.fdt b/solr-8.3.1/server/solr/dash/data/index/_jp.fdt deleted file mode 100644 index a232efdf8..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jp.fdt and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jp.fdx b/solr-8.3.1/server/solr/dash/data/index/_jp.fdx deleted file mode 100644 index 0b5e95a9a..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jp.fdx and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jp.fnm b/solr-8.3.1/server/solr/dash/data/index/_jp.fnm deleted file mode 100644 index c09282147..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jp.fnm and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jp.nvd b/solr-8.3.1/server/solr/dash/data/index/_jp.nvd deleted file mode 100644 index 12dcf3d4d..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jp.nvd and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jp.nvm b/solr-8.3.1/server/solr/dash/data/index/_jp.nvm deleted file mode 100644 index 4bde4b5f0..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jp.nvm and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jp.si b/solr-8.3.1/server/solr/dash/data/index/_jp.si deleted file mode 100644 index 40cd02b98..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jp.si and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jp_1.liv b/solr-8.3.1/server/solr/dash/data/index/_jp_1.liv deleted file mode 100644 index be94523f4..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jp_1.liv and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jp_Lucene50_0.doc b/solr-8.3.1/server/solr/dash/data/index/_jp_Lucene50_0.doc deleted file mode 100644 index 90774bb6f..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jp_Lucene50_0.doc and /dev/null differ diff --git 
a/solr-8.3.1/server/solr/dash/data/index/_jp_Lucene50_0.pos b/solr-8.3.1/server/solr/dash/data/index/_jp_Lucene50_0.pos deleted file mode 100644 index b4814c1f7..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jp_Lucene50_0.pos and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jp_Lucene50_0.tim b/solr-8.3.1/server/solr/dash/data/index/_jp_Lucene50_0.tim deleted file mode 100644 index f8a7871e1..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jp_Lucene50_0.tim and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jp_Lucene50_0.tip b/solr-8.3.1/server/solr/dash/data/index/_jp_Lucene50_0.tip deleted file mode 100644 index c29ac6ed5..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jp_Lucene50_0.tip and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jp_Lucene80_0.dvd b/solr-8.3.1/server/solr/dash/data/index/_jp_Lucene80_0.dvd deleted file mode 100644 index 41afd53b9..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jp_Lucene80_0.dvd and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jp_Lucene80_0.dvm b/solr-8.3.1/server/solr/dash/data/index/_jp_Lucene80_0.dvm deleted file mode 100644 index 527eed668..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jp_Lucene80_0.dvm and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jq.dii b/solr-8.3.1/server/solr/dash/data/index/_jq.dii deleted file mode 100644 index a0a86dfd0..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jq.dii and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jq.dim b/solr-8.3.1/server/solr/dash/data/index/_jq.dim deleted file mode 100644 index e69c06061..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jq.dim and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jq.fdt b/solr-8.3.1/server/solr/dash/data/index/_jq.fdt deleted file mode 100644 index c49ed904f..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jq.fdt and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jq.fdx b/solr-8.3.1/server/solr/dash/data/index/_jq.fdx deleted file mode 100644 index 85d62a39c..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jq.fdx and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jq.fnm b/solr-8.3.1/server/solr/dash/data/index/_jq.fnm deleted file mode 100644 index b3aa23794..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jq.fnm and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jq.nvd b/solr-8.3.1/server/solr/dash/data/index/_jq.nvd deleted file mode 100644 index fb74de020..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jq.nvd and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jq.nvm b/solr-8.3.1/server/solr/dash/data/index/_jq.nvm deleted file mode 100644 index b407b1b63..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jq.nvm and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jq.si b/solr-8.3.1/server/solr/dash/data/index/_jq.si deleted file mode 100644 index cdf2673ca..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jq.si and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jq_2.liv b/solr-8.3.1/server/solr/dash/data/index/_jq_2.liv deleted file mode 100644 index 0ae2a2dc3..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jq_2.liv 
and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jq_Lucene50_0.doc b/solr-8.3.1/server/solr/dash/data/index/_jq_Lucene50_0.doc deleted file mode 100644 index 91ae597e1..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jq_Lucene50_0.doc and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jq_Lucene50_0.pos b/solr-8.3.1/server/solr/dash/data/index/_jq_Lucene50_0.pos deleted file mode 100644 index 047dd7ae9..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jq_Lucene50_0.pos and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jq_Lucene50_0.tim b/solr-8.3.1/server/solr/dash/data/index/_jq_Lucene50_0.tim deleted file mode 100644 index 352a6b592..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jq_Lucene50_0.tim and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jq_Lucene50_0.tip b/solr-8.3.1/server/solr/dash/data/index/_jq_Lucene50_0.tip deleted file mode 100644 index ca0b2688d..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jq_Lucene50_0.tip and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jq_Lucene80_0.dvd b/solr-8.3.1/server/solr/dash/data/index/_jq_Lucene80_0.dvd deleted file mode 100644 index 7a5fbcef3..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jq_Lucene80_0.dvd and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jq_Lucene80_0.dvm b/solr-8.3.1/server/solr/dash/data/index/_jq_Lucene80_0.dvm deleted file mode 100644 index 228fc7b0a..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jq_Lucene80_0.dvm and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jr.dii b/solr-8.3.1/server/solr/dash/data/index/_jr.dii deleted file mode 100644 index 7bec10455..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jr.dii and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jr.dim b/solr-8.3.1/server/solr/dash/data/index/_jr.dim deleted file mode 100644 index cdf02c5fb..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jr.dim and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jr.fdt b/solr-8.3.1/server/solr/dash/data/index/_jr.fdt deleted file mode 100644 index 183dda424..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jr.fdt and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jr.fdx b/solr-8.3.1/server/solr/dash/data/index/_jr.fdx deleted file mode 100644 index 96942bc74..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jr.fdx and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jr.fnm b/solr-8.3.1/server/solr/dash/data/index/_jr.fnm deleted file mode 100644 index 34731760f..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jr.fnm and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jr.si b/solr-8.3.1/server/solr/dash/data/index/_jr.si deleted file mode 100644 index bc131eb25..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jr.si and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jr_1.liv b/solr-8.3.1/server/solr/dash/data/index/_jr_1.liv deleted file mode 100644 index 22da1aa4f..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jr_1.liv and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jr_Lucene50_0.doc b/solr-8.3.1/server/solr/dash/data/index/_jr_Lucene50_0.doc deleted file mode 100644 
index 94df5353c..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jr_Lucene50_0.doc and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jr_Lucene50_0.tim b/solr-8.3.1/server/solr/dash/data/index/_jr_Lucene50_0.tim deleted file mode 100644 index 2f078b0f1..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jr_Lucene50_0.tim and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jr_Lucene50_0.tip b/solr-8.3.1/server/solr/dash/data/index/_jr_Lucene50_0.tip deleted file mode 100644 index b6239f17d..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jr_Lucene50_0.tip and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jr_Lucene80_0.dvd b/solr-8.3.1/server/solr/dash/data/index/_jr_Lucene80_0.dvd deleted file mode 100644 index 171be787e..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jr_Lucene80_0.dvd and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_jr_Lucene80_0.dvm b/solr-8.3.1/server/solr/dash/data/index/_jr_Lucene80_0.dvm deleted file mode 100644 index f76033c91..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/_jr_Lucene80_0.dvm and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_js.dii b/solr-8.3.1/server/solr/dash/data/index/_js.dii new file mode 100644 index 000000000..6456d0dda Binary files /dev/null and b/solr-8.3.1/server/solr/dash/data/index/_js.dii differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_js.dim b/solr-8.3.1/server/solr/dash/data/index/_js.dim new file mode 100644 index 000000000..c52287cc3 Binary files /dev/null and b/solr-8.3.1/server/solr/dash/data/index/_js.dim differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_js.fdt b/solr-8.3.1/server/solr/dash/data/index/_js.fdt new file mode 100644 index 000000000..dd37842f2 Binary files /dev/null and b/solr-8.3.1/server/solr/dash/data/index/_js.fdt differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_js.fdx b/solr-8.3.1/server/solr/dash/data/index/_js.fdx new file mode 100644 index 000000000..9a09b0b33 Binary files /dev/null and b/solr-8.3.1/server/solr/dash/data/index/_js.fdx differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_js.fnm b/solr-8.3.1/server/solr/dash/data/index/_js.fnm new file mode 100644 index 000000000..0bbd5b508 Binary files /dev/null and b/solr-8.3.1/server/solr/dash/data/index/_js.fnm differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_js.nvd b/solr-8.3.1/server/solr/dash/data/index/_js.nvd new file mode 100644 index 000000000..cfba6605d Binary files /dev/null and b/solr-8.3.1/server/solr/dash/data/index/_js.nvd differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_js.nvm b/solr-8.3.1/server/solr/dash/data/index/_js.nvm new file mode 100644 index 000000000..85d8c924e Binary files /dev/null and b/solr-8.3.1/server/solr/dash/data/index/_js.nvm differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_js.si b/solr-8.3.1/server/solr/dash/data/index/_js.si new file mode 100644 index 000000000..45cc01db1 Binary files /dev/null and b/solr-8.3.1/server/solr/dash/data/index/_js.si differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_js_1.liv b/solr-8.3.1/server/solr/dash/data/index/_js_1.liv new file mode 100644 index 000000000..32ace65e0 Binary files /dev/null and b/solr-8.3.1/server/solr/dash/data/index/_js_1.liv differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_js_Lucene50_0.doc b/solr-8.3.1/server/solr/dash/data/index/_js_Lucene50_0.doc new file mode 100644 index 
000000000..9ca1ec8bd Binary files /dev/null and b/solr-8.3.1/server/solr/dash/data/index/_js_Lucene50_0.doc differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_js_Lucene50_0.pos b/solr-8.3.1/server/solr/dash/data/index/_js_Lucene50_0.pos new file mode 100644 index 000000000..141b1fac1 Binary files /dev/null and b/solr-8.3.1/server/solr/dash/data/index/_js_Lucene50_0.pos differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_js_Lucene50_0.tim b/solr-8.3.1/server/solr/dash/data/index/_js_Lucene50_0.tim new file mode 100644 index 000000000..5d36b0e8e Binary files /dev/null and b/solr-8.3.1/server/solr/dash/data/index/_js_Lucene50_0.tim differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_js_Lucene50_0.tip b/solr-8.3.1/server/solr/dash/data/index/_js_Lucene50_0.tip new file mode 100644 index 000000000..1f7c13c9c Binary files /dev/null and b/solr-8.3.1/server/solr/dash/data/index/_js_Lucene50_0.tip differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_js_Lucene80_0.dvd b/solr-8.3.1/server/solr/dash/data/index/_js_Lucene80_0.dvd new file mode 100644 index 000000000..0ab5498b2 Binary files /dev/null and b/solr-8.3.1/server/solr/dash/data/index/_js_Lucene80_0.dvd differ diff --git a/solr-8.3.1/server/solr/dash/data/index/_js_Lucene80_0.dvm b/solr-8.3.1/server/solr/dash/data/index/_js_Lucene80_0.dvm new file mode 100644 index 000000000..294b9d631 Binary files /dev/null and b/solr-8.3.1/server/solr/dash/data/index/_js_Lucene80_0.dvm differ diff --git a/solr-8.3.1/server/solr/dash/data/index/segments_d b/solr-8.3.1/server/solr/dash/data/index/segments_d deleted file mode 100644 index 1e98e2f77..000000000 Binary files a/solr-8.3.1/server/solr/dash/data/index/segments_d and /dev/null differ diff --git a/solr-8.3.1/server/solr/dash/data/index/segments_e b/solr-8.3.1/server/solr/dash/data/index/segments_e new file mode 100644 index 000000000..0c6b0903f Binary files /dev/null and b/solr-8.3.1/server/solr/dash/data/index/segments_e differ diff --git a/solr-8.3.1/server/solr/dash/data/tlog/tlog.0000000000000000012 b/solr-8.3.1/server/solr/dash/data/tlog/tlog.0000000000000000012 index c2322ffbe..ec2251d98 100644 Binary files a/solr-8.3.1/server/solr/dash/data/tlog/tlog.0000000000000000012 and b/solr-8.3.1/server/solr/dash/data/tlog/tlog.0000000000000000012 differ diff --git a/solr-8.3.1/server/tmp/start_5201032335657884982.properties b/solr-8.3.1/server/tmp/start_5201032335657884982.properties new file mode 100644 index 000000000..e3a72dc76 --- /dev/null +++ b/solr-8.3.1/server/tmp/start_5201032335657884982.properties @@ -0,0 +1,11 @@ +#start.jar properties +#Wed Jan 08 17:27:07 UTC 2020 +java.version.platform=8 +java.version=1.8.0_131 +java.version.micro=0 +jetty.home=C\:\\Users\\avd\\Desktop\\Sam\\Dash-Web\\solr-8.3.1\\server +java.version.minor=8 +jetty.home.uri=file\:///C\:/Users/avd/Desktop/Sam/Dash-Web/solr-8.3.1/server +jetty.base=C\:\\Users\\avd\\Desktop\\Sam\\Dash-Web\\solr-8.3.1\\server +java.version.major=1 +jetty.base.uri=file\:///C\:/Users/avd/Desktop/Sam/Dash-Web/solr-8.3.1/server diff --git a/src/server/ApiManagers/SearchManager.ts b/src/server/ApiManagers/SearchManager.ts index c1c908088..cb30c9552 100644 --- a/src/server/ApiManagers/SearchManager.ts +++ b/src/server/ApiManagers/SearchManager.ts @@ -73,7 +73,7 @@ export namespace SolrManager { const args = status ? 
"start" : "stop -p 8983"; try { console.log(`Solr management: trying to ${args}`); - console.log(execSync(`./solr.cmd ${args}`, { cwd: "./solr-8.3.1/bin" })); + console.log(execSync(`${process.platform === "win32" ? "solr.cmd" : "solr"} ${args}`, { cwd: "./solr-8.3.1/bin" }).toString()); return true; } catch (e) { console.log(red(`Solr management error: unable to ${args}`)); diff --git a/src/server/DashSession.ts b/src/server/DashSession.ts index 22bcbadc9..83ce7caaf 100644 --- a/src/server/DashSession.ts +++ b/src/server/DashSession.ts @@ -49,7 +49,7 @@ export class DashSessionAgent extends Session.AppliedSessionAgent { } }); monitor.addReplCommand("pull", [], () => execSync("git pull", { stdio: ["ignore", "inherit", "inherit"] })); - monitor.addReplCommand("solr", [/start|stop/g], args => SolrManager.SetRunning(args[0] === "start")); + monitor.addReplCommand("solr", [/start|stop/], args => SolrManager.SetRunning(args[0] === "start")); return monitor; } -- cgit v1.2.3-70-g09d2 From d8361df45515c9724dcf0400a2d9484118b4cd71 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Wed, 8 Jan 2020 22:04:58 -0500 Subject: configuration assignment improvements, exec log and more granularity for identifiers --- session.config.json | 8 +- .../solr/dash/data/tlog/tlog.0000000000000000014 | Bin 0 -> 56466 bytes src/server/ApiManagers/SearchManager.ts | 16 +- src/server/DashSession.ts | 13 +- src/server/Session/session.ts | 201 ++++++++++++++------- src/server/Session/session_config_schema.ts | 72 +++++--- 6 files changed, 208 insertions(+), 102 deletions(-) create mode 100644 solr-8.3.1/server/solr/dash/data/tlog/tlog.0000000000000000014 (limited to 'src/server/DashSession.ts') diff --git a/session.config.json b/session.config.json index 57ca9e3cc..f613dd904 100644 --- a/session.config.json +++ b/session.config.json @@ -4,7 +4,9 @@ "server": 1050, "socket": 4321 }, - "pollingRoute": "/serverHeartbeat", - "pollingIntervalSeconds": 15, - "pollingFailureTolerance": 0 + "polling": { + "route": "/serverHeartbeat", + "intervalSeconds": 15, + "failureTolerance": 0 + } } \ No newline at end of file diff --git a/solr-8.3.1/server/solr/dash/data/tlog/tlog.0000000000000000014 b/solr-8.3.1/server/solr/dash/data/tlog/tlog.0000000000000000014 new file mode 100644 index 000000000..e39ac337f Binary files /dev/null and b/solr-8.3.1/server/solr/dash/data/tlog/tlog.0000000000000000014 differ diff --git a/src/server/ApiManagers/SearchManager.ts b/src/server/ApiManagers/SearchManager.ts index 316ba09ed..4ce12f9f3 100644 --- a/src/server/ApiManagers/SearchManager.ts +++ b/src/server/ApiManagers/SearchManager.ts @@ -8,6 +8,7 @@ import { red, cyan, yellow } from "colors"; import RouteSubscriber from "../RouteSubscriber"; import { exec } from "child_process"; import { onWindows } from ".."; +import { get } from "request-promise"; export class SearchManager extends ApiManager { @@ -68,18 +69,25 @@ export class SearchManager extends ApiManager { export namespace SolrManager { + const command = onWindows ? "solr.cmd" : "solr"; + export async function SetRunning(status: boolean): Promise { const args = status ? "start" : "stop -p 8983"; - console.log(`Solr management: trying to ${args}`); - exec(`${onWindows ? 
"solr.cmd" : "solr"} ${args}`, { cwd: "./solr-8.3.1/bin" }, (error, stdout, stderr) => { + console.log(`solr management: trying to ${args}`); + exec(`${command} ${args}`, { cwd: "./solr-8.3.1/bin" }, (error, stdout, stderr) => { if (error) { + console.log(red(`solr management error: unable to ${args} server`)); console.log(red(error.message)); - console.log(red(`Solr management error: unable to ${args}`)); } console.log(cyan(stdout)); console.log(yellow(stderr)); }); - return true; + try { + await get("http://localhost:8983"); + return true; + } catch { + return false; + } } } \ No newline at end of file diff --git a/src/server/DashSession.ts b/src/server/DashSession.ts index 83ce7caaf..47a63c64f 100644 --- a/src/server/DashSession.ts +++ b/src/server/DashSession.ts @@ -1,8 +1,8 @@ import { Session } from "./Session/session"; import { Email } from "./ActionUtilities"; -import { red, yellow } from "colors"; +import { red, yellow, cyan } from "colors"; import { SolrManager } from "./ApiManagers/SearchManager"; -import { execSync } from "child_process"; +import { exec } from "child_process"; import { Utils } from "../Utils"; import { WebSocket } from "./Websocket/Websocket"; import { MessageStore } from "./Message"; @@ -48,7 +48,14 @@ export class DashSessionAgent extends Session.AppliedSessionAgent { return true; } }); - monitor.addReplCommand("pull", [], () => execSync("git pull", { stdio: ["ignore", "inherit", "inherit"] })); + monitor.addReplCommand("pull", [], () => exec("git pull", (error, stdout, stderr) => { + if (error) { + monitor.log(red("unable to pull from version control")); + monitor.log(red(error.message)); + } + stdout.split("\n").forEach(line => line.length && monitor.execLog(cyan(line))); + stderr.split("\n").forEach(line => line.length && monitor.execLog(yellow(line))); + })); monitor.addReplCommand("solr", [/start|stop/], args => SolrManager.SetRunning(args[0] === "start")); return monitor; } diff --git a/src/server/Session/session.ts b/src/server/Session/session.ts index 06a076ae4..9a222b2eb 100644 --- a/src/server/Session/session.ts +++ b/src/server/Session/session.ts @@ -1,4 +1,4 @@ -import { red, cyan, green, yellow, magenta, blue, white } from "colors"; +import { red, cyan, green, yellow, magenta, blue, white, Color, grey, gray, black } from "colors"; import { on, fork, setupMaster, Worker, isMaster, isWorker } from "cluster"; import { get } from "request-promise"; import { Utils } from "../../Utils"; @@ -20,6 +20,20 @@ import { configurationSchema } from "./session_config_schema"; */ export namespace Session { + type ColorLabel = "yellow" | "red" | "cyan" | "green" | "blue" | "magenta" | "grey" | "gray" | "white" | "black"; + const colorMapping: Map = new Map([ + ["yellow", yellow], + ["red", red], + ["cyan", cyan], + ["green", green], + ["blue", blue], + ["magenta", magenta], + ["grey", grey], + ["gray", gray], + ["white", white], + ["black", black] + ]); + export abstract class AppliedSessionAgent { // the following two methods allow the developer to create a custom @@ -70,25 +84,50 @@ export namespace Session { } + interface Identifier { + text: string; + color: ColorLabel; + } + + interface Identifiers { + master: Identifier; + worker: Identifier; + exec: Identifier; + } + interface Configuration { showServerOutput: boolean; - masterIdentifier: string; - workerIdentifier: string; + identifiers: Identifiers; ports: { [description: string]: number }; - pollingRoute: string; - pollingIntervalSeconds: number; - pollingFailureTolerance: number; - [key: string]: any; 
+ polling: { + route: string; + intervalSeconds: number; + failureTolerance: number; + }; } - const defaultConfiguration: Configuration = { + const defaultConfig: Configuration = { showServerOutput: false, - masterIdentifier: yellow("__monitor__:"), - workerIdentifier: magenta("__server__:"), + identifiers: { + master: { + text: "__monitor__", + color: "yellow" + }, + worker: { + text: "__server__", + color: "magenta" + }, + exec: { + text: "__exec__", + color: "green" + } + }, ports: { server: 3000 }, - pollingRoute: "/", - pollingIntervalSeconds: 30, - pollingFailureTolerance: 0 + polling: { + route: "/", + intervalSeconds: 30, + failureTolerance: 0 + } }; export type ExitHandler = (reason: Error | null) => void | Promise; @@ -118,7 +157,7 @@ export namespace Session { private static count = 0; private exitHandlers: ExitHandler[] = []; private readonly notifiers: Monitor.NotifierHooks | undefined; - private readonly configuration: Configuration; + private readonly config: Configuration; private onMessage: { [message: string]: Monitor.ServerMessageHandler[] | undefined } = {}; private activeWorker: Worker | undefined; private key: string | undefined; @@ -209,10 +248,11 @@ export namespace Session { console.log(this.timestamp(), cyan("initializing session...")); - this.configuration = this.loadAndValidateConfiguration(); + this.config = this.loadAndValidateConfiguration(); + this.initializeSessionKey(); // determines whether or not we see the compilation / initialization / runtime output of each child server process - setupMaster({ silent: !this.configuration.showServerOutput }); + setupMaster({ silent: !this.config.showServerOutput }); // handle exceptions in the master thread - there shouldn't be many of these // the IPC (inter process communication) channel closed exception can't seem @@ -238,7 +278,6 @@ export namespace Session { this.spawn(); } - /** * Generates a blue UTC string associated with the time * of invocation. @@ -249,7 +288,14 @@ export namespace Session { * A formatted, identified and timestamped log in color */ public log = (...optionalParams: any[]) => { - console.log(this.timestamp(), this.configuration.masterIdentifier, ...optionalParams); + console.log(this.timestamp(), this.config.identifiers.master.text, ...optionalParams); + } + + /** + * A formatted, identified and timestamped log in color for non- + */ + public execLog = (...optionalParams: any[]) => { + console.log(this.timestamp(), this.config.identifiers.exec.text, ...optionalParams); } /** @@ -269,30 +315,24 @@ export namespace Session { } /** - * Builds the repl that allows the following commands to be typed into stdin of the master thread. + * At any arbitrary layer of nesting within the configuration objects, any single value that + * is not specified by the configuration is given the default counterpart. If, within an object, + * one peer is given by configuration and two are not, the one is preserved while the two are given + * the default value. 
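+ * Illustrative example (the defaults mirror defaultConfig above; the override values are hypothetical):
+ * assigning defaults { ports: { server: 3000 }, polling: { route: "/", intervalSeconds: 30 } }
+ * against a specified { polling: { intervalSeconds: 15 } } collects
+ * { ports: { server: 3000 }, polling: { route: "/", intervalSeconds: 15 } }, preserving the
+ * default route and server port while honoring the overridden polling interval.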
*/ - private initializeRepl = (): Repl => { - const repl = new Repl({ identifier: () => `${this.timestamp()} ${this.configuration.masterIdentifier}` }); - const boolean = /true|false/; - const number = /\d+/; - const letters = /[a-zA-Z]+/; - repl.registerCommand("exit", [/clean|force/], args => this.killSession("manual exit requested by repl", args[0] === "clean", 0)); - repl.registerCommand("restart", [/clean|force/], args => this.tryKillActiveWorker(args[0] === "clean")); - repl.registerCommand("set", [letters, "port", number, boolean], args => this.setPort(args[0], Number(args[2]), args[3] === "true")); - repl.registerCommand("set", [/polling/, number, boolean], args => { - const newPollingIntervalSeconds = Math.floor(Number(args[2])); - if (newPollingIntervalSeconds < 0) { - this.log(red("the polling interval must be a non-negative integer")); - } else { - if (newPollingIntervalSeconds !== this.configuration.pollingIntervalSeconds) { - this.configuration.pollingIntervalSeconds = newPollingIntervalSeconds; - if (args[3] === "true") { - this.activeWorker?.send({ newPollingIntervalSeconds }); - } + private assign = (defaultObject: any, specifiedObject: any, collector: any) => { + Array.from(new Set([...Object.keys(defaultObject), ...Object.keys(specifiedObject)])).map(property => { + let defaultValue: any, specifiedValue: any; + if (specifiedValue = specifiedObject[property]) { + if (typeof specifiedValue === "object" && typeof (defaultValue = defaultObject[property]) === "object") { + this.assign(defaultValue, specifiedValue, collector[property] = {}); + } else { + collector[property] = specifiedValue; } + } else { + collector[property] = defaultObject[property]; } }); - return repl; } /** @@ -300,34 +340,19 @@ export namespace Session { * and pass down any variables the pertinent to the child processes as environment variables. 
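 * For reference, the session.config.json committed alongside this change has the shape:
 * {
 *   "ports": { "server": 1050, "socket": 4321 },
 *   "polling": { "route": "/serverHeartbeat", "intervalSeconds": 15, "failureTolerance": 0 }
 * }
 * Anything omitted there (showServerOutput, identifiers, and so on) is filled in from
 * defaultConfig via assign().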
*/ private loadAndValidateConfiguration = (): Configuration => { + let config: Configuration; try { console.log(this.timestamp(), cyan("validating configuration...")); - const configuration: Configuration = JSON.parse(readFileSync('./session.config.json', 'utf8')); + config = JSON.parse(readFileSync('./session.config.json', 'utf8')); const options = { throwError: true, allowUnknownAttributes: false }; // ensure all necessary and no excess information is specified by the configuration file - validate(configuration, configurationSchema, options); - let formatMaster = true; - let formatWorker = true; - Object.keys(defaultConfiguration).forEach(property => { - if (!configuration[property]) { - if (property === "masterIdentifier") { - formatMaster = false; - } else if (property === "workerIdentifier") { - formatWorker = false; - } - configuration[property] = defaultConfiguration[property]; - } - }); - if (formatMaster) { - configuration.masterIdentifier = yellow(configuration.masterIdentifier + ":"); - } - if (formatWorker) { - configuration.workerIdentifier = magenta(configuration.workerIdentifier + ":"); - } - return configuration; + validate(config, configurationSchema, options); + const results: any = {}; + this.assign(defaultConfig, config, results); + config = results; } catch (error) { if (error instanceof ValidationError) { console.log(red("\nSession configuration failed.")); @@ -337,16 +362,50 @@ export namespace Session { } else if (error.code === "ENOENT" && error.path === "./session.config.json") { console.log(cyan("Loading default session parameters...")); console.log("Consider including a session.config.json configuration file in your project root for customization."); - return defaultConfiguration; + config = { ...defaultConfig }; } else { console.log(red("\nSession configuration failed.")); console.log("The following unknown error occurred during configuration."); console.log(error.stack); process.exit(0); } + } finally { + const { identifiers } = config!; + Object.keys(identifiers).forEach(key => { + const resolved = key as keyof Identifiers; + const { text, color } = identifiers[resolved]; + identifiers[resolved].text = (colorMapping.get(color) || white)(`${text}:`); + }); + return config!; } } + /** + * Builds the repl that allows the following commands to be typed into stdin of the master thread. 
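+ * Illustrative invocations (the port value is only an example):
+ *   exit clean                  gracefully shut down the entire session
+ *   restart force               immediately kill and respawn the active server worker
+ *   set server port 1050 true   update the server port and restart the worker right away
+ *   set polling 30 false        change the polling interval without pushing it to the active worker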
+ */ + private initializeRepl = (): Repl => { + const repl = new Repl({ identifier: () => `${this.timestamp()} ${this.config.identifiers.master.text}` }); + const boolean = /true|false/; + const number = /\d+/; + const letters = /[a-zA-Z]+/; + repl.registerCommand("exit", [/clean|force/], args => this.killSession("manual exit requested by repl", args[0] === "clean", 0)); + repl.registerCommand("restart", [/clean|force/], args => this.tryKillActiveWorker(args[0] === "clean")); + repl.registerCommand("set", [letters, "port", number, boolean], args => this.setPort(args[0], Number(args[2]), args[3] === "true")); + repl.registerCommand("set", [/polling/, number, boolean], args => { + const newPollingIntervalSeconds = Math.floor(Number(args[2])); + if (newPollingIntervalSeconds < 0) { + this.log(red("the polling interval must be a non-negative integer")); + } else { + if (newPollingIntervalSeconds !== this.config.polling.intervalSeconds) { + this.config.polling.intervalSeconds = newPollingIntervalSeconds; + if (args[3] === "true") { + this.activeWorker?.send({ newPollingIntervalSeconds }); + } + } + } + }); + return repl; + } private executeExitHandlers = async (reason: Error | null) => Promise.all(this.exitHandlers.map(handler => handler(reason))); @@ -374,7 +433,7 @@ export namespace Session { */ private setPort = (port: "server" | "socket" | string, value: number, immediateRestart: boolean): void => { if (value > 1023 && value < 65536) { - this.configuration.ports[port] = value; + this.config.ports[port] = value; if (immediateRestart) { this.tryKillActiveWorker(); } @@ -389,18 +448,20 @@ export namespace Session { */ private spawn = (): void => { const { - pollingRoute, - pollingFailureTolerance, - pollingIntervalSeconds, + polling: { + route, + failureTolerance, + intervalSeconds + }, ports - } = this.configuration; + } = this.config; this.tryKillActiveWorker(); this.activeWorker = fork({ - pollingRoute, - pollingFailureTolerance, + pollingRoute: route, + pollingFailureTolerance: failureTolerance, serverPort: ports.server, socketPort: ports.socket, - pollingIntervalSeconds, + pollingIntervalSeconds: intervalSeconds, session_key: this.key }); this.log(cyan(`spawned new server worker with process id ${this.activeWorker.process.pid}`)); @@ -408,7 +469,7 @@ export namespace Session { this.activeWorker.on("message", async ({ lifecycle, action }) => { if (action) { const { message, args } = action as Monitor.Action; - console.log(this.timestamp(), `${this.configuration.workerIdentifier} action requested (${cyan(message)})`); + console.log(this.timestamp(), `${this.config.identifiers.worker.text} action requested (${cyan(message)})`); switch (message) { case "kill": const { reason, graceful, errorCode } = args; @@ -432,7 +493,7 @@ export namespace Session { handlers.forEach(handler => handler({ message, args })); } } else if (lifecycle) { - console.log(this.timestamp(), `${this.configuration.workerIdentifier} lifecycle phase (${lifecycle})`); + console.log(this.timestamp(), `${this.config.identifiers.worker.text} lifecycle phase (${lifecycle})`); } }); } diff --git a/src/server/Session/session_config_schema.ts b/src/server/Session/session_config_schema.ts index 5a85a45e3..e32cf8c6a 100644 --- a/src/server/Session/session_config_schema.ts +++ b/src/server/Session/session_config_schema.ts @@ -1,39 +1,67 @@ import { Schema } from "jsonschema"; +const colorPattern = /black|red|green|yellow|blue|magenta|cyan|white|gray|grey/; + +const identifierProperties: Schema = { + type: "object", + properties: { + 
text: { + type: "string", + minLength: 1 + }, + color: { + type: "string", + pattern: colorPattern + } + } +}; + +const portProperties: Schema = { + type: "number", + minimum: 1024, + maximum: 65535 +}; + export const configurationSchema: Schema = { id: "/configuration", type: "object", properties: { + showServerOutput: { type: "boolean" }, ports: { type: "object", properties: { - server: { type: "number", minimum: 1024, maximum: 65535 }, - socket: { type: "number", minimum: 1024, maximum: 65535 } + server: portProperties, + socket: portProperties }, required: ["server"], additionalProperties: true }, - pollingRoute: { - type: "string", - pattern: /\/[a-zA-Z]*/g - }, - masterIdentifier: { - type: "string", - minLength: 1 - }, - workerIdentifier: { - type: "string", - minLength: 1 + identifiers: { + type: "object", + properties: { + master: identifierProperties, + worker: identifierProperties, + exec: identifierProperties + } }, - showServerOutput: { type: "boolean" }, - pollingIntervalSeconds: { - type: "number", - minimum: 1, - maximum: 86400 + polling: { + type: "object", + additionalProperties: false, + properties: { + intervalSeconds: { + type: "number", + minimum: 1, + maximum: 86400 + }, + route: { + type: "string", + pattern: /\/[a-zA-Z]*/g + }, + failureTolerance: { + type: "number", + minimum: 0, + } + } }, - pollingFailureTolerance: { - type: "number", - minimum: 0, - } } }; \ No newline at end of file -- cgit v1.2.3-70-g09d2 From d6d13de160b738f7c10af5408a15633cfcd9044a Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Wed, 8 Jan 2020 23:39:02 -0500 Subject: no longer await repl --- src/server/DashSession.ts | 27 +++++++++++---------- src/server/Session/session.ts | 55 ++++++++++++++++++++++++++++++------------- src/server/repl.ts | 9 +++++-- 3 files changed, 60 insertions(+), 31 deletions(-) (limited to 'src/server/DashSession.ts') diff --git a/src/server/DashSession.ts b/src/server/DashSession.ts index 47a63c64f..7a1547e2f 100644 --- a/src/server/DashSession.ts +++ b/src/server/DashSession.ts @@ -1,8 +1,7 @@ import { Session } from "./Session/session"; import { Email } from "./ActionUtilities"; -import { red, yellow, cyan } from "colors"; -import { SolrManager } from "./ApiManagers/SearchManager"; -import { exec } from "child_process"; +import { red, yellow } from "colors"; +import { get } from "request-promise"; import { Utils } from "../Utils"; import { WebSocket } from "./Websocket/Websocket"; import { MessageStore } from "./Message"; @@ -26,7 +25,7 @@ export class DashSessionAgent extends Session.AppliedSessionAgent { const content = `The key for this session (started @ ${new Date().toUTCString()}) is ${key}.\n\n${this.signature}`; const failures = await Email.dispatchAll(this.notificationRecipients, "Server Termination Key", content); if (failures) { - failures.map(({ recipient, error: { message } }) => monitor.log(red(`dispatch failure @ ${recipient} (${yellow(message)})`))); + failures.map(({ recipient, error: { message } }) => monitor.mainLog(red(`dispatch failure @ ${recipient} (${yellow(message)})`))); return false; } return true; @@ -42,21 +41,23 @@ export class DashSessionAgent extends Session.AppliedSessionAgent { const content = `${body}\n\n${this.signature}`; const failures = await Email.dispatchAll(this.notificationRecipients, "Dash Web Server Crash", content); if (failures) { - failures.map(({ recipient, error: { message } }) => monitor.log(red(`dispatch failure @ ${recipient} (${yellow(message)})`))); + failures.map(({ recipient, error: { message } }) => 
monitor.mainLog(red(`dispatch failure @ ${recipient} (${yellow(message)})`))); return false; } return true; } }); - monitor.addReplCommand("pull", [], () => exec("git pull", (error, stdout, stderr) => { - if (error) { - monitor.log(red("unable to pull from version control")); - monitor.log(red(error.message)); + monitor.addReplCommand("pull", [], () => monitor.exec("git pull")); + monitor.addReplCommand("solr", [/start|stop/], async args => { + const command = args[0] === "start" ? "start" : "stop -p 8983"; + await monitor.exec(command, { cwd: "./solr-8.3.1/bin" }); + try { + await get("http://localhost:8983"); + return true; + } catch { + return false; } - stdout.split("\n").forEach(line => line.length && monitor.execLog(cyan(line))); - stderr.split("\n").forEach(line => line.length && monitor.execLog(yellow(line))); - })); - monitor.addReplCommand("solr", [/start|stop/], args => SolrManager.SetRunning(args[0] === "start")); + }); return monitor; } diff --git a/src/server/Session/session.ts b/src/server/Session/session.ts index 9a222b2eb..867d02a0f 100644 --- a/src/server/Session/session.ts +++ b/src/server/Session/session.ts @@ -6,6 +6,7 @@ import Repl, { ReplAction } from "../repl"; import { readFileSync } from "fs"; import { validate, ValidationError } from "jsonschema"; import { configurationSchema } from "./session_config_schema"; +import { exec, ExecOptions } from "child_process"; /** * This namespace relies on NodeJS's cluster module, which allows a parent (master) process to share @@ -191,8 +192,8 @@ export namespace Session { * requests to complete) or immediately. */ public killSession = async (reason: string, graceful = true, errorCode = 0) => { - this.log(cyan(`exiting session ${graceful ? "clean" : "immediate"}ly`)); - this.log(`reason: ${(red(reason))}`); + this.mainLog(cyan(`exiting session ${graceful ? "clean" : "immediate"}ly`)); + this.mainLog(`reason: ${(red(reason))}`); await this.executeExitHandlers(null); this.tryKillActiveWorker(graceful); process.exit(errorCode); @@ -212,6 +213,26 @@ export namespace Session { this.repl.registerCommand(basename, argPatterns, action); } + public exec = (command: string, options?: ExecOptions) => { + return new Promise(resolve => { + exec(command, { ...options, encoding: "utf8" }, (error, stdout, stderr) => { + if (error) { + this.execLog(red(`unable to execute ${white(command)}`)); + error.message.split("\n").forEach(line => line.length && this.execLog(red(`(error) ${line}`))); + } else { + let outLines: string[], errorLines: string[]; + if ((outLines = stdout.split("\n").filter(line => line.length)).length) { + outLines.forEach(line => line.length && this.execLog(cyan(`(stdout) ${line}`))); + } + if ((errorLines = stderr.split("\n").filter(line => line.length)).length) { + errorLines.forEach(line => line.length && this.execLog(yellow(`(stderr) ${line}`))); + } + } + resolve(); + }); + }); + } + /** * Add a listener at this message. When the monitor process * receives a message, it will invoke all registered functions. 
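 * A sketch of the intended pairing, for orientation only: the registration method's own signature
 * is not visible in this hunk, and the "backup" message below is hypothetical. The worker side
 * uses the real sendMonitorAction helper defined on ServerWorker earlier in this series:
 *
 *     // worker thread: ask the monitor to run application-specific logic
 *     this.sendMonitorAction("backup", { target: "daily" });
 *
 *     // monitor thread: every handler registered for "backup" is then invoked
 *     // with the payload { message: "backup", args: { target: "daily" } }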
@@ -259,9 +280,9 @@ export namespace Session { // to be caught in a try catch, and is inconsequential, so it is ignored process.on("uncaughtException", ({ message, stack }): void => { if (message !== "Channel closed") { - this.log(red(message)); + this.mainLog(red(message)); if (stack) { - this.log(`uncaught exception\n${red(stack)}`); + this.mainLog(`uncaught exception\n${red(stack)}`); } } }); @@ -269,7 +290,7 @@ export namespace Session { // a helpful cluster event called on the master thread each time a child process exits on("exit", ({ process: { pid } }, code, signal) => { const prompt = `server worker with process id ${pid} has exited with code ${code}${signal === null ? "" : `, having encountered signal ${signal}`}.`; - this.log(cyan(prompt)); + this.mainLog(cyan(prompt)); // to make this a robust, continuous session, every time a child process dies, we immediately spawn a new one this.spawn(); }); @@ -287,14 +308,14 @@ export namespace Session { /** * A formatted, identified and timestamped log in color */ - public log = (...optionalParams: any[]) => { + public mainLog = (...optionalParams: any[]) => { console.log(this.timestamp(), this.config.identifiers.master.text, ...optionalParams); } /** * A formatted, identified and timestamped log in color for non- */ - public execLog = (...optionalParams: any[]) => { + private execLog = (...optionalParams: any[]) => { console.log(this.timestamp(), this.config.identifiers.exec.text, ...optionalParams); } @@ -310,7 +331,7 @@ export namespace Session { this.key = Utils.GenerateGuid(); const success = await this.notifiers.key(this.key); const statement = success ? green("distributed session key to recipients") : red("distribution of session key failed"); - this.log(statement); + this.mainLog(statement); } } @@ -394,7 +415,7 @@ export namespace Session { repl.registerCommand("set", [/polling/, number, boolean], args => { const newPollingIntervalSeconds = Math.floor(Number(args[2])); if (newPollingIntervalSeconds < 0) { - this.log(red("the polling interval must be a non-negative integer")); + this.mainLog(red("the polling interval must be a non-negative integer")); } else { if (newPollingIntervalSeconds !== this.config.polling.intervalSeconds) { this.config.polling.intervalSeconds = newPollingIntervalSeconds; @@ -413,11 +434,12 @@ export namespace Session { * Attempts to kill the active worker gracefully, unless otherwise specified. */ private tryKillActiveWorker = (graceful = true): boolean => { - if (!this.activeWorker?.isDead()) { + if (this.activeWorker && !this.activeWorker.isDead()) { + this.mainLog(cyan(`${graceful ? 
"graceful" : "immediate"}ly killing the active server worker`)); if (graceful) { - this.activeWorker?.send({ manualExit: true }); + this.activeWorker.send({ manualExit: true }); } else { - this.activeWorker?.process.kill(); + this.activeWorker.process.kill(); } return true; } @@ -438,7 +460,7 @@ export namespace Session { this.tryKillActiveWorker(); } } else { - this.log(red(`${port} is an invalid port number`)); + this.mainLog(red(`${port} is an invalid port number`)); } } @@ -464,7 +486,7 @@ export namespace Session { pollingIntervalSeconds: intervalSeconds, session_key: this.key }); - this.log(cyan(`spawned new server worker with process id ${this.activeWorker.process.pid}`)); + this.mainLog(cyan(`spawned new server worker with process id ${this.activeWorker.process.pid}`)); // an IPC message handler that executes actions on the master thread when prompted by the active worker this.activeWorker.on("message", async ({ lifecycle, action }) => { if (action) { @@ -480,7 +502,7 @@ export namespace Session { const { error } = args; const success = await this.notifiers.crash(error); const statement = success ? green("distributed crash notification to recipients") : red("distribution of crash notification failed"); - this.log(statement); + this.mainLog(statement); } break; case "set_port": @@ -492,7 +514,8 @@ export namespace Session { if (handlers) { handlers.forEach(handler => handler({ message, args })); } - } else if (lifecycle) { + } + if (lifecycle) { console.log(this.timestamp(), `${this.config.identifiers.worker.text} lifecycle phase (${lifecycle})`); } }); diff --git a/src/server/repl.ts b/src/server/repl.ts index c4526528e..ad55b6aaa 100644 --- a/src/server/repl.ts +++ b/src/server/repl.ts @@ -109,8 +109,13 @@ export default class Repl { } } if (!length || matched) { - await action(parsed); - this.valid(`${command} ${parsed.join(" ")}`); + const result = action(parsed); + const resolve = () => this.valid(`${command} ${parsed.join(" ")}`); + if (result instanceof Promise) { + result.then(resolve); + } else { + resolve(); + } return; } } -- cgit v1.2.3-70-g09d2 From dd12a4ae231cab4dac1ae8b21007191dac1c1d6e Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Thu, 9 Jan 2020 02:11:04 -0500 Subject: solr cmd fix, unhandled rejection handler, stdio instead of silent and websocket null check --- src/server/DashSession.ts | 15 +++++++++------ src/server/Session/session.ts | 21 ++++++++++----------- 2 files changed, 19 insertions(+), 17 deletions(-) (limited to 'src/server/DashSession.ts') diff --git a/src/server/DashSession.ts b/src/server/DashSession.ts index 7a1547e2f..a0e00adda 100644 --- a/src/server/DashSession.ts +++ b/src/server/DashSession.ts @@ -1,11 +1,11 @@ import { Session } from "./Session/session"; import { Email } from "./ActionUtilities"; -import { red, yellow } from "colors"; +import { red, yellow, green } from "colors"; import { get } from "request-promise"; import { Utils } from "../Utils"; import { WebSocket } from "./Websocket/Websocket"; import { MessageStore } from "./Message"; -import { launchServer } from "."; +import { launchServer, onWindows } from "."; /** * If we're the monitor (master) thread, we should launch the monitor logic for the session. @@ -49,13 +49,13 @@ export class DashSessionAgent extends Session.AppliedSessionAgent { }); monitor.addReplCommand("pull", [], () => monitor.exec("git pull")); monitor.addReplCommand("solr", [/start|stop/], async args => { - const command = args[0] === "start" ? "start" : "stop -p 8983"; + const command = `${onWindows ? 
"solr.cmd" : "solr"} ${args[0] === "start" ? "start" : "stop -p 8983"}`; await monitor.exec(command, { cwd: "./solr-8.3.1/bin" }); try { await get("http://localhost:8983"); - return true; + monitor.mainLog(green("successfully connected to 8983 after running solr initialization")); } catch { - return false; + monitor.mainLog(red("unable to connect at 8983 after running solr initialization")); } }); return monitor; @@ -63,7 +63,10 @@ export class DashSessionAgent extends Session.AppliedSessionAgent { protected async launchServerWorker() { const worker = Session.ServerWorker.Create(launchServer); // server initialization delegated to worker - worker.addExitHandler(() => Utils.Emit(WebSocket._socket, MessageStore.ConnectionTerminated, "Manual")); + worker.addExitHandler(() => { + const { _socket } = WebSocket; + _socket && Utils.Emit(_socket, MessageStore.ConnectionTerminated, "Manual"); + }); return worker; } diff --git a/src/server/Session/session.ts b/src/server/Session/session.ts index 867d02a0f..6967ece52 100644 --- a/src/server/Session/session.ts +++ b/src/server/Session/session.ts @@ -195,7 +195,7 @@ export namespace Session { this.mainLog(cyan(`exiting session ${graceful ? "clean" : "immediate"}ly`)); this.mainLog(`reason: ${(red(reason))}`); await this.executeExitHandlers(null); - this.tryKillActiveWorker(graceful); + this.killActiveWorker(graceful); process.exit(errorCode); } @@ -273,7 +273,8 @@ export namespace Session { this.initializeSessionKey(); // determines whether or not we see the compilation / initialization / runtime output of each child server process - setupMaster({ silent: !this.config.showServerOutput }); + const output = this.config.showServerOutput ? "inherit" : "ignore"; + setupMaster({ stdio: ["ignore", output, output, "ipc"] }); // handle exceptions in the master thread - there shouldn't be many of these // the IPC (inter process communication) channel closed exception can't seem @@ -410,7 +411,7 @@ export namespace Session { const number = /\d+/; const letters = /[a-zA-Z]+/; repl.registerCommand("exit", [/clean|force/], args => this.killSession("manual exit requested by repl", args[0] === "clean", 0)); - repl.registerCommand("restart", [/clean|force/], args => this.tryKillActiveWorker(args[0] === "clean")); + repl.registerCommand("restart", [/clean|force/], args => this.killActiveWorker(args[0] === "clean")); repl.registerCommand("set", [letters, "port", number, boolean], args => this.setPort(args[0], Number(args[2]), args[3] === "true")); repl.registerCommand("set", [/polling/, number, boolean], args => { const newPollingIntervalSeconds = Math.floor(Number(args[2])); @@ -433,17 +434,14 @@ export namespace Session { /** * Attempts to kill the active worker gracefully, unless otherwise specified. */ - private tryKillActiveWorker = (graceful = true): boolean => { + private killActiveWorker = (graceful = true): void => { if (this.activeWorker && !this.activeWorker.isDead()) { - this.mainLog(cyan(`${graceful ? 
"graceful" : "immediate"}ly killing the active server worker`)); if (graceful) { this.activeWorker.send({ manualExit: true }); } else { this.activeWorker.process.kill(); } - return true; } - return false; } /** @@ -457,7 +455,7 @@ export namespace Session { if (value > 1023 && value < 65536) { this.config.ports[port] = value; if (immediateRestart) { - this.tryKillActiveWorker(); + this.killActiveWorker(); } } else { this.mainLog(red(`${port} is an invalid port number`)); @@ -477,7 +475,7 @@ export namespace Session { }, ports } = this.config; - this.tryKillActiveWorker(); + this.killActiveWorker(); this.activeWorker = fork({ pollingRoute: route, pollingFailureTolerance: failureTolerance, @@ -610,6 +608,7 @@ export namespace Session { // one reason to exit, as the process might be in an inconsistent state after such an exception process.on('uncaughtException', this.proactiveUnplannedExit); + process.on('unhandledRejection', this.proactiveUnplannedExit); } /** @@ -627,14 +626,14 @@ export namespace Session { * Called whenever the process has a reason to terminate, either through an uncaught exception * in the process (potentially inconsistent state) or the server cannot be reached. */ - private proactiveUnplannedExit = async (error: Error): Promise => { + private proactiveUnplannedExit = async (error: any): Promise => { this.shouldServerBeResponsive = false; // communicates via IPC to the master thread that it should dispatch a crash notification email this.sendMonitorAction("notify_crash", { error }); await this.executeExitHandlers(error); // notify master thread (which will log update in the console) of crash event via IPC this.lifecycleNotification(red(`crash event detected @ ${new Date().toUTCString()}`)); - this.lifecycleNotification(red(error.message)); + this.lifecycleNotification(red(error.message || error)); process.exit(1); } -- cgit v1.2.3-70-g09d2