Diffstat (limited to 'src/server/ApiManagers')
-rw-r--r--  src/server/ApiManagers/DeleteManager.ts        |  31
-rw-r--r--  src/server/ApiManagers/DownloadManager.ts      |   6
-rw-r--r--  src/server/ApiManagers/GooglePhotosManager.ts  | 131
-rw-r--r--  src/server/ApiManagers/SearchManager.ts        | 152
-rw-r--r--  src/server/ApiManagers/SessionManager.ts       |   9
-rw-r--r--  src/server/ApiManagers/UploadManager.ts        |  11
-rw-r--r--  src/server/ApiManagers/UserManager.ts          |   2
-rw-r--r--  src/server/ApiManagers/UtilManager.ts          |  16
8 files changed, 277 insertions(+), 81 deletions(-)
diff --git a/src/server/ApiManagers/DeleteManager.ts b/src/server/ApiManagers/DeleteManager.ts
index be452c0ff..9e70af2eb 100644
--- a/src/server/ApiManagers/DeleteManager.ts
+++ b/src/server/ApiManagers/DeleteManager.ts
@@ -2,6 +2,11 @@ import ApiManager, { Registration } from "./ApiManager";
import { Method, _permission_denied, PublicHandler } from "../RouteManager";
import { WebSocket } from "../Websocket/Websocket";
import { Database } from "../database";
+import rimraf = require("rimraf");
+import { pathToDirectory, Directory } from "./UploadManager";
+import { filesDirectory } from "..";
+import { DashUploadUtils } from "../DashUploadUtils";
+import { mkdirSync } from "fs";
export default class DeleteManager extends ApiManager {
@@ -31,21 +36,19 @@ export default class DeleteManager extends ApiManager {
}
});
- const hi: PublicHandler = async ({ res, isRelease }) => {
- if (isRelease) {
- return _permission_denied(res, deletionPermissionError);
+ register({
+ method: Method.GET,
+ subscription: "/deleteAssets",
+ secureHandler: async ({ res, isRelease }) => {
+ if (isRelease) {
+ return _permission_denied(res, deletionPermissionError);
+ }
+ rimraf.sync(filesDirectory);
+ mkdirSync(filesDirectory);
+ await DashUploadUtils.buildFileDirectories();
+ res.redirect("/delete");
}
- await Database.Instance.deleteAll('users');
- res.redirect("/home");
- };
-
- // register({
- // method: Method.GET,
- // subscription: "/deleteUsers",
- // onValidation: hi,
- // onUnauthenticated: hi
- // });
-
+ });
register({
method: Method.GET,
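
The new /deleteAssets handler is a three-step wipe-and-rebuild: delete the uploads directory tree, recreate the empty root, then restore the expected subdirectory layout. A minimal sketch of that flow, assuming the helpers named in the diff (filesDirectory, DashUploadUtils.buildFileDirectories) behave as their names suggest:

```ts
// Minimal sketch: wipe the uploads tree, recreate the empty root, then
// restore the expected subdirectory layout. rebuildDirectories stands in
// for DashUploadUtils.buildFileDirectories from the diff.
import { mkdirSync } from "fs";
import rimraf = require("rimraf");

async function deleteAssets(filesDirectory: string, rebuildDirectories: () => Promise<void>): Promise<void> {
    rimraf.sync(filesDirectory);   // recursively remove the uploads root and everything under it
    mkdirSync(filesDirectory);     // recreate the now-empty root
    await rebuildDirectories();    // rebuild the per-media-type folders before redirecting
}
```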
diff --git a/src/server/ApiManagers/DownloadManager.ts b/src/server/ApiManagers/DownloadManager.ts
index 1bb84f374..01d2dfcad 100644
--- a/src/server/ApiManagers/DownloadManager.ts
+++ b/src/server/ApiManagers/DownloadManager.ts
@@ -254,11 +254,13 @@ async function writeHierarchyRecursive(file: Archiver.Archiver, hierarchy: Hiera
// and dropped in the browser and thus hosted remotely) so we upload it
// to our server and point the zip file to it, so it can bundle up the bytes
const information = await DashUploadUtils.UploadImage(result);
- path = information.serverAccessPaths[SizeSuffix.Original];
+ path = information instanceof Error ? "" : information.accessPaths[SizeSuffix.Original].server;
}
// write the file specified by the path to the directory in the
// zip file given by the prefix.
- file.file(path, { name: documentTitle, prefix });
+ if (path) {
+ file.file(path, { name: documentTitle, prefix });
+ }
} else {
// we've hit a collection, so we have to recurse
await writeHierarchyRecursive(file, result, `${prefix}/${documentTitle}`);
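
This change reflects UploadImage's new result-or-Error return type: callers must narrow with instanceof before touching accessPaths, and an empty path tells the zip writer to skip the entry rather than crash. A self-contained sketch of the pattern; the ImageInformation shape below is an assumption modeled on the fields the diff accesses:

```ts
// The ImageInformation shape is an assumption modeled on the fields the
// diff accesses (accessPaths[size].server).
interface AccessPath { server: string; client: string }
interface ImageInformation { accessPaths: { [sizeSuffix: string]: AccessPath } }

function serverPathOrEmpty(result: ImageInformation | Error, sizeSuffix: string): string {
    // instanceof narrows the union: a failed upload yields "", which the
    // zip writer treats as "skip this entry" rather than crashing.
    return result instanceof Error ? "" : result.accessPaths[sizeSuffix].server;
}
```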
diff --git a/src/server/ApiManagers/GooglePhotosManager.ts b/src/server/ApiManagers/GooglePhotosManager.ts
index 107542ce2..25c54ee2e 100644
--- a/src/server/ApiManagers/GooglePhotosManager.ts
+++ b/src/server/ApiManagers/GooglePhotosManager.ts
@@ -7,29 +7,34 @@ import { GooglePhotosUploadUtils } from "../apis/google/GooglePhotosUploadUtils"
import { Opt } from "../../new_fields/Doc";
import { DashUploadUtils, InjectSize, SizeSuffix } from "../DashUploadUtils";
import { Database } from "../database";
+import { red } from "colors";
+import { Upload } from "../SharedMediaTypes";
+const prefix = "google_photos_";
+const remoteUploadError = "None of the preliminary uploads to Google's servers was successful.";
const authenticationError = "Unable to authenticate Google credentials before uploading to Google Photos!";
const mediaError = "Unable to convert all uploaded bytes to media items!";
-const UploadError = (count: number) => `Unable to upload ${count} images to Dash's server`;
+const localUploadError = (count: number) => `Unable to upload ${count} images to Dash's server`;
const requestError = "Unable to execute download: the body's media items were malformed.";
const downloadError = "Encountered an error while executing downloads.";
+
interface GooglePhotosUploadFailure {
batch: number;
index: number;
url: string;
reason: string;
}
+
interface MediaItem {
baseUrl: string;
- filename: string;
}
+
interface NewMediaItem {
description: string;
simpleMediaItem: {
uploadToken: string;
};
}
-const prefix = "google_photos_";
/**
* This manager handles the creation of routes for google photos functionality.
@@ -38,27 +43,47 @@ export default class GooglePhotosManager extends ApiManager {
protected initialize(register: Registration): void {
+ /**
+ * This route receives a list of urls that point to images stored
+ * on Dash's file system, and, in a two step process, uploads them to Google's servers and
+ * returns the information Google generates about the associated uploaded remote images.
+ */
register({
method: Method.POST,
- subscription: "/googlePhotosMediaUpload",
+ subscription: "/googlePhotosMediaPost",
secureHandler: async ({ user, req, res }) => {
const { media } = req.body;
+
+ // first we need to ensure that we know the google account to which these photos will be uploaded
const token = await GoogleApiServerUtils.retrieveAccessToken(user.id);
if (!token) {
return _error(res, authenticationError);
}
+
+ // next, uploading one large list (or even looping over the items synchronously) trips a rate threshold
+ // set on Google's servers and instantly returns an error. So, we ease things out and send the photos to upload in
+ // batches of 25, where the next batch is sent 100 milliseconds after we receive a response from Google's servers.
const failed: GooglePhotosUploadFailure[] = [];
const batched = BatchedArray.from<GooglePhotosUploadUtils.UploadSource>(media, { batchSize: 25 });
+ const interval = { magnitude: 100, unit: TimeUnit.Milliseconds };
const newMediaItems = await batched.batchedMapPatientInterval<NewMediaItem>(
- { magnitude: 100, unit: TimeUnit.Milliseconds },
+ interval,
async (batch: any, collector: any, { completedBatches }: any) => {
for (let index = 0; index < batch.length; index++) {
const { url, description } = batch[index];
+ // a local function used to record failure of an upload
const fail = (reason: string) => failed.push({ reason, batch: completedBatches + 1, index, url });
- const uploadToken = await GooglePhotosUploadUtils.DispatchGooglePhotosUpload(token, InjectSize(url, SizeSuffix.Original)).catch(fail);
+ // see image resizing - we store the size-agnostic url in our logic, but write out size-suffixed images to the file system.
+ // So here, given a size-agnostic url, we make that conversion so that the file system knows which bytes to actually upload.
+ const imageToUpload = InjectSize(url, SizeSuffix.Original);
+ // STEP 1/2: send the raw bytes of the image from our server to Google's servers. We'll get back an upload token
+ // which acts as a pointer to those bytes that we can use to locate them later on
+ const uploadToken = await GooglePhotosUploadUtils.DispatchGooglePhotosUpload(token, imageToUpload).catch(fail);
if (!uploadToken) {
fail(`${path.extname(url)} is not an accepted extension`);
} else {
+ // gather the upload token returned from Google (a pointer they give us to the raw, currently useless bytes
+ // we've uploaded to their servers) and put it in the JSON format that the API accepts for image creation (used soon, below)
collector.push({
description,
simpleMediaItem: { uploadToken }
@@ -67,11 +92,24 @@ export default class GooglePhotosManager extends ApiManager {
}
}
);
- const failedCount = failed.length;
- if (failedCount) {
- console.error(`Unable to upload ${failedCount} image${failedCount === 1 ? "" : "s"} to Google's servers`);
+
+ // inform the developer / server console of any failed upload attempts
+ // does not abort the operation, since some subset of the uploads may have been successful
+ const { length } = failed;
+ if (length) {
+ console.error(`Unable to upload ${length} image${length === 1 ? "" : "s"} to Google's servers`);
console.log(failed.map(({ reason, batch, index, url }) => `@${batch}.${index}: ${url} failed:\n${reason}`).join('\n\n'));
}
+
+ // if none of the preliminary uploads was successful, there's no need to try to create media items:
+ // report the failure to the client and return
+ if (!newMediaItems.length) {
+ console.error(red(`${remoteUploadError} Thus, aborting image creation. Please try again.`));
+ _error(res, remoteUploadError);
+ return;
+ }
+
+ // STEP 2/2: create the media items and return the API's response to the client, along with any failures
return GooglePhotosUploadUtils.CreateMediaItems(token, newMediaItems, req.body.album).then(
results => _success(res, { results, failed }),
error => _error(res, mediaError, error)
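
The batching comment above is the heart of this route: Google rejects a single large burst, so uploads go out 25 at a time with a 100 millisecond pause between batches. The project's BatchedArray.batchedMapPatientInterval encapsulates this; the standalone sketch below is an illustrative assumption, not its actual source:

```ts
// Generic sketch of the pacing strategy: handle items in fixed-size batches,
// pausing between batches so a remote rate threshold is never tripped.
const pause = (ms: number) => new Promise<void>(resolve => setTimeout(resolve, ms));

async function batchedMap<I, O>(
    items: I[],
    batchSize: number,
    intervalMs: number,
    handle: (item: I) => Promise<O | undefined>
): Promise<O[]> {
    const collected: O[] = [];
    for (let offset = 0; offset < items.length; offset += batchSize) {
        const batch = items.slice(offset, offset + batchSize);
        for (const item of batch) {
            // e.g. dispatch one upload and await its token (step 1 in the route)
            const result = await handle(item);
            if (result !== undefined) {
                collected.push(result);
            }
        }
        // ease off before the next batch (100 ms in the route above)
        if (offset + batchSize < items.length) {
            await pause(intervalMs);
        }
    }
    return collected;
}
```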
@@ -79,35 +117,68 @@ export default class GooglePhotosManager extends ApiManager {
}
});
+ /**
+ * This route receives a list of urls that point to images
+ * stored on Google's servers and (following a *rough* heuristic)
+ * uploads each image to Dash's server if it hasn't already been uploaded.
+ * Unfortunately, since Google has so many of these images on its servers,
+ * these user content urls expire every 6 hours. So we can't store the url of a locally uploaded
+ * Google image and compare the candidate url to it to figure out if we already have it,
+ * since the same bytes on their server might now be associated with a new, random url.
+ * So, we do the next best thing and try to use an intrinsic attribute of those bytes as
+ * an identifier: the precise content size. This works in most cases, but has the obvious flaw of failing to upload
+ * an image locally if we already have uploaded another Google user content image with the exact same content size.
+ */
register({
method: Method.POST,
- subscription: "/googlePhotosMediaDownload",
+ subscription: "/googlePhotosMediaGet",
secureHandler: async ({ req, res }) => {
- const contents: { mediaItems: MediaItem[] } = req.body;
+ const { mediaItems } = req.body as { mediaItems: MediaItem[] };
+ if (!mediaItems) {
+ // non-starter, since the input was in an invalid format
+ _invalid(res, requestError);
+ return;
+ }
let failed = 0;
- if (contents) {
- const completed: Opt<DashUploadUtils.ImageUploadInformation>[] = [];
- for (const item of contents.mediaItems) {
- const { contentSize, ...attributes } = await DashUploadUtils.InspectImage(item.baseUrl);
- const found: Opt<DashUploadUtils.ImageUploadInformation> = await Database.Auxiliary.QueryUploadHistory(contentSize!);
- if (!found) {
- const upload = await DashUploadUtils.UploadInspectedImage({ contentSize, ...attributes }, item.filename, prefix).catch(error => _error(res, downloadError, error));
- if (upload) {
- completed.push(upload);
- await Database.Auxiliary.LogUpload(upload);
- } else {
- failed++;
- }
+ const completed: Opt<Upload.ImageInformation>[] = [];
+ for (const { baseUrl } of mediaItems) {
+ // start by getting the content size of the remote image
+ const results = await DashUploadUtils.InspectImage(baseUrl);
+ if (results instanceof Error) {
+ // if something went wrong here, we can't hope to upload it, so just move on to the next
+ failed++;
+ continue;
+ }
+ const { contentSize, ...attributes } = results;
+ // check to see if we have uploaded a Google user content image *specifically via this route* already
+ // that has this exact content size
+ const found: Opt<Upload.ImageInformation> = await Database.Auxiliary.QueryUploadHistory(contentSize);
+ if (!found) {
+ // if we haven't, then upload it locally to Dash's server
+ const upload = await DashUploadUtils.UploadInspectedImage({ contentSize, ...attributes }, undefined, prefix, false).catch(error => _error(res, downloadError, error));
+ if (upload) {
+ completed.push(upload);
+ // inform the heuristic that we've encountered an image with this content size,
+ // to be later checked against in future uploads
+ await Database.Auxiliary.LogUpload(upload);
} else {
- completed.push(found);
+ // make note of a failure to upload locally
+ failed++;
}
+ } else {
+ // if we have, the variable 'found' is handily the upload information of the
+ // existing image, so we add it to the list as if we had just uploaded it now without actually
+ // making a duplicate write
+ completed.push(found);
}
- if (failed) {
- return _error(res, UploadError(failed));
- }
- return _success(res, completed);
}
- _invalid(res, requestError);
+ // if there are any failures, report a general failure to the client
+ if (failed) {
+ return _error(res, localUploadError(failed));
+ }
+ // otherwise, return the image upload information list corresponding to the newly (or previously)
+ // uploaded images
+ _success(res, completed);
}
});
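
The content-size heuristic documented above reduces to one decision: query the upload history for the size, reuse the record if found, otherwise upload and log. A sketch with the project's helpers (Database.Auxiliary.QueryUploadHistory, DashUploadUtils.UploadInspectedImage, Database.Auxiliary.LogUpload) abstracted as hypothetical callbacks:

```ts
// Sketch of the dedup decision; the callback signatures are assumptions.
interface UploadRecord { contentSize: number }

async function uploadIfNew(
    contentSize: number,
    queryHistory: (size: number) => Promise<UploadRecord | undefined>,
    upload: () => Promise<UploadRecord>,
    logUpload: (record: UploadRecord) => Promise<void>
): Promise<UploadRecord> {
    const found = await queryHistory(contentSize);
    if (found) {
        // same byte count seen before: treat it as the same image and reuse the record
        return found;
    }
    const fresh = await upload();  // first encounter: write the bytes to Dash's server
    await logUpload(fresh);        // record the size so future candidates can be skipped
    return fresh;
}
```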
diff --git a/src/server/ApiManagers/SearchManager.ts b/src/server/ApiManagers/SearchManager.ts
index 4ce12f9f3..be17c3105 100644
--- a/src/server/ApiManagers/SearchManager.ts
+++ b/src/server/ApiManagers/SearchManager.ts
@@ -4,11 +4,15 @@ import { Search } from "../Search";
const findInFiles = require('find-in-files');
import * as path from 'path';
import { pathToDirectory, Directory } from "./UploadManager";
-import { red, cyan, yellow } from "colors";
+import { red, cyan, yellow, green } from "colors";
import RouteSubscriber from "../RouteSubscriber";
-import { exec } from "child_process";
+import { exec, execSync } from "child_process";
import { onWindows } from "..";
import { get } from "request-promise";
+import { log_execution } from "../ActionUtilities";
+import { Database } from "../database";
+import rimraf = require("rimraf");
+import { mkdirSync, chmod, chmodSync } from "fs";
export class SearchManager extends ApiManager {
@@ -19,10 +23,17 @@ export class SearchManager extends ApiManager {
subscription: new RouteSubscriber("solr").add("action"),
secureHandler: async ({ req, res }) => {
const { action } = req.params;
- if (["start", "stop"].includes(action)) {
- const status = req.params.action === "start";
- const success = await SolrManager.SetRunning(status);
- console.log(success ? `Successfully ${status ? "started" : "stopped"} Solr!` : `Uh oh! Check the console for the error that occurred while ${status ? "starting" : "stopping"} Solr`);
+ switch (action) {
+ case "start":
+ case "stop":
+ const status = req.params.action === "start";
+ SolrManager.SetRunning(status);
+ break;
+ case "update":
+ await SolrManager.update();
+ break;
+ default:
+ console.log(yellow(`${action} is an unknown solr operation.`));
}
res.redirect("/home");
}
@@ -69,12 +80,10 @@ export class SearchManager extends ApiManager {
export namespace SolrManager {
- const command = onWindows ? "solr.cmd" : "solr";
-
- export async function SetRunning(status: boolean): Promise<boolean> {
+ export function SetRunning(status: boolean) {
const args = status ? "start" : "stop -p 8983";
console.log(`solr management: trying to ${args}`);
- exec(`${command} ${args}`, { cwd: "./solr-8.3.1/bin" }, (error, stdout, stderr) => {
+ exec(`solr ${args}`, { cwd: "./solr-8.3.1/bin" }, (error, stdout, stderr) => {
if (error) {
console.log(red(`solr management error: unable to ${args} server`));
console.log(red(error.message));
@@ -82,12 +91,127 @@ export namespace SolrManager {
console.log(cyan(stdout));
console.log(yellow(stderr));
});
+ if (status) {
+ console.log(cyan("Start script is executing: please allow 15 seconds for solr to start on port 8983."));
+ }
+ }
+
+ export async function update() {
+ console.log(green("Beginning update..."));
+ await log_execution<void>({
+ startMessage: "Clearing existing Solr information...",
+ endMessage: "Solr information successfully cleared",
+ action: Search.clear,
+ color: cyan
+ });
+ const cursor = await log_execution({
+ startMessage: "Connecting to and querying for all documents from database...",
+ endMessage: ({ result, error }) => {
+ const success = error === null && result !== undefined;
+ if (!success) {
+ console.log(red("Unable to connect to the database."));
+ process.exit(0);
+ }
+ return "Connection successful and query complete";
+ },
+ action: () => Database.Instance.query({}),
+ color: yellow
+ });
+ const updates: any[] = [];
+ let numDocs = 0;
+ function updateDoc(doc: any) {
+ numDocs++;
+ if ((numDocs % 50) === 0) {
+ console.log(`Batch of 50 complete, total of ${numDocs}`);
+ }
+ if (doc.__type !== "Doc") {
+ return;
+ }
+ const fields = doc.fields;
+ if (!fields) {
+ return;
+ }
+ const update: any = { id: doc._id };
+ let dynfield = false;
+ for (const key in fields) {
+ const value = fields[key];
+ const term = ToSearchTerm(value);
+ if (term !== undefined) {
+ const { suffix, value } = term;
+ update[key + suffix] = value;
+ dynfield = true;
+ }
+ }
+ if (dynfield) {
+ updates.push(update);
+ }
+ }
+ await cursor?.forEach(updateDoc);
+ const result = await log_execution({
+ startMessage: `Dispatching updates for ${updates.length} documents`,
+ endMessage: "Dispatched updates complete",
+ action: () => Search.updateDocuments(updates),
+ color: cyan
+ });
try {
- await get("http://localhost:8983");
- return true;
- } catch {
- return false;
+ if (result) {
+ const { status } = JSON.parse(result).responseHeader;
+ console.log(status ? red(`Failed with status code (${status})`) : green("Success!"));
+ } else {
+ console.log(red("Solr is likely not running!"));
+ }
+ } catch (e) {
+ console.log(red("Error:"));
+ console.log(e);
+ console.log("\n");
}
+ await cursor?.close();
+ }
+
+ const suffixMap: { [type: string]: (string | [string, string | ((json: any) => any)]) } = {
+ "number": "_n",
+ "string": "_t",
+ "boolean": "_b",
+ "image": ["_t", "url"],
+ "video": ["_t", "url"],
+ "pdf": ["_t", "url"],
+ "audio": ["_t", "url"],
+ "web": ["_t", "url"],
+ "date": ["_d", value => new Date(value.date).toISOString()],
+ "proxy": ["_i", "fieldId"],
+ "list": ["_l", list => {
+ const results = [];
+ for (const value of list.fields) {
+ const term = ToSearchTerm(value);
+ if (term) {
+ results.push(term.value);
+ }
+ }
+ return results.length ? results : null;
+ }]
+ };
+
+ function ToSearchTerm(val: any): { suffix: string, value: any } | undefined {
+ if (val === null || val === undefined) {
+ return;
+ }
+ const type = val.__type || typeof val;
+ let suffix = suffixMap[type];
+ if (!suffix) {
+ return;
+ }
+
+ if (Array.isArray(suffix)) {
+ const accessor = suffix[1];
+ if (typeof accessor === "function") {
+ val = accessor(val);
+ } else {
+ val = val[accessor];
+ }
+ suffix = suffix[0];
+ }
+
+ return { suffix, value: val };
}
}
\ No newline at end of file
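
To make the suffix mapping concrete, here is a hypothetical document run through updateDoc's logic: every convertible field key gains a type suffix (_t, _n, _b, ...) so Solr's dynamic fields can index it, and the document is pushed only because at least one field converted:

```ts
// Hypothetical document; the field values and url are illustrative only.
const exampleDoc = {
    _id: "abc123",
    __type: "Doc",
    fields: {
        title: "Project notes",                                 // string  -> title_t
        rating: 4,                                              // number  -> rating_n
        archived: false,                                        // boolean -> archived_b
        cover: { __type: "image", url: "http://.../cover.png" } // image   -> cover_t (via its url)
    }
};
// The update pushed to Solr would be:
// { id: "abc123", title_t: "Project notes", rating_n: 4, archived_b: false, cover_t: "http://.../cover.png" }
```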
diff --git a/src/server/ApiManagers/SessionManager.ts b/src/server/ApiManagers/SessionManager.ts
index f1629b8f0..bcaa6598f 100644
--- a/src/server/ApiManagers/SessionManager.ts
+++ b/src/server/ApiManagers/SessionManager.ts
@@ -53,6 +53,15 @@ export default class SessionManager extends ApiManager {
})
});
+ register({
+ method: Method.GET,
+ subscription: this.secureSubscriber("delete"),
+ secureHandler: this.authorizedAction(async ({ res }) => {
+ const { error } = await sessionAgent.serverWorker.emit("delete");
+ res.send(error ? error.message : "Your request was successful: the server successfully deleted the database. Return to /home.");
+ })
+ });
+
}
}
\ No newline at end of file
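
The new delete route leans on the session agent's emit/acknowledge round trip: the server worker performs the action and reports back an optional error. A sketch of that contract; the response shape here is an assumption:

```ts
// Sketch of the round trip: the handler emits a named action to the server
// worker and surfaces any reported error to the client.
interface WorkerResponse { error?: Error }

async function reportWorkerAction(
    emit: (action: string) => Promise<WorkerResponse>
): Promise<string> {
    const { error } = await emit("delete");
    return error ? error.message : "Your request was successful: the server successfully deleted the database.";
}
```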
diff --git a/src/server/ApiManagers/UploadManager.ts b/src/server/ApiManagers/UploadManager.ts
index e18b6826e..f872bdf94 100644
--- a/src/server/ApiManagers/UploadManager.ts
+++ b/src/server/ApiManagers/UploadManager.ts
@@ -4,12 +4,12 @@ import * as formidable from 'formidable';
import v4 = require('uuid/v4');
const AdmZip = require('adm-zip');
import { extname, basename, dirname } from 'path';
-import { createReadStream, createWriteStream, unlink, readFileSync } from "fs";
+import { createReadStream, createWriteStream, unlink } from "fs";
import { publicDirectory, filesDirectory } from "..";
import { Database } from "../database";
-import { DashUploadUtils, SizeSuffix } from "../DashUploadUtils";
+import { DashUploadUtils } from "../DashUploadUtils";
import * as sharp from 'sharp';
-import { AcceptibleMedia } from "../SharedMediaTypes";
+import { AcceptibleMedia, Upload } from "../SharedMediaTypes";
import { normalize } from "path";
const imageDataUri = require('image-data-uri');
@@ -48,7 +48,7 @@ export default class UploadManager extends ApiManager {
form.keepExtensions = true;
return new Promise<void>(resolve => {
form.parse(req, async (_err, _fields, files) => {
- const results: any[] = [];
+ const results: Upload.FileResponse[] = [];
for (const key in files) {
const result = await DashUploadUtils.upload(files[key]);
result && results.push(result);
@@ -66,7 +66,8 @@ export default class UploadManager extends ApiManager {
secureHandler: async ({ req, res }) => {
const { sources } = req.body;
if (Array.isArray(sources)) {
- return res.send(await Promise.all(sources.map(url => DashUploadUtils.UploadImage(url))));
+ const results = await Promise.all(sources.map(source => DashUploadUtils.UploadImage(source)));
+ return res.send(results);
}
res.send();
}
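
The handler above fans each source URL out through UploadImage and returns the combined results. Because UploadImage resolves with an Error value instead of rejecting (per the DownloadManager change), Promise.all never short-circuits and the results stay aligned with the sources. A sketch, with UploadImage abstracted as a parameter:

```ts
// Promise.all preserves input order, so results[i] corresponds to sources[i];
// failures surface as Error values rather than rejecting the whole batch.
async function uploadAll(
    sources: string[],
    uploadImage: (source: string) => Promise<object | Error>
): Promise<(object | Error)[]> {
    return Promise.all(sources.map(source => uploadImage(source)));
}
```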
diff --git a/src/server/ApiManagers/UserManager.ts b/src/server/ApiManagers/UserManager.ts
index b0d868918..d9d346cc1 100644
--- a/src/server/ApiManagers/UserManager.ts
+++ b/src/server/ApiManagers/UserManager.ts
@@ -34,7 +34,7 @@ export default class UserManager extends ApiManager {
register({
method: Method.GET,
subscription: "/getCurrentUser",
- secureHandler: ({ res, user }) => res.send(JSON.stringify(user)),
+ secureHandler: ({ res, user: { _id, email } }) => res.send(JSON.stringify({ id: _id, email })),
publicHandler: ({ res }) => res.send(JSON.stringify({ id: "__guest__", email: "" }))
});
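
The /getCurrentUser change destructures the user record so that only whitelisted fields are serialized, rather than echoing the whole stored document back to the client. A sketch; the StoredUser shape (including passwordHash) is hypothetical:

```ts
// Hypothetical minimal user shape; the point is that fields like a password
// hash never reach the serializer because only _id and email are picked out.
interface StoredUser { _id: string; email: string; passwordHash?: string }

function publicProfile({ _id, email }: StoredUser): string {
    return JSON.stringify({ id: _id, email });
}
```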
diff --git a/src/server/ApiManagers/UtilManager.ts b/src/server/ApiManagers/UtilManager.ts
index 32aecd3c6..d18529cf2 100644
--- a/src/server/ApiManagers/UtilManager.ts
+++ b/src/server/ApiManagers/UtilManager.ts
@@ -1,7 +1,6 @@
import ApiManager, { Registration } from "./ApiManager";
import { Method } from "../RouteManager";
import { exec } from 'child_process';
-import { command_line } from "../ActionUtilities";
import RouteSubscriber from "../RouteSubscriber";
import { red } from "colors";
import { IBM_Recommender } from "../../client/apis/IBM_Recommender";
@@ -9,6 +8,7 @@ import { Recommender } from "../Recommender";
const recommender = new Recommender();
recommender.testModel();
+import executeImport from "../../scraping/buxton/final/BuxtonImporter";
export default class UtilManager extends ApiManager {
@@ -67,20 +67,6 @@ export default class UtilManager extends ApiManager {
register({
method: Method.GET,
- subscription: "/buxton",
- secureHandler: async ({ res }) => {
- const cwd = './src/scraping/buxton';
-
- const onResolved = (stdout: string) => { console.log(stdout); res.redirect("/"); };
- const onRejected = (err: any) => { console.error(err.message); res.send(err); };
- const tryPython3 = () => command_line('python3 scraper.py', cwd).then(onResolved, onRejected);
-
- return command_line('python scraper.py', cwd).then(onResolved, tryPython3);
- },
- });
-
- register({
- method: Method.GET,
subscription: "/version",
secureHandler: ({ res }) => {
return new Promise<void>(resolve => {