Diffstat (limited to 'src/server/ApiManagers')
-rw-r--r-- | src/server/ApiManagers/DeleteManager.ts       |  31 |
-rw-r--r-- | src/server/ApiManagers/DownloadManager.ts     |   6 |
-rw-r--r-- | src/server/ApiManagers/GooglePhotosManager.ts | 284 |
-rw-r--r-- | src/server/ApiManagers/SearchManager.ts       | 154 |
-rw-r--r-- | src/server/ApiManagers/SessionManager.ts      |   9 |
-rw-r--r-- | src/server/ApiManagers/UploadManager.ts       |  28 |
-rw-r--r-- | src/server/ApiManagers/UserManager.ts         |   2 |
-rw-r--r-- | src/server/ApiManagers/UtilManager.ts         |  60 |
8 files changed, 464 insertions, 110 deletions
diff --git a/src/server/ApiManagers/DeleteManager.ts b/src/server/ApiManagers/DeleteManager.ts
index be452c0ff..9e70af2eb 100644
--- a/src/server/ApiManagers/DeleteManager.ts
+++ b/src/server/ApiManagers/DeleteManager.ts
@@ -2,6 +2,11 @@ import ApiManager, { Registration } from "./ApiManager";
 import { Method, _permission_denied, PublicHandler } from "../RouteManager";
 import { WebSocket } from "../Websocket/Websocket";
 import { Database } from "../database";
+import rimraf = require("rimraf");
+import { pathToDirectory, Directory } from "./UploadManager";
+import { filesDirectory } from "..";
+import { DashUploadUtils } from "../DashUploadUtils";
+import { mkdirSync } from "fs";
 
 export default class DeleteManager extends ApiManager {
 
@@ -31,21 +36,19 @@ export default class DeleteManager extends ApiManager {
             }
         });
 
-        const hi: PublicHandler = async ({ res, isRelease }) => {
-            if (isRelease) {
-                return _permission_denied(res, deletionPermissionError);
+        register({
+            method: Method.GET,
+            subscription: "/deleteAssets",
+            secureHandler: async ({ res, isRelease }) => {
+                if (isRelease) {
+                    return _permission_denied(res, deletionPermissionError);
+                }
+                rimraf.sync(filesDirectory);
+                mkdirSync(filesDirectory);
+                await DashUploadUtils.buildFileDirectories();
+                res.redirect("/delete");
             }
-            await Database.Instance.deleteAll('users');
-            res.redirect("/home");
-        };
-
-        // register({
-        //     method: Method.GET,
-        //     subscription: "/deleteUsers",
-        //     onValidation: hi,
-        //     onUnauthenticated: hi
-        // });
-
+        });
 
         register({
             method: Method.GET,
diff --git a/src/server/ApiManagers/DownloadManager.ts b/src/server/ApiManagers/DownloadManager.ts
index 1bb84f374..01d2dfcad 100644
--- a/src/server/ApiManagers/DownloadManager.ts
+++ b/src/server/ApiManagers/DownloadManager.ts
@@ -254,11 +254,13 @@ async function writeHierarchyRecursive(file: Archiver.Archiver, hierarchy: Hiera
             // and dropped in the browser and thus hosted remotely) so we upload it
             // to our server and point the zip file to it, so it can bundle up the bytes
             const information = await DashUploadUtils.UploadImage(result);
-            path = information.serverAccessPaths[SizeSuffix.Original];
+            path = information instanceof Error ? "" : information.accessPaths[SizeSuffix.Original].server;
         }
         // write the file specified by the path to the directory in the
         // zip file given by the prefix.
-        file.file(path, { name: documentTitle, prefix });
+        if (path) {
+            file.file(path, { name: documentTitle, prefix });
+        }
     } else {
         // we've hit a collection, so we have to recurse
         await writeHierarchyRecursive(file, result, `${prefix}/${documentTitle}`);
diff --git a/src/server/ApiManagers/GooglePhotosManager.ts b/src/server/ApiManagers/GooglePhotosManager.ts
index 107542ce2..88219423d 100644
--- a/src/server/ApiManagers/GooglePhotosManager.ts
+++ b/src/server/ApiManagers/GooglePhotosManager.ts
@@ -3,33 +3,39 @@ import { Method, _error, _success, _invalid } from "../RouteManager";
 import * as path from "path";
 import { GoogleApiServerUtils } from "../apis/google/GoogleApiServerUtils";
 import { BatchedArray, TimeUnit } from "array-batcher";
-import { GooglePhotosUploadUtils } from "../apis/google/GooglePhotosUploadUtils";
 import { Opt } from "../../new_fields/Doc";
 import { DashUploadUtils, InjectSize, SizeSuffix } from "../DashUploadUtils";
 import { Database } from "../database";
+import { red } from "colors";
+import { Upload } from "../SharedMediaTypes";
+import request = require('request-promise');
+import { NewMediaItemResult } from "../apis/google/SharedTypes";
 
+const prefix = "google_photos_";
+const remoteUploadError = "None of the preliminary uploads to Google's servers was successful.";
 const authenticationError = "Unable to authenticate Google credentials before uploading to Google Photos!";
 const mediaError = "Unable to convert all uploaded bytes to media items!";
-const UploadError = (count: number) => `Unable to upload ${count} images to Dash's server`;
+const localUploadError = (count: number) => `Unable to upload ${count} images to Dash's server`;
 const requestError = "Unable to execute download: the body's media items were malformed.";
 const downloadError = "Encountered an error while executing downloads.";
 
+
 interface GooglePhotosUploadFailure {
     batch: number;
     index: number;
     url: string;
     reason: string;
 }
 
+
 interface MediaItem {
     baseUrl: string;
-    filename: string;
 }
 
+
 interface NewMediaItem {
     description: string;
     simpleMediaItem: {
         uploadToken: string;
     };
 }
-const prefix = "google_photos_";
 
 /**
  * This manager handles the creation of routes for google photos functionality.
@@ -38,27 +44,47 @@ export default class GooglePhotosManager extends ApiManager {
 
     protected initialize(register: Registration): void {
 
+        /**
+         * This route receives a list of urls that point to images stored
+         * on Dash's file system and, in a two-step process, uploads them to Google's servers,
+         * returning the information Google generates about the associated uploaded remote images.
+         */
         register({
             method: Method.POST,
-            subscription: "/googlePhotosMediaUpload",
+            subscription: "/googlePhotosMediaPost",
             secureHandler: async ({ user, req, res }) => {
                 const { media } = req.body;
+
+                // first we need to ensure that we know the Google account to which these photos will be uploaded
                 const token = await GoogleApiServerUtils.retrieveAccessToken(user.id);
                 if (!token) {
                     return _error(res, authenticationError);
                 }
+
+                // next, uploading one large list, or even synchronously looping over the images, trips a threshold
+                // set on Google's servers and instantly returns an error. So, we ease things out and send the photos in
+                // batches of 25, where the next batch is sent 100 milliseconds after we receive a response from Google's servers.
                 const failed: GooglePhotosUploadFailure[] = [];
-                const batched = BatchedArray.from<GooglePhotosUploadUtils.UploadSource>(media, { batchSize: 25 });
+                const batched = BatchedArray.from<Uploader.UploadSource>(media, { batchSize: 25 });
+                const interval = { magnitude: 100, unit: TimeUnit.Milliseconds };
                 const newMediaItems = await batched.batchedMapPatientInterval<NewMediaItem>(
-                    { magnitude: 100, unit: TimeUnit.Milliseconds },
-                    async (batch: any, collector: any, { completedBatches }: any) => {
+                    interval,
+                    async (batch, collector, { completedBatches }) => {
                         for (let index = 0; index < batch.length; index++) {
                             const { url, description } = batch[index];
+                            // a local function used to record the failure of an upload
                             const fail = (reason: string) => failed.push({ reason, batch: completedBatches + 1, index, url });
-                            const uploadToken = await GooglePhotosUploadUtils.DispatchGooglePhotosUpload(token, InjectSize(url, SizeSuffix.Original)).catch(fail);
+                            // see image resizing - we store the size-agnostic url in our logic, but write out size-suffixed images to the file system,
+                            // so here, given a size-agnostic url, we're just making that conversion so that the file system knows which bytes to actually upload
+                            const imageToUpload = InjectSize(url, SizeSuffix.Original);
+                            // STEP 1/2: send the raw bytes of the image from our server to Google's servers. We'll get back an upload token,
+                            // which acts as a pointer to those bytes that we can use to locate them later on
+                            const uploadToken = await Uploader.SendBytes(token, imageToUpload).catch(fail);
                             if (!uploadToken) {
                                 fail(`${path.extname(url)} is not an accepted extension`);
                             } else {
+                                // gather the upload token returned from Google (a pointer they give us to the raw, currently useless bytes
+                                // we've uploaded to their servers) and put it in the JSON format that the API accepts for image creation (used soon, below)
                                 collector.push({
                                     description,
                                     simpleMediaItem: { uploadToken }
@@ -67,49 +93,239 @@ export default class GooglePhotosManager extends ApiManager {
                         }
                     }
                 );
-                const failedCount = failed.length;
-                if (failedCount) {
-                    console.error(`Unable to upload ${failedCount} image${failedCount === 1 ? "" : "s"} to Google's servers`);
+
+                // inform the developer / server console of any failed upload attempts;
+                // this does not abort the operation, since some subset of the uploads may have been successful
+                const { length } = failed;
+                if (length) {
+                    console.error(`Unable to upload ${length} image${length === 1 ? "" : "s"} to Google's servers`);
                     console.log(failed.map(({ reason, batch, index, url }) => `@${batch}.${index}: ${url} failed:\n${reason}`).join('\n\n'));
                 }
-                return GooglePhotosUploadUtils.CreateMediaItems(token, newMediaItems, req.body.album).then(
+
+                // if none of the preliminary uploads was successful, there's no need to try to create images:
+                // report the failure to the client and return
+                if (!newMediaItems.length) {
+                    console.error(red(`${remoteUploadError} Thus, aborting image creation. Please try again.`));
+                    _error(res, remoteUploadError);
+                    return;
+                }
+
+                // STEP 2/2: create the media items and return the API's response to the client, along with any failures
+                return Uploader.CreateMediaItems(token, newMediaItems, req.body.album).then(
                     results => _success(res, { results, failed }),
                     error => _error(res, mediaError, error)
                 );
             }
         });
 
+        /**
+         * This route receives a list of urls that point to images
+         * stored on Google's servers and (following a *rough* heuristic)
+         * uploads each image to Dash's server if it hasn't already been uploaded.
+         * Unfortunately, since Google has so many of these images on its servers,
+         * these user content urls expire every 6 hours. So we can't store the url of a locally uploaded
+         * Google image and compare the candidate url to it to figure out if we already have it,
+         * since the same bytes on their server might now be associated with a new, random url.
+         * So, we do the next best thing and try to use an intrinsic attribute of those bytes as
+         * an identifier: the precise content size. This works in small cases, but has the obvious flaw of failing to upload
+         * an image locally if we have already uploaded another Google user content image with the exact same content size.
+         */
         register({
             method: Method.POST,
-            subscription: "/googlePhotosMediaDownload",
+            subscription: "/googlePhotosMediaGet",
             secureHandler: async ({ req, res }) => {
-                const contents: { mediaItems: MediaItem[] } = req.body;
+                const { mediaItems } = req.body as { mediaItems: MediaItem[] };
+                if (!mediaItems) {
+                    // non-starter, since the input was in an invalid format
+                    _invalid(res, requestError);
+                    return;
+                }
                 let failed = 0;
-                if (contents) {
-                    const completed: Opt<DashUploadUtils.ImageUploadInformation>[] = [];
-                    for (const item of contents.mediaItems) {
-                        const { contentSize, ...attributes } = await DashUploadUtils.InspectImage(item.baseUrl);
-                        const found: Opt<DashUploadUtils.ImageUploadInformation> = await Database.Auxiliary.QueryUploadHistory(contentSize!);
-                        if (!found) {
-                            const upload = await DashUploadUtils.UploadInspectedImage({ contentSize, ...attributes }, item.filename, prefix).catch(error => _error(res, downloadError, error));
-                            if (upload) {
-                                completed.push(upload);
-                                await Database.Auxiliary.LogUpload(upload);
-                            } else {
-                                failed++;
-                            }
+                const completed: Opt<Upload.ImageInformation>[] = [];
+                for (const { baseUrl } of mediaItems) {
+                    // start by getting the content size of the remote image
+                    const results = await DashUploadUtils.InspectImage(baseUrl);
+                    if (results instanceof Error) {
+                        // if something went wrong here, we can't hope to upload it, so just move on to the next
+                        failed++;
+                        continue;
+                    }
+                    const { contentSize, ...attributes } = results;
+                    // check to see if we have already uploaded a Google user content image *specifically via this route*
+                    // that has this exact content size
+                    const found: Opt<Upload.ImageInformation> = await Database.Auxiliary.QueryUploadHistory(contentSize);
+                    if (!found) {
+                        // if we haven't, then upload it locally to Dash's server
+                        const upload = await DashUploadUtils.UploadInspectedImage({ contentSize, ...attributes }, undefined, prefix, false).catch(error => _error(res, downloadError, error));
+                        if (upload) {
+                            completed.push(upload);
+                            // inform the heuristic that we've encountered an image with this content size,
+                            // to be checked against in future uploads
+                            await Database.Auxiliary.LogUpload(upload);
                         } else {
-                            completed.push(found);
+                            // make note of a failure to upload locally
+                            failed++;
                         }
+                    } else {
+                        // if we have, the variable 'found' is handily the upload information of the
+                        // existing image, so we add it to the list as if we had just uploaded it now, without actually
+                        // making a duplicate write
+                        completed.push(found);
                     }
-                    if (failed) {
-                        return _error(res, UploadError(failed));
-                    }
-                    return _success(res, completed);
                 }
-                _invalid(res, requestError);
+                // if there are any failures, report a general failure to the client
+                if (failed) {
+                    return _error(res, localUploadError(failed));
+                }
+                // otherwise, return the image upload information list corresponding to the newly (or previously)
+                // uploaded images
+                _success(res, completed);
             }
         });
 
     }
 }
+
+/**
+ * This namespace encompasses the logic
+ * necessary to upload images to Google's servers,
+ * and then initialize / create those images in the Photos
+ * API given the upload tokens returned from the initial
+ * uploading process.
+ *
+ * https://developers.google.com/photos/library/reference/rest/v1/mediaItems/batchCreate
+ */
+export namespace Uploader {
+
+    /**
+     * Specifies the structure of the object
+     * necessary to upload bytes to Google's servers.
+     * The url is streamed to access the image's bytes,
+     * and the description is what appears in Google Photos'
+     * description field.
+     */
+    export interface UploadSource {
+        url: string;
+        description: string;
+    }
+
+    /**
+     * This is the format needed to pass
+     * into the BatchCreate API request
+     * to take a reference to raw uploaded bytes
+     * and actually create an image in Google Photos.
+     *
+     * So, to instantiate this interface you must have already dispatched an upload
+     * and received an upload token.
+     */
+    export interface NewMediaItem {
+        description: string;
+        simpleMediaItem: {
+            uploadToken: string;
+        };
+    }
+
+    /**
+     * A utility function to streamline making
+     * calls to the API's url - accentuates
+     * the relative path in the caller.
+     * @param extension the desired
+     * subset of the API
+     */
+    function prepend(extension: string): string {
+        return `https://photoslibrary.googleapis.com/v1/${extension}`;
+    }
+
+    /**
+     * Factors out the creation of the API request's
+     * authentication elements stored in the header.
+     * @param type the contents of the request
+     * @param token the user-specific Google access token
+     */
+    function headers(type: string, token: string) {
+        return {
+            'Content-Type': `application/${type}`,
+            'Authorization': `Bearer ${token}`,
+        };
+    }
+
+    /**
+     * This is the first step in the remote image creation process.
+     * Here we upload the raw bytes of the image to Google's servers by
+     * setting authentication and other required header properties and including
+     * the raw bytes of the image to be uploaded in the body of the request.
+     * @param bearerToken the user-specific Google access token, specifies the account associated
+     * with the eventual image creation
+     * @param url the url of the image to upload
+     * @param filename an optional name associated with the uploaded image - if not specified,
+     * defaults to the filename (basename) in the url
+     */
+    export const SendBytes = async (bearerToken: string, url: string, filename?: string): Promise<any> => {
+        // check if the url points to a non-image or an unsupported format
+        if (!DashUploadUtils.validateExtension(url)) {
+            return undefined;
+        }
+        const body = await request(url, { encoding: null }); // returns a buffer containing the unencoded binary image data
+        const parameters = {
+            method: 'POST',
+            uri: prepend('uploads'),
+            headers: {
+                ...headers('octet-stream', bearerToken),
+                'X-Goog-Upload-File-Name': filename || path.basename(url),
+                'X-Goog-Upload-Protocol': 'raw'
+            },
+            body
+        };
+        return new Promise((resolve, reject) => request(parameters, (error, _response, body) => {
+            if (error) {
+                // on rejection, the server logs the error and the offending image
+                return reject(error);
+            }
+            resolve(body);
+        }));
+    };
+
+    /**
+     * This is the second step in the remote image creation process: having uploaded
+     * the raw bytes of the image and received / stored pointers (upload tokens) to those
+     * bytes, we can now instruct the API to finalize the creation of those images by
+     * submitting a batch create request with the list of upload tokens and the description
+     * to be associated with each resulting new image.
+     * @param bearerToken the user-specific Google access token, specifies the account associated
+     * with the eventual image creation
+     * @param newMediaItems a list of objects containing a description and, effectively, the
+     * pointer to the uploaded bytes
+     * @param album if included, will add all of the newly created remote images to the album
+     * with the specified id
+     */
+    export const CreateMediaItems = async (bearerToken: string, newMediaItems: NewMediaItem[], album?: { id: string }): Promise<NewMediaItemResult[]> => {
+        // it's important to note that the API can't handle more than 50 items in each request and
+        // seems to need at least some latency between requests (spamming it synchronously has led to the server returning errors)...
+        const batched = BatchedArray.from(newMediaItems, { batchSize: 50 });
+        // ...so we execute them in delayed batches and await the entire execution
+        return batched.batchedMapPatientInterval(
+            { magnitude: 100, unit: TimeUnit.Milliseconds },
+            async (batch: NewMediaItem[], collector): Promise<void> => {
+                const parameters = {
+                    method: 'POST',
+                    headers: headers('json', bearerToken),
+                    uri: prepend('mediaItems:batchCreate'),
+                    body: { newMediaItems: batch } as any,
+                    json: true
+                };
+                // register the target album, if provided
+                album && (parameters.body.albumId = album.id);
+                collector.push(...(await new Promise<NewMediaItemResult[]>((resolve, reject) => {
+                    request(parameters, (error, _response, body) => {
+                        if (error) {
+                            reject(error);
+                        } else {
+                            resolve(body.newMediaItemResults);
+                        }
+                    });
+                })));
+            }
+        );
+    };
+}
\ No newline at end of file
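Aside: both Uploader functions lean on array-batcher's patient-interval mapping to stay under Google's rate limits. A minimal sketch of that pattern, reusing the same BatchedArray calls the diff introduces (the sendOne helper is hypothetical, standing in for Uploader.SendBytes):

import { BatchedArray, TimeUnit } from "array-batcher";

// hypothetical stand-in for Uploader.SendBytes: resolves to an upload token for one image
declare function sendOne(url: string): Promise<string>;

async function uploadAll(urls: string[]): Promise<string[]> {
    // split the work into batches of 25, as the media post route above does...
    const batched = BatchedArray.from(urls, { batchSize: 25 });
    // ...and wait 100 milliseconds after each batch's response before dispatching
    // the next, collecting every resolved upload token into the returned array
    return batched.batchedMapPatientInterval<string>(
        { magnitude: 100, unit: TimeUnit.Milliseconds },
        async (batch, collector) => {
            for (const url of batch) {
                collector.push(await sendOne(url));
            }
        }
    );
}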
diff --git a/src/server/ApiManagers/SearchManager.ts b/src/server/ApiManagers/SearchManager.ts
index 4ce12f9f3..5f7d1cf6d 100644
--- a/src/server/ApiManagers/SearchManager.ts
+++ b/src/server/ApiManagers/SearchManager.ts
@@ -4,11 +4,15 @@ import { Search } from "../Search";
 const findInFiles = require('find-in-files');
 import * as path from 'path';
 import { pathToDirectory, Directory } from "./UploadManager";
-import { red, cyan, yellow } from "colors";
+import { red, cyan, yellow, green } from "colors";
 import RouteSubscriber from "../RouteSubscriber";
-import { exec } from "child_process";
+import { exec, execSync } from "child_process";
 import { onWindows } from "..";
 import { get } from "request-promise";
+import { log_execution } from "../ActionUtilities";
+import { Database } from "../database";
+import rimraf = require("rimraf");
+import { mkdirSync, chmod, chmodSync } from "fs";
 
 export class SearchManager extends ApiManager {
 
@@ -19,10 +23,17 @@ export class SearchManager extends ApiManager {
             subscription: new RouteSubscriber("solr").add("action"),
             secureHandler: async ({ req, res }) => {
                 const { action } = req.params;
-                if (["start", "stop"].includes(action)) {
-                    const status = req.params.action === "start";
-                    const success = await SolrManager.SetRunning(status);
-                    console.log(success ? `Successfully ${status ? "started" : "stopped"} Solr!` : `Uh oh! Check the console for the error that occurred while ${status ? "starting" : "stopping"} Solr`);
+                switch (action) {
+                    case "start":
+                    case "stop":
+                        const status = req.params.action === "start";
+                        SolrManager.SetRunning(status);
+                        break;
+                    case "update":
+                        await SolrManager.update();
+                        break;
+                    default:
+                        console.log(yellow(`${action} is an unknown solr operation.`));
                 }
                 res.redirect("/home");
             }
@@ -50,7 +61,7 @@ export class SearchManager extends ApiManager {
 
         register({
             method: Method.GET,
-            subscription: "/search",
+            subscription: "/dashsearch",
             secureHandler: async ({ req, res }) => {
                 const solrQuery: any = {};
                 ["q", "fq", "start", "rows", "hl", "hl.fl"].forEach(key => solrQuery[key] = req.query[key]);
@@ -69,12 +80,10 @@ export class SearchManager extends ApiManager {
 
 export namespace SolrManager {
 
-    const command = onWindows ? "solr.cmd" : "solr";
-
-    export async function SetRunning(status: boolean): Promise<boolean> {
+    export function SetRunning(status: boolean) {
         const args = status ? "start" : "stop -p 8983";
         console.log(`solr management: trying to ${args}`);
-        exec(`${command} ${args}`, { cwd: "./solr-8.3.1/bin" }, (error, stdout, stderr) => {
+        exec(`solr ${args}`, { cwd: "./solr-8.3.1/bin" }, (error, stdout, stderr) => {
             if (error) {
                 console.log(red(`solr management error: unable to ${args} server`));
                 console.log(red(error.message));
@@ -82,12 +91,127 @@ export namespace SolrManager {
             console.log(cyan(stdout));
             console.log(yellow(stderr));
         });
+        if (status) {
+            console.log(cyan("Start script is executing: please allow 15 seconds for solr to start on port 8983."));
+        }
+    }
+
+    export async function update() {
+        console.log(green("Beginning update..."));
+        await log_execution<void>({
+            startMessage: "Clearing existing Solr information...",
+            endMessage: "Solr information successfully cleared",
+            action: Search.clear,
+            color: cyan
+        });
+        const cursor = await log_execution({
+            startMessage: "Connecting to and querying for all documents from database...",
+            endMessage: ({ result, error }) => {
+                const success = error === null && result !== undefined;
+                if (!success) {
+                    console.log(red("Unable to connect to the database."));
+                    process.exit(0);
+                }
+                return "Connection successful and query complete";
+            },
+            action: () => Database.Instance.query({}),
+            color: yellow
+        });
+        const updates: any[] = [];
+        let numDocs = 0;
+        function updateDoc(doc: any) {
+            numDocs++;
+            if ((numDocs % 50) === 0) {
+                console.log(`Batch of 50 complete, total of ${numDocs}`);
+            }
+            if (doc.__type !== "Doc") {
+                return;
+            }
+            const fields = doc.fields;
+            if (!fields) {
+                return;
+            }
+            const update: any = { id: doc._id };
+            let dynfield = false;
+            for (const key in fields) {
+                const value = fields[key];
+                const term = ToSearchTerm(value);
+                if (term !== undefined) {
+                    const { suffix, value } = term;
+                    update[key + suffix] = value;
+                    dynfield = true;
+                }
+            }
+            if (dynfield) {
+                updates.push(update);
+            }
+        }
+        await cursor?.forEach(updateDoc);
+        const result = await log_execution({
+            startMessage: `Dispatching updates for ${updates.length} documents`,
+            endMessage: "Dispatched updates complete",
+            action: () => Search.updateDocuments(updates),
+            color: cyan
+        });
         try {
-            await get("http://localhost:8983");
-            return true;
-        } catch {
-            return false;
+            if (result) {
+                const { status } = JSON.parse(result).responseHeader;
+                console.log(status ? red(`Failed with status code (${status})`) : green("Success!"));
+            } else {
+                console.log(red("Solr is likely not running!"));
+            }
+        } catch (e) {
+            console.log(red("Error:"));
+            console.log(e);
+            console.log("\n");
         }
+        await cursor?.close();
+    }
+
+    const suffixMap: { [type: string]: (string | [string, string | ((json: any) => any)]) } = {
+        "number": "_n",
+        "string": "_t",
+        "boolean": "_b",
+        "image": ["_t", "url"],
+        "video": ["_t", "url"],
+        "pdf": ["_t", "url"],
+        "audio": ["_t", "url"],
+        "web": ["_t", "url"],
+        "date": ["_d", value => new Date(value.date).toISOString()],
+        "proxy": ["_i", "fieldId"],
+        "list": ["_l", list => {
+            const results = [];
+            for (const value of list.fields) {
+                const term = ToSearchTerm(value);
+                if (term) {
+                    results.push(term.value);
+                }
+            }
+            return results.length ? results : null;
+        }]
+    };
+
+    function ToSearchTerm(val: any): { suffix: string, value: any } | undefined {
+        if (val === null || val === undefined) {
+            return;
+        }
+        const type = val.__type || typeof val;
+        let suffix = suffixMap[type];
+        if (!suffix) {
+            return;
+        }
+
+        if (Array.isArray(suffix)) {
+            const accessor = suffix[1];
+            if (typeof accessor === "function") {
+                val = accessor(val);
+            } else {
+                val = val[accessor];
+            }
+            suffix = suffix[0];
+        }
+
+        return { suffix, value: val };
+    }
 }
\ No newline at end of file
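Aside: the suffixMap above encodes Solr's dynamic-field convention, where a type suffix on the field name tells the schema how to index the value. A hedged sketch of what SolrManager.update() would push for one document (the document shape and field names are invented for illustration; the suffixes are the ones defined above):

// hypothetical Dash document, as it might come back from Database.Instance.query({})
const doc = {
    _id: "doc123",
    __type: "Doc",
    fields: {
        title: "My Image",                                     // string -> title_t
        count: 5,                                              // number -> count_n
        data: { __type: "image", url: "files/images/a.png" },  // image  -> data_t (accessor extracts url)
        proto: { __type: "proxy", fieldId: "doc456" }          // proxy  -> proto_i (accessor extracts fieldId)
    }
};

// the corresponding entry that updateDoc(doc) would append to `updates`
const update = {
    id: "doc123",
    title_t: "My Image",
    count_n: 5,
    data_t: "files/images/a.png",
    proto_i: "doc456"
};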
diff --git a/src/server/ApiManagers/SessionManager.ts b/src/server/ApiManagers/SessionManager.ts
index 8ebd684bb..c993c985f 100644
--- a/src/server/ApiManagers/SessionManager.ts
+++ b/src/server/ApiManagers/SessionManager.ts
@@ -53,6 +53,15 @@ export default class SessionManager extends ApiManager {
             })
         });
 
+        register({
+            method: Method.GET,
+            subscription: this.secureSubscriber("delete"),
+            secureHandler: this.authorizedAction(async ({ res }) => {
+                const { error } = await sessionAgent.serverWorker.emit("delete");
+                res.send(error ? error.message : "Your request was successful: the server successfully deleted the database. Return to /home.");
+            })
+        });
+
     }
 
 }
\ No newline at end of file
diff --git a/src/server/ApiManagers/UploadManager.ts b/src/server/ApiManagers/UploadManager.ts
index a92b613b7..98f029c7d 100644
--- a/src/server/ApiManagers/UploadManager.ts
+++ b/src/server/ApiManagers/UploadManager.ts
@@ -4,12 +4,12 @@ import * as formidable from 'formidable';
 import v4 = require('uuid/v4');
 const AdmZip = require('adm-zip');
 import { extname, basename, dirname } from 'path';
-import { createReadStream, createWriteStream, unlink, readFileSync } from "fs";
+import { createReadStream, createWriteStream, unlink } from "fs";
 import { publicDirectory, filesDirectory } from "..";
 import { Database } from "../database";
-import { DashUploadUtils, SizeSuffix } from "../DashUploadUtils";
+import { DashUploadUtils, InjectSize, SizeSuffix } from "../DashUploadUtils";
 import * as sharp from 'sharp';
-import { AcceptibleMedia } from "../SharedMediaTypes";
+import { AcceptibleMedia, Upload } from "../SharedMediaTypes";
 import { normalize } from "path";
 const imageDataUri = require('image-data-uri');
 
@@ -19,7 +19,8 @@ export enum Directory {
     videos = "videos",
     pdfs = "pdfs",
     text = "text",
-    pdf_thumbnails = "pdf_thumbnails"
+    pdf_thumbnails = "pdf_thumbnails",
+    audio = "audio"
 }
 
 export function serverPathToFile(directory: Directory, filename: string) {
@@ -47,7 +48,7 @@ export default class UploadManager extends ApiManager {
                 form.keepExtensions = true;
                 return new Promise<void>(resolve => {
                     form.parse(req, async (_err, _fields, files) => {
-                        const results: any[] = [];
+                        const results: Upload.FileResponse[] = [];
                         for (const key in files) {
                             const result = await DashUploadUtils.upload(files[key]);
                             result && results.push(result);
@@ -60,12 +61,22 @@ export default class UploadManager extends ApiManager {
         });
 
         register({
+            method: Method.GET,
+            subscription: "/hello",
+            secureHandler: ({ req, res }) => {
+                res.send("<h1>world!</h1>");
+            }
+        });
+
+        register({
             method: Method.POST,
             subscription: "/uploadRemoteImage",
             secureHandler: async ({ req, res }) => {
+                const { sources } = req.body;
                 if (Array.isArray(sources)) {
-                    return res.send(await Promise.all(sources.map(url => DashUploadUtils.UploadImage(url))));
+                    const results = await Promise.all(sources.map(source => DashUploadUtils.UploadImage(source)));
+                    return res.send(results);
                 }
                 res.send();
             }
@@ -75,6 +86,7 @@ export default class UploadManager extends ApiManager {
             method: Method.POST,
             subscription: "/uploadDoc",
             secureHandler: ({ req, res }) => {
+
                 const form = new formidable.IncomingForm();
                 form.keepExtensions = true;
                 // let path = req.body.path;
@@ -179,6 +191,7 @@ export default class UploadManager extends ApiManager {
             method: Method.POST,
             subscription: "/inspectImage",
             secureHandler: async ({ req, res }) => {
+
                 const { source } = req.body;
                 if (typeof source === "string") {
                     return res.send(await DashUploadUtils.InspectImage(source));
@@ -197,7 +210,7 @@ export default class UploadManager extends ApiManager {
                     res.status(401).send("incorrect parameters specified");
                     return;
                 }
-                return imageDataUri.outputFile(uri, serverPathToFile(Directory.images, filename)).then((savedName: string) => {
+                return imageDataUri.outputFile(uri, serverPathToFile(Directory.images, InjectSize(filename, SizeSuffix.Original))).then((savedName: string) => {
                     const ext = extname(savedName).toLowerCase();
                     const { pngs, jpgs } = AcceptibleMedia;
                     const resizers = [
@@ -222,6 +235,7 @@ export default class UploadManager extends ApiManager {
                         const path = serverPathToFile(Directory.images, filename + resizer.suffix + ext);
                         createReadStream(savedName).pipe(resizer.resizer).pipe(createWriteStream(path));
                     });
+
                 }
                 res.send(clientPathToFile(Directory.images, filename + ext));
             });
diff --git a/src/server/ApiManagers/UserManager.ts b/src/server/ApiManagers/UserManager.ts
index b0d868918..d9d346cc1 100644
--- a/src/server/ApiManagers/UserManager.ts
+++ b/src/server/ApiManagers/UserManager.ts
@@ -34,7 +34,7 @@ export default class UserManager extends ApiManager {
         register({
             method: Method.GET,
             subscription: "/getCurrentUser",
-            secureHandler: ({ res, user }) => res.send(JSON.stringify(user)),
+            secureHandler: ({ res, user: { _id, email } }) => res.send(JSON.stringify({ id: _id, email })),
             publicHandler: ({ res }) => res.send(JSON.stringify({ id: "__guest__", email: "" }))
         });
 
diff --git a/src/server/ApiManagers/UtilManager.ts b/src/server/ApiManagers/UtilManager.ts
index 32aecd3c6..ad8119bf4 100644
--- a/src/server/ApiManagers/UtilManager.ts
+++ b/src/server/ApiManagers/UtilManager.ts
@@ -1,14 +1,14 @@
 import ApiManager, { Registration } from "./ApiManager";
 import { Method } from "../RouteManager";
 import { exec } from 'child_process';
-import { command_line } from "../ActionUtilities";
 import RouteSubscriber from "../RouteSubscriber";
 import { red } from "colors";
-import { IBM_Recommender } from "../../client/apis/IBM_Recommender";
-import { Recommender } from "../Recommender";
+// import { IBM_Recommender } from "../../client/apis/IBM_Recommender";
+// import { Recommender } from "../Recommender";
 
-const recommender = new Recommender();
-recommender.testModel();
+// const recommender = new Recommender();
+// recommender.testModel();
+import executeImport from "../../scraping/buxton/final/BuxtonImporter";
 
 export default class UtilManager extends ApiManager {
 
@@ -27,25 +27,25 @@ export default class UtilManager extends ApiManager {
             }
         });
 
-        register({
-            method: Method.POST,
-            subscription: "/IBMAnalysis",
-            secureHandler: async ({ req, res }) => res.send(await IBM_Recommender.analyze(req.body))
-        });
+        // register({
+        //     method: Method.POST,
+        //     subscription: "/IBMAnalysis",
+        //     secureHandler: async ({ req, res }) => res.send(await IBM_Recommender.analyze(req.body))
+        // });
 
-        register({
-            method: Method.POST,
-            subscription: "/recommender",
-            secureHandler: async ({ req, res }) => {
-                const keyphrases = req.body.keyphrases;
-                const wordvecs = await recommender.vectorize(keyphrases);
-                let embedding: Float32Array = new Float32Array();
-                if (wordvecs && wordvecs.dataSync()) {
-                    embedding = wordvecs.dataSync() as Float32Array;
-                }
-                res.send(embedding);
-            }
-        });
+        // register({
+        //     method: Method.POST,
+        //     subscription: "/recommender",
+        //     secureHandler: async ({ req, res }) => {
+        //         const keyphrases = req.body.keyphrases;
+        //         const wordvecs = await recommender.vectorize(keyphrases);
+        //         let embedding: Float32Array = new Float32Array();
+        //         if (wordvecs && wordvecs.dataSync()) {
+        //             embedding = wordvecs.dataSync() as Float32Array;
+        //         }
+        //         res.send(embedding);
+        //     }
+        // });
 
         register({
@@ -67,20 +67,6 @@ export default class UtilManager extends ApiManager {
 
         register({
             method: Method.GET,
-            subscription: "/buxton",
-            secureHandler: async ({ res }) => {
-                const cwd = './src/scraping/buxton';
-
-                const onResolved = (stdout: string) => { console.log(stdout); res.redirect("/"); };
-                const onRejected = (err: any) => { console.error(err.message); res.send(err); };
-                const tryPython3 = () => command_line('python3 scraper.py', cwd).then(onResolved, onRejected);
-
-                return command_line('python scraper.py', cwd).then(onResolved, tryPython3);
-            },
-        });
-
-        register({
-            method: Method.GET,
             subscription: "/version",
             secureHandler: ({ res }) => {
                 return new Promise<void>(resolve => {
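Aside: the resize loop in UploadManager's image handler (whose `resizers` array is elided from this diff's context lines) relies on sharp instances being duplex streams. A minimal sketch of that pipeline, with an invented width and output name since the real values aren't shown here:

import { createReadStream, createWriteStream } from "fs";
import * as sharp from "sharp";

// pipe the originally saved image through a sharp resizer and straight into a
// size-suffixed sibling file, without buffering the whole image in memory
function writeResized(savedName: string, outputPath: string, width: number) {
    createReadStream(savedName)
        .pipe(sharp().resize(width))
        .pipe(createWriteStream(outputPath));
}

// hypothetical usage for a "medium" rendition:
// writeResized("upload_o.png", "upload_m.png", 400);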