Diffstat (limited to 'src/server/ApiManagers')
-rw-r--r--  src/server/ApiManagers/GooglePhotosManager.ts  155
-rw-r--r--  src/server/ApiManagers/SearchManager.ts          2
-rw-r--r--  src/server/ApiManagers/UploadManager.ts         19
-rw-r--r--  src/server/ApiManagers/UtilManager.ts           26
4 files changed, 193 insertions, 9 deletions
diff --git a/src/server/ApiManagers/GooglePhotosManager.ts b/src/server/ApiManagers/GooglePhotosManager.ts
index 25c54ee2e..88219423d 100644
--- a/src/server/ApiManagers/GooglePhotosManager.ts
+++ b/src/server/ApiManagers/GooglePhotosManager.ts
@@ -3,12 +3,13 @@ import { Method, _error, _success, _invalid } from "../RouteManager";
import * as path from "path";
import { GoogleApiServerUtils } from "../apis/google/GoogleApiServerUtils";
import { BatchedArray, TimeUnit } from "array-batcher";
-import { GooglePhotosUploadUtils } from "../apis/google/GooglePhotosUploadUtils";
import { Opt } from "../../new_fields/Doc";
import { DashUploadUtils, InjectSize, SizeSuffix } from "../DashUploadUtils";
import { Database } from "../database";
import { red } from "colors";
import { Upload } from "../SharedMediaTypes";
+import request = require('request-promise');
+import { NewMediaItemResult } from "../apis/google/SharedTypes";
const prefix = "google_photos_";
const remoteUploadError = "None of the preliminary uploads to Google's servers was successful.";
@@ -64,11 +65,11 @@ export default class GooglePhotosManager extends ApiManager {
// set on Google's servers, and would instantly return an error. So, we ease things out and send the photos to upload in
// batches of 25, where the next batch is sent 100 milliseconds after we receive a response from Google's servers.
const failed: GooglePhotosUploadFailure[] = [];
- const batched = BatchedArray.from<GooglePhotosUploadUtils.UploadSource>(media, { batchSize: 25 });
+ const batched = BatchedArray.from<Uploader.UploadSource>(media, { batchSize: 25 });
const interval = { magnitude: 100, unit: TimeUnit.Milliseconds };
const newMediaItems = await batched.batchedMapPatientInterval<NewMediaItem>(
interval,
- async (batch: any, collector: any, { completedBatches }: any) => {
+ async (batch, collector, { completedBatches }) => {
for (let index = 0; index < batch.length; index++) {
const { url, description } = batch[index];
// a local function used to record failure of an upload
@@ -78,7 +79,7 @@ export default class GooglePhotosManager extends ApiManager {
const imageToUpload = InjectSize(url, SizeSuffix.Original);
// STEP 1/2: send the raw bytes of the image from our server to Google's servers. We'll get back an upload token
// which acts as a pointer to those bytes that we can use to locate them later on
- const uploadToken = await GooglePhotosUploadUtils.DispatchGooglePhotosUpload(token, imageToUpload).catch(fail);
+ const uploadToken = await Uploader.SendBytes(token, imageToUpload).catch(fail);
if (!uploadToken) {
fail(`${path.extname(url)} is not an accepted extension`);
} else {
@@ -110,7 +111,7 @@ export default class GooglePhotosManager extends ApiManager {
}
// STEP 2/2: create the media items and return the API's response to the client, along with any failures
- return GooglePhotosUploadUtils.CreateMediaItems(token, newMediaItems, req.body.album).then(
+ return Uploader.CreateMediaItems(token, newMediaItems, req.body.album).then(
results => _success(res, { results, failed }),
error => _error(res, mediaError, error)
);
@@ -183,4 +184,148 @@ export default class GooglePhotosManager extends ApiManager {
});
}
+}
+
+/**
+ * This namespace encompasses the logic
+ * necessary to upload images to Google's servers,
+ * and then create those images in the Photos
+ * API from the upload tokens returned by the
+ * initial upload.
+ *
+ * https://developers.google.com/photos/library/reference/rest/v1/mediaItems/batchCreate
+ */
+export namespace Uploader {
+
+ /**
+ * Specifies the structure of the object
+ * necessary to upload bytes to Google's servers.
+ * The url is streamed to access the image's bytes,
+ * and the description is what appears in Google Photos'
+ * description field.
+ */
+ export interface UploadSource {
+ url: string;
+ description: string;
+ }
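+ // e.g. { url: "<server path to the image>", description: "<text shown in Google Photos>" } (hypothetical values)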
+
+ /**
+ * This is the format passed into
+ * the batchCreate API request
+ * to take a reference to raw uploaded bytes
+ * and actually create an image in Google Photos.
+ *
+ * So, to instantiate this interface you must have already dispatched an upload
+ * and received an upload token.
+ */
+ export interface NewMediaItem {
+ description: string;
+ simpleMediaItem: {
+ uploadToken: string;
+ };
+ }
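+ // e.g. { description: "Uploaded from Dash", simpleMediaItem: { uploadToken: "<token returned by SendBytes>" } } (hypothetical values)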
+
+ /**
+ * A utility function to streamline building
+ * the API's request urls - the caller supplies
+ * only the relative path of the desired endpoint.
+ * @param extension the desired
+ * subset of the API
+ */
+ function prepend(extension: string): string {
+ return `https://photoslibrary.googleapis.com/v1/${extension}`;
+ }
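+ // e.g. prepend('uploads') => "https://photoslibrary.googleapis.com/v1/uploads"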
+
+ /**
+ * Factors out the creation of the API request's
+ * authentication elements stored in the header.
+ * @param type the contents of the request
+ * @param token the user-specific Google access token
+ */
+ function headers(type: string, token: string) {
+ return {
+ 'Content-Type': `application/${type}`,
+ 'Authorization': `Bearer ${token}`,
+ };
+ }
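+ // e.g. headers('json', token) => { 'Content-Type': 'application/json', 'Authorization': 'Bearer <token>' }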
+
+ /**
+ * This is the first step in the remote image creation process.
+ * Here we upload the raw bytes of the image to Google's servers by
+ * setting authentication and other required header properties and including
+ * the raw bytes of the image to be uploaded in the body of the request.
+ * @param bearerToken the user-specific Google access token, specifies the account associated
+ * with the eventual image creation
+ * @param url the url of the image to upload
+ * @param filename an optional name associated with the uploaded image - if not specified
+ * defaults to the filename (basename) in the url
+ */
+ export const SendBytes = async (bearerToken: string, url: string, filename?: string): Promise<any> => {
+ // check if the url points to a non-image or an unsupported format
+ if (!DashUploadUtils.validateExtension(url)) {
+ return undefined;
+ }
+ const body = await request(url, { encoding: null }); // with encoding disabled, the resolved body is a Buffer of raw binary image data
+ const parameters = {
+ method: 'POST',
+ uri: prepend('uploads'),
+ headers: {
+ ...headers('octet-stream', bearerToken),
+ 'X-Goog-Upload-File-Name': filename || path.basename(url),
+ 'X-Goog-Upload-Protocol': 'raw'
+ },
+ body
+ };
+ return new Promise((resolve, reject) => request(parameters, (error, _response, body) => {
+ if (error) {
+ // on rejection, the server logs the error and the offending image
+ return reject(error);
+ }
+ resolve(body);
+ }));
+ };
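+ // The resolved value is the raw upload token string returned by Google's uploads endpoint;
+ // a rejected or undefined result is treated as a failed upload by the route handler above.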
+
+ /**
+ * This is the second step in the remote image creation process: having uploaded
+ * the raw bytes of the image and received / stored pointers (upload tokens) to those
+ * bytes, we can now instruct the API to finalize the creation of those images by
+ * submitting a batch create request with the list of upload tokens and the description
+ * to be associated with each resulting new image.
+ * @param bearerToken the user-specific Google access token, specifies the account associated
+ * with the eventual image creation
+ * @param newMediaItems a list of objects containing a description and, effectively, the
+ * pointer to the uploaded bytes
+ * @param album if included, will add all of the newly created remote images to the album
+ * with the specified id
+ */
+ export const CreateMediaItems = async (bearerToken: string, newMediaItems: NewMediaItem[], album?: { id: string }): Promise<NewMediaItemResult[]> => {
+ // it's important to note that the API can't handle more than 50 items in each request and
+ // seems to need at least some latency between requests (spamming it synchronously has led to the server returning errors)...
+ const batched = BatchedArray.from(newMediaItems, { batchSize: 50 });
+ // ...so we execute them in delayed batches and await the entire execution
+ return batched.batchedMapPatientInterval(
+ { magnitude: 100, unit: TimeUnit.Milliseconds },
+ async (batch: NewMediaItem[], collector): Promise<void> => {
+ const parameters = {
+ method: 'POST',
+ headers: headers('json', bearerToken),
+ uri: prepend('mediaItems:batchCreate'),
+ body: { newMediaItems: batch } as any,
+ json: true
+ };
+ // register the target album, if provided
+ album && (parameters.body.albumId = album.id);
+ collector.push(...(await new Promise<NewMediaItemResult[]>((resolve, reject) => {
+ request(parameters, (error, _response, body) => {
+ if (error) {
+ reject(error);
+ } else {
+ resolve(body.newMediaItemResults);
+ }
+ });
+ })));
+ }
+ );
+ };
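+ // End-to-end sketch of the two-step flow this namespace implements (hypothetical caller,
+ // inside an async handler, assuming `token` is a valid access token and `media` is an UploadSource[]):
+ //   const newMediaItems: NewMediaItem[] = [];
+ //   for (const { url, description } of media) {
+ //       const uploadToken = await Uploader.SendBytes(token, url);      // STEP 1: upload raw bytes
+ //       uploadToken && newMediaItems.push({ description, simpleMediaItem: { uploadToken } });
+ //   }
+ //   const results = await Uploader.CreateMediaItems(token, newMediaItems); // STEP 2: create the images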
+
+}
\ No newline at end of file
diff --git a/src/server/ApiManagers/SearchManager.ts b/src/server/ApiManagers/SearchManager.ts
index be17c3105..5f7d1cf6d 100644
--- a/src/server/ApiManagers/SearchManager.ts
+++ b/src/server/ApiManagers/SearchManager.ts
@@ -61,7 +61,7 @@ export class SearchManager extends ApiManager {
register({
method: Method.GET,
- subscription: "/search",
+ subscription: "/dashsearch",
secureHandler: async ({ req, res }) => {
const solrQuery: any = {};
["q", "fq", "start", "rows", "hl", "hl.fl"].forEach(key => solrQuery[key] = req.query[key]);
diff --git a/src/server/ApiManagers/UploadManager.ts b/src/server/ApiManagers/UploadManager.ts
index 8f2a5ea3e..98f029c7d 100644
--- a/src/server/ApiManagers/UploadManager.ts
+++ b/src/server/ApiManagers/UploadManager.ts
@@ -7,7 +7,7 @@ import { extname, basename, dirname } from 'path';
import { createReadStream, createWriteStream, unlink } from "fs";
import { publicDirectory, filesDirectory } from "..";
import { Database } from "../database";
-import { DashUploadUtils } from "../DashUploadUtils";
+import { DashUploadUtils, InjectSize, SizeSuffix } from "../DashUploadUtils";
import * as sharp from 'sharp';
import { AcceptibleMedia, Upload } from "../SharedMediaTypes";
import { normalize } from "path";
@@ -19,7 +19,8 @@ export enum Directory {
videos = "videos",
pdfs = "pdfs",
text = "text",
- pdf_thumbnails = "pdf_thumbnails"
+ pdf_thumbnails = "pdf_thumbnails",
+ audio = "audio"
}
export function serverPathToFile(directory: Directory, filename: string) {
@@ -60,9 +61,18 @@ export default class UploadManager extends ApiManager {
});
register({
+ method: Method.GET,
+ subscription: "/hello",
+ secureHandler: ({ req, res }) => {
+ res.send("<h1>world!</h1>");
+ }
+ });
+
+ register({
method: Method.POST,
subscription: "/uploadRemoteImage",
secureHandler: async ({ req, res }) => {
+
const { sources } = req.body;
if (Array.isArray(sources)) {
const results = await Promise.all(sources.map(source => DashUploadUtils.UploadImage(source)));
@@ -76,6 +86,7 @@ export default class UploadManager extends ApiManager {
method: Method.POST,
subscription: "/uploadDoc",
secureHandler: ({ req, res }) => {
+
const form = new formidable.IncomingForm();
form.keepExtensions = true;
// let path = req.body.path;
@@ -180,6 +191,7 @@ export default class UploadManager extends ApiManager {
method: Method.POST,
subscription: "/inspectImage",
secureHandler: async ({ req, res }) => {
+
const { source } = req.body;
if (typeof source === "string") {
return res.send(await DashUploadUtils.InspectImage(source));
@@ -198,7 +210,7 @@ export default class UploadManager extends ApiManager {
res.status(401).send("incorrect parameters specified");
return;
}
- return imageDataUri.outputFile(uri, serverPathToFile(Directory.images, filename)).then((savedName: string) => {
+ return imageDataUri.outputFile(uri, serverPathToFile(Directory.images, InjectSize(filename, SizeSuffix.Original))).then((savedName: string) => {
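+ // savedName now points at the copy written with the Original size suffix injected above;
+ // the resizers below produce the remaining sized variants of the same image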
const ext = extname(savedName).toLowerCase();
const { pngs, jpgs } = AcceptibleMedia;
const resizers = [
@@ -223,6 +235,7 @@ export default class UploadManager extends ApiManager {
const path = serverPathToFile(Directory.images, filename + resizer.suffix + ext);
createReadStream(savedName).pipe(resizer.resizer).pipe(createWriteStream(path));
});
+
}
res.send(clientPathToFile(Directory.images, filename + ext));
});
diff --git a/src/server/ApiManagers/UtilManager.ts b/src/server/ApiManagers/UtilManager.ts
index 8adc3da81..ad8119bf4 100644
--- a/src/server/ApiManagers/UtilManager.ts
+++ b/src/server/ApiManagers/UtilManager.ts
@@ -3,6 +3,11 @@ import { Method } from "../RouteManager";
import { exec } from 'child_process';
import RouteSubscriber from "../RouteSubscriber";
import { red } from "colors";
+// import { IBM_Recommender } from "../../client/apis/IBM_Recommender";
+// import { Recommender } from "../Recommender";
+
+// const recommender = new Recommender();
+// recommender.testModel();
import executeImport from "../../scraping/buxton/final/BuxtonImporter";
export default class UtilManager extends ApiManager {
@@ -22,6 +27,27 @@ export default class UtilManager extends ApiManager {
}
});
+ // register({
+ // method: Method.POST,
+ // subscription: "/IBMAnalysis",
+ // secureHandler: async ({ req, res }) => res.send(await IBM_Recommender.analyze(req.body))
+ // });
+
+ // register({
+ // method: Method.POST,
+ // subscription: "/recommender",
+ // secureHandler: async ({ req, res }) => {
+ // const keyphrases = req.body.keyphrases;
+ // const wordvecs = await recommender.vectorize(keyphrases);
+ // let embedding: Float32Array = new Float32Array();
+ // if (wordvecs && wordvecs.dataSync()) {
+ // embedding = wordvecs.dataSync() as Float32Array;
+ // }
+ // res.send(embedding);
+ // }
+ // });
+
+
register({
method: Method.GET,
subscription: "/pull",