-rw-r--r--  src/client/apis/google_docs/GooglePhotosClientUtils.ts |   4
-rw-r--r--  src/new_fields/RichTextUtils.ts                        |   2
-rw-r--r--  src/server/ApiManagers/GooglePhotosManager.ts          | 132
3 files changed, 102 insertions, 36 deletions
diff --git a/src/client/apis/google_docs/GooglePhotosClientUtils.ts b/src/client/apis/google_docs/GooglePhotosClientUtils.ts
index 7e5d5fe1b..f8723f02d 100644
--- a/src/client/apis/google_docs/GooglePhotosClientUtils.ts
+++ b/src/client/apis/google_docs/GooglePhotosClientUtils.ts
@@ -306,7 +306,7 @@ export namespace GooglePhotos {
};
export const WriteMediaItemsToServer = async (body: { mediaItems: any[] }): Promise<UploadInformation[]> => {
- const uploads = await Networking.PostToServer("/googlePhotosMediaDownload", body);
+ const uploads = await Networking.PostToServer("/googlePhotosMediaGet", body);
return uploads;
};
@@ -344,7 +344,7 @@ export namespace GooglePhotos {
media.push({ url, description });
}
if (media.length) {
- const results = await Networking.PostToServer("/googlePhotosMediaUpload", { media, album });
+ const results = await Networking.PostToServer("/googlePhotosMediaPost", { media, album });
return results;
}
};
diff --git a/src/new_fields/RichTextUtils.ts b/src/new_fields/RichTextUtils.ts
index 7c1fc39d8..1d90c984d 100644
--- a/src/new_fields/RichTextUtils.ts
+++ b/src/new_fields/RichTextUtils.ts
@@ -128,7 +128,7 @@ export namespace RichTextUtils {
return { baseUrl: embeddedObject.imageProperties!.contentUri! };
});
- const uploads = await Networking.PostToServer("/googlePhotosMediaDownload", { mediaItems });
+ const uploads = await Networking.PostToServer("/googlePhotosMediaGet", { mediaItems });
if (uploads.length !== mediaItems.length) {
throw new AssertionError({ expected: mediaItems.length, actual: uploads.length, message: "Error with internally uploading inlineObjects!" });
diff --git a/src/server/ApiManagers/GooglePhotosManager.ts b/src/server/ApiManagers/GooglePhotosManager.ts
index 3236d1ee2..04b724f4b 100644
--- a/src/server/ApiManagers/GooglePhotosManager.ts
+++ b/src/server/ApiManagers/GooglePhotosManager.ts
@@ -7,28 +7,33 @@ import { GooglePhotosUploadUtils } from "../apis/google/GooglePhotosUploadUtils"
import { Opt } from "../../new_fields/Doc";
import { DashUploadUtils, InjectSize, SizeSuffix } from "../DashUploadUtils";
import { Database } from "../database";
+import { red } from "colors";
+const prefix = "google_photos_";
+const remoteUploadError = "None of the preliminary uploads to Google's servers was successful.";
const authenticationError = "Unable to authenticate Google credentials before uploading to Google Photos!";
const mediaError = "Unable to convert all uploaded bytes to media items!";
-const UploadError = (count: number) => `Unable to upload ${count} images to Dash's server`;
+const localUploadError = (count: number) => `Unable to upload ${count} images to Dash's server`;
const requestError = "Unable to execute download: the body's media items were malformed.";
const downloadError = "Encountered an error while executing downloads.";
+
interface GooglePhotosUploadFailure {
batch: number;
index: number;
url: string;
reason: string;
}
+
interface MediaItem {
baseUrl: string;
}
+
interface NewMediaItem {
description: string;
simpleMediaItem: {
uploadToken: string;
};
}
-const prefix = "google_photos_";
/**
 * This manager handles the creation of routes for Google Photos functionality.
@@ -37,27 +42,47 @@ export default class GooglePhotosManager extends ApiManager {
protected initialize(register: Registration): void {
+        /**
+         * This route receives a list of urls that point to images stored
+         * on Dash's file system and, in a two-step process, uploads them to Google's
+         * servers, returning the information Google generates about each uploaded remote image.
+         */
register({
method: Method.POST,
- subscription: "/googlePhotosMediaUpload",
+ subscription: "/googlePhotosMediaPost",
secureHandler: async ({ user, req, res }) => {
const { media } = req.body;
+
+                // first we need to ensure that we know the Google account to which these photos will be uploaded
const token = await GoogleApiServerUtils.retrieveAccessToken(user.id);
if (!token) {
return _error(res, authenticationError);
}
+
+                // next, uploading everything in one large request (or even looping over the images synchronously)
+                // trips a rate threshold on Google's servers and instantly returns an error. So we pace ourselves,
+                // sending the photos in batches of 25 and dispatching each batch 100 milliseconds after we receive
+                // Google's response to the previous one (the pattern is sketched after this diff)
const failed: GooglePhotosUploadFailure[] = [];
const batched = BatchedArray.from<GooglePhotosUploadUtils.UploadSource>(media, { batchSize: 25 });
+ const interval = { magnitude: 100, unit: TimeUnit.Milliseconds };
const newMediaItems = await batched.batchedMapPatientInterval<NewMediaItem>(
- { magnitude: 100, unit: TimeUnit.Milliseconds },
+ interval,
async (batch: any, collector: any, { completedBatches }: any) => {
for (let index = 0; index < batch.length; index++) {
const { url, description } = batch[index];
+ // a local function used to record failure of an upload
const fail = (reason: string) => failed.push({ reason, batch: completedBatches + 1, index, url });
- const uploadToken = await GooglePhotosUploadUtils.DispatchGooglePhotosUpload(token, InjectSize(url, SizeSuffix.Original)).catch(fail);
+                        // see image resizing: our logic stores the size-agnostic url, but the file system stores
+                        // size-suffixed images. So here, given a size-agnostic url, we make that conversion so that
+                        // the file system knows which bytes to actually upload
+ const imageToUpload = InjectSize(url, SizeSuffix.Original);
+                        // STEP 1/2: send the raw bytes of the image from our server to Google's servers. We get back
+                        // an upload token, a pointer to those bytes that lets us locate them later on (both steps of
+                        // this exchange are sketched after this diff)
+ const uploadToken = await GooglePhotosUploadUtils.DispatchGooglePhotosUpload(token, imageToUpload).catch(fail);
if (!uploadToken) {
fail(`${path.extname(url)} is not an accepted extension`);
} else {
+                            // gather the upload token returned from Google (a pointer to the raw, not-yet-usable bytes
+                            // we've uploaded to their servers) and put it in the JSON format the API accepts for media item creation (used below)
collector.push({
description,
simpleMediaItem: { uploadToken }
@@ -66,11 +91,24 @@ export default class GooglePhotosManager extends ApiManager {
}
}
);
- const failedCount = failed.length;
- if (failedCount) {
- console.error(`Unable to upload ${failedCount} image${failedCount === 1 ? "" : "s"} to Google's servers`);
+
+                // inform the developer / server console of any failed upload attempts. This does not abort
+                // the operation, since some subset of the uploads may have succeeded
+ const { length } = failed;
+ if (length) {
+ console.error(`Unable to upload ${length} image${length === 1 ? "" : "s"} to Google's servers`);
console.log(failed.map(({ reason, batch, index, url }) => `@${batch}.${index}: ${url} failed:\n${reason}`).join('\n\n'));
}
+
+                // if none of the preliminary uploads succeeded, there's no need to try to create media items:
+                // report the failure to the client and return
+ if (!newMediaItems.length) {
+ console.error(red(`${remoteUploadError} Thus, aborting image creation. Please try again.`));
+ _error(res, remoteUploadError);
+ return;
+ }
+
+ // STEP 2/2: create the media items and return the API's response to the client, along with any failures
return GooglePhotosUploadUtils.CreateMediaItems(token, newMediaItems, req.body.album).then(
results => _success(res, { results, failed }),
error => _error(res, mediaError, error)
@@ -78,40 +116,68 @@ export default class GooglePhotosManager extends ApiManager {
}
});
+        /**
+         * This route receives a list of urls that point to images
+         * stored on Google's servers and (following a *rough* heuristic)
+         * uploads each image to Dash's server if it hasn't already been uploaded.
+         * Unfortunately, since Google has so many of these images on its servers,
+         * these user content urls expire every 6 hours. So we can't store the url of a
+         * locally uploaded Google image and compare a candidate url against it to decide
+         * whether we already have the image, since the same bytes on their servers might
+         * now be served from a new, random url. So we do the next best thing and use an
+         * intrinsic attribute of those bytes as an identifier: the precise content size.
+         * This works in simple cases, but has the obvious flaw of skipping a local upload
+         * whenever we've already uploaded a *different* Google user content image with the
+         * exact same content size (the heuristic is sketched after this diff).
+         */
register({
method: Method.POST,
- subscription: "/googlePhotosMediaDownload",
+ subscription: "/googlePhotosMediaGet",
secureHandler: async ({ req, res }) => {
const { mediaItems } = req.body as { mediaItems: MediaItem[] };
+ if (!mediaItems) {
+ // non-starter, since the input was in an invalid format
+ _invalid(res, requestError);
+ return;
+ }
let failed = 0;
- if (mediaItems) {
- const completed: Opt<DashUploadUtils.ImageUploadInformation>[] = [];
- for (const { baseUrl } of mediaItems) {
- const results = await DashUploadUtils.InspectImage(baseUrl);
- if (results instanceof Error) {
- failed++;
- continue;
- }
- const { contentSize, ...attributes } = results;
- const found: Opt<DashUploadUtils.ImageUploadInformation> = await Database.Auxiliary.QueryUploadHistory(contentSize);
- if (!found) {
- const upload = await DashUploadUtils.UploadInspectedImage({ contentSize, ...attributes }, undefined, prefix, false).catch(error => _error(res, downloadError, error));
- if (upload) {
- completed.push(upload);
- await Database.Auxiliary.LogUpload(upload);
- } else {
- failed++;
- }
+ const completed: Opt<DashUploadUtils.ImageUploadInformation>[] = [];
+ for (const { baseUrl } of mediaItems) {
+ // start by getting the content size of the remote image
+ const results = await DashUploadUtils.InspectImage(baseUrl);
+ if (results instanceof Error) {
+ // if something went wrong here, we can't hope to upload it, so just move on to the next
+ failed++;
+ continue;
+ }
+ const { contentSize, ...attributes } = results;
+                    // check whether we have already uploaded a Google user content image with this exact
+                    // content size *specifically via this route*
+ const found: Opt<DashUploadUtils.ImageUploadInformation> = await Database.Auxiliary.QueryUploadHistory(contentSize);
+ if (!found) {
+ // if we haven't, then upload it locally to Dash's server
+ const upload = await DashUploadUtils.UploadInspectedImage({ contentSize, ...attributes }, undefined, prefix, false).catch(error => _error(res, downloadError, error));
+ if (upload) {
+ completed.push(upload);
+                            // inform the heuristic that we've encountered an image with this content size,
+                            // so that future uploads can be checked against it
+ await Database.Auxiliary.LogUpload(upload);
} else {
- completed.push(found);
+                            // make note of a failure to upload locally
+ failed++;
}
+ } else {
+                        // if we have, the variable 'found' conveniently holds the upload information of the
+                        // existing image, so we add it to the list as if we had just uploaded it, without actually
+                        // making a duplicate write
+ completed.push(found);
}
- if (failed) {
- return _error(res, UploadError(failed));
- }
- return _success(res, completed);
}
- _invalid(res, requestError);
+ // if there are any failures, report a general failure to the client
+ if (failed) {
+ return _error(res, localUploadError(failed));
+ }
+ // otherwise, return the image upload information list corresponding to the newly (or previously)
+ // uploaded images
+ _success(res, completed);
}
});
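
The batching comment in `/googlePhotosMediaPost` paces uploads so a burst of requests doesn't trip Google's rate limits. The diff delegates this to `BatchedArray.batchedMapPatientInterval`; below is a minimal, dependency-free sketch of the same pattern, where `mapInBatches` and `pauseMs` are illustrative names rather than the `BatchedArray` API itself.

```typescript
// Dependency-free sketch of the pacing pattern: process items in fixed-size
// batches, pausing between batches so Google's servers aren't flooded.
const sleep = (ms: number) => new Promise<void>(resolve => setTimeout(resolve, ms));

async function mapInBatches<T, R>(
    items: T[],
    batchSize: number,
    pauseMs: number,
    handler: (batch: T[], collector: R[], completedBatches: number) => Promise<void>,
): Promise<R[]> {
    const collector: R[] = [];
    let completedBatches = 0;
    for (let start = 0; start < items.length; start += batchSize) {
        // hand the next slice to the caller, who pushes results into the collector
        await handler(items.slice(start, start + batchSize), collector, completedBatches);
        completedBatches++;
        // pause only if another batch remains, easing off the rate threshold
        if (start + batchSize < items.length) {
            await sleep(pauseMs);
        }
    }
    return collector;
}
```

With `batchSize` 25 and `pauseMs` 100, this mirrors the interval configuration the route passes to `batchedMapPatientInterval`.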
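The STEP 1/2 and STEP 2/2 comments describe the Google Photos Library API's two-phase upload: raw bytes first, media item creation second. Here is a sketch of the two HTTP calls against the public photoslibrary.googleapis.com endpoints; the helper names and the hard-coded `image/jpeg` MIME type are assumptions, and `GooglePhotosUploadUtils` may differ in its details.

```typescript
import { readFile } from "fs/promises";

const API = "https://photoslibrary.googleapis.com/v1";

interface NewMediaItem {
    description: string;
    simpleMediaItem: { uploadToken: string };
}

// STEP 1/2: push the raw bytes to Google's upload endpoint. The response body
// is a plain-text upload token, a pointer to the (not yet usable) bytes.
async function dispatchUpload(accessToken: string, filePath: string): Promise<string> {
    const bytes = await readFile(filePath);
    const response = await fetch(`${API}/uploads`, {
        method: "POST",
        headers: {
            "Authorization": `Bearer ${accessToken}`,
            "Content-type": "application/octet-stream",
            "X-Goog-Upload-Content-Type": "image/jpeg", // assumption: adapt per file
            "X-Goog-Upload-Protocol": "raw",
        },
        body: bytes,
    });
    return response.text();
}

// STEP 2/2: exchange upload tokens for real media items (optionally in an album).
// The API caps each batchCreate call at 50 new media items, consistent with the
// route's choice to work in small batches.
async function createMediaItems(accessToken: string, newMediaItems: NewMediaItem[], albumId?: string) {
    const response = await fetch(`${API}/mediaItems:batchCreate`, {
        method: "POST",
        headers: {
            "Authorization": `Bearer ${accessToken}`,
            "Content-type": "application/json",
        },
        body: JSON.stringify({ albumId, newMediaItems }),
    });
    return response.json();
}
```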
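The doc comment on `/googlePhotosMediaGet` explains the content-size heuristic: because Google's user content urls rotate, the byte count serves as a stand-in identity for remote images. A minimal sketch follows, with an in-memory `Map` standing in for `Database.Auxiliary`'s upload history and a stubbed upload step; both are illustrative, not the diff's actual implementations.

```typescript
interface UploadInfo {
    localPath: string;
    contentSize: number;
}

// stands in for Database.Auxiliary.QueryUploadHistory / LogUpload
const uploadHistory = new Map<number, UploadInfo>();

// read the remote image's content size from its headers, roughly what
// DashUploadUtils.InspectImage is assumed to do
async function inspectRemoteImage(url: string): Promise<number> {
    const response = await fetch(url, { method: "HEAD" });
    const size = Number(response.headers.get("content-length"));
    if (!Number.isFinite(size) || size <= 0) {
        throw new Error(`Could not determine content size for ${url}`);
    }
    return size;
}

// stub: the real route streams the bytes into Dash's file system
async function uploadToDash(url: string, contentSize: number): Promise<UploadInfo> {
    return { localPath: `google_photos_${contentSize}.jpg`, contentSize };
}

async function fetchOrReuse(baseUrl: string): Promise<UploadInfo> {
    const contentSize = await inspectRemoteImage(baseUrl);
    // heuristic: a previous upload with the same byte count is assumed to be the
    // same image, which collides whenever two distinct images share an exact size
    const existing = uploadHistory.get(contentSize);
    if (existing) {
        return existing; // reuse the prior upload, avoiding a duplicate write
    }
    const uploaded = await uploadToDash(baseUrl, contentSize);
    uploadHistory.set(contentSize, uploaded); // log for future lookups
    return uploaded;
}
```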