1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
|
import ApiManager, { Registration } from "./ApiManager";
import { Method, _error, _success, _invalid } from "../RouteManager";
import * as path from "path";
import { GoogleApiServerUtils } from "../apis/google/GoogleApiServerUtils";
import { BatchedArray, TimeUnit } from "array-batcher";
import { GooglePhotosUploadUtils } from "../apis/google/GooglePhotosUploadUtils";
import { Opt } from "../../new_fields/Doc";
import { DashUploadUtils, InjectSize, SizeSuffix } from "../DashUploadUtils";
import { Database } from "../database";
import { red } from "colors";
import { Upload } from "../SharedMediaTypes";
// file name prefix applied to Google Photos images downloaded onto Dash's server (see /googlePhotosMediaGet)
const prefix = "google_photos_";
// reported when step 1 (raw byte upload) fails for every image in the request
const remoteUploadError = "None of the preliminary uploads to Google's servers was successful.";
// reported when no Google access token could be retrieved for the requesting user
const authenticationError = "Unable to authenticate Google credentials before uploading to Google Photos!";
// reported when step 2 (media item creation from upload tokens) fails
const mediaError = "Unable to convert all uploaded bytes to media items!";
// builds the message reported when some images could not be written locally to Dash's server
const localUploadError = (count: number) => `Unable to upload ${count} images to Dash's server`;
// reported when the request body's media items are missing or malformed
const requestError = "Unable to execute download: the body's media items were malformed.";
// reported when downloading a remote image to Dash's server throws
const downloadError = "Encountered an error while executing downloads.";
/**
 * Records a single image that could not be uploaded to Google's servers,
 * locating it by its batch number and its index within that batch.
 */
interface GooglePhotosUploadFailure {
    batch: number; // 1-based number of the batch in which the failure occurred
    index: number; // index of the image within its batch
    url: string; // the (size-agnostic) url of the image that failed to upload
    reason: string; // human-readable explanation of the failure
}
/**
 * Minimal shape of a media item received in a /googlePhotosMediaGet request:
 * a Google user content url pointing at the image bytes (these urls expire — see route comment).
 */
interface MediaItem {
    baseUrl: string;
}
/**
 * JSON shape the Google Photos API accepts when creating a media item
 * from a previously uploaded byte payload.
 */
interface NewMediaItem {
    description: string; // caption attached to the created media item
    simpleMediaItem: {
        uploadToken: string; // pointer to raw bytes already uploaded to Google's servers
    };
}
/**
* This manager handles the creation of routes for google photos functionality.
*/
export default class GooglePhotosManager extends ApiManager {

    protected initialize(register: Registration): void {

        /**
         * This route receives a list of urls that point to images stored
         * on Dash's file system, and, in a two step process, uploads them to Google's servers and
         * returns the information Google generates about the associated uploaded remote images.
         */
        register({
            method: Method.POST,
            subscription: "/googlePhotosMediaPost",
            secureHandler: async ({ user, req, res }) => {
                const { media } = req.body;

                // first we need to ensure that we know the google account to which these photos will be uploaded
                const token = await GoogleApiServerUtils.retrieveAccessToken(user.id);
                if (!token) {
                    return _error(res, authenticationError);
                }

                // having one large list, or even synchronously looping over everything, trips a rate
                // threshold set on Google's servers and would instantly return an error. So, we ease things
                // out and send the photos to upload in batches of 25, where the next batch is sent
                // 100 milliseconds after we receive a response from Google's servers.
                const failed: GooglePhotosUploadFailure[] = [];
                const batched = BatchedArray.from<GooglePhotosUploadUtils.UploadSource>(media, { batchSize: 25 });
                const interval = { magnitude: 100, unit: TimeUnit.Milliseconds };
                const newMediaItems = await batched.batchedMapPatientInterval<NewMediaItem>(
                    interval,
                    async (batch: GooglePhotosUploadUtils.UploadSource[], collector: NewMediaItem[], { completedBatches }: { completedBatches: number }) => {
                        for (let index = 0; index < batch.length; index++) {
                            const { url, description } = batch[index];
                            // a local function used to record failure of a single upload without aborting the batch
                            const fail = (reason: string) => failed.push({ reason, batch: completedBatches + 1, index, url });
                            // see image resizing - we store the size-agnostic url in our logic, but write out size-suffixed
                            // images to the file system. So here, given a size-agnostic url, we're just making that
                            // conversion so that the file system knows which bytes to actually upload.
                            const imageToUpload = InjectSize(url, SizeSuffix.Original);
                            // STEP 1/2: send the raw bytes of the image from our server to Google's servers. We'll get back
                            // an upload token which acts as a pointer to those bytes that we can use to locate them later on.
                            // NOTE: the rejection handler used to be `.catch(fail)`, whose return value (Array.push's new
                            // length, a truthy number) leaked into uploadToken and produced a malformed media item. Now a
                            // rejection records its reason and resolves to undefined, so the guards below behave correctly.
                            let dispatchError: string | undefined;
                            const uploadToken = await GooglePhotosUploadUtils.DispatchGooglePhotosUpload(token, imageToUpload).catch(error => {
                                dispatchError = `${error}`;
                                return undefined;
                            });
                            if (dispatchError !== undefined) {
                                fail(dispatchError);
                            } else if (!uploadToken) {
                                fail(`${path.extname(url)} is not an accepted extension`);
                            } else {
                                // gather the upload token returned from Google (a pointer they give us to the raw, currently
                                // useless bytes we've uploaded to their servers) and put it in the JSON format that the API
                                // accepts for image creation (used soon, below)
                                collector.push({
                                    description,
                                    simpleMediaItem: { uploadToken }
                                });
                            }
                        }
                    }
                );

                // inform the developer / server console of any failed upload attempts.
                // does not abort the operation, since some subset of the uploads may have been successful
                const { length } = failed;
                if (length) {
                    console.error(`Unable to upload ${length} image${length === 1 ? "" : "s"} to Google's servers`);
                    console.log(failed.map(({ reason, batch, index, url }) => `@${batch}.${index}: ${url} failed:\n${reason}`).join('\n\n'));
                }

                // if none of the preliminary uploads was successful, there's no need to try and create images:
                // report the failure to the client and return
                if (!newMediaItems.length) {
                    console.error(red(`${remoteUploadError} Thus, aborting image creation. Please try again.`));
                    _error(res, remoteUploadError);
                    return;
                }

                // STEP 2/2: create the media items and return the API's response to the client, along with any failures
                return GooglePhotosUploadUtils.CreateMediaItems(token, newMediaItems, req.body.album).then(
                    results => _success(res, { results, failed }),
                    error => _error(res, mediaError, error)
                );
            }
        });

        /**
         * This route receives a list of urls that point to images
         * stored on Google's servers and (following a *rough* heuristic)
         * uploads each image to Dash's server if it hasn't already been uploaded.
         * Unfortunately, since Google has so many of these images on its servers,
         * these user content urls expire every 6 hours. So we can't store the url of a locally uploaded
         * Google image and compare the candidate url to it to figure out if we already have it,
         * since the same bytes on their server might now be associated with a new, random url.
         * So, we do the next best thing and try to use an intrinsic attribute of those bytes as
         * an identifier: the precise content size. This works in small cases, but has the obvious flaw of failing to upload
         * an image locally if we already have uploaded another Google user content image with the exact same content size.
         */
        register({
            method: Method.POST,
            subscription: "/googlePhotosMediaGet",
            secureHandler: async ({ req, res }) => {
                const { mediaItems } = req.body as { mediaItems: MediaItem[] };
                if (!mediaItems) {
                    // non-starter, since the input was in an invalid format
                    _invalid(res, requestError);
                    return;
                }
                let failed = 0;
                const completed: Opt<Upload.ImageInformation>[] = [];
                for (const { baseUrl } of mediaItems) {
                    // start by getting the content size of the remote image
                    const results = await DashUploadUtils.InspectImage(baseUrl);
                    if (results instanceof Error) {
                        // if something went wrong here, we can't hope to upload it, so just move on to the next
                        failed++;
                        continue;
                    }
                    const { contentSize, ...attributes } = results;
                    // check to see if we have already uploaded a Google user content image *specifically via
                    // this route* that has this exact content size
                    const found: Opt<Upload.ImageInformation> = await Database.Auxiliary.QueryUploadHistory(contentSize);
                    if (found) {
                        // 'found' is handily the upload information of the existing image, so we add it to the
                        // list as if we had just uploaded it now, without actually making a duplicate write
                        completed.push(found);
                        continue;
                    }
                    // we haven't seen this content size before, so upload the image locally to Dash's server
                    try {
                        const upload = await DashUploadUtils.UploadInspectedImage({ contentSize, ...attributes }, undefined, prefix, false);
                        if (upload) {
                            completed.push(upload);
                            // inform the heuristic that we've encountered an image with this content size,
                            // to be checked against in future uploads
                            await Database.Auxiliary.LogUpload(upload);
                        } else {
                            // make note of a failure to upload locally
                            failed++;
                        }
                    } catch (error) {
                        // report once and bail out: previously the handler responded here and then kept
                        // looping, which could attempt to send a second response on the same request
                        return _error(res, downloadError, error);
                    }
                }
                // if there were any failures, report a general failure to the client
                if (failed) {
                    return _error(res, localUploadError(failed));
                }
                // otherwise, return the image upload information list corresponding to the newly (or previously)
                // uploaded images
                _success(res, completed);
            }
        });
    }

}
|