-rw-r--r--  src/Utils.ts                                              6
-rw-r--r--  src/client/apis/google_docs/GooglePhotosClientUtils.ts    8
-rw-r--r--  src/client/views/Main.tsx                                 3
-rw-r--r--  src/extensions/ArrayExtensions.ts                         325
-rw-r--r--  src/extensions/Extensions.ts                              7
-rw-r--r--  src/extensions/General/Extensions.ts                      9
-rw-r--r--  src/extensions/General/ExtensionsTypings.ts               8
-rw-r--r--  src/extensions/StringExtensions.ts                        11
-rw-r--r--  src/server/DashUploadUtils.ts                             143
-rw-r--r--  src/server/apis/google/CustomizedWrapper/filters.js       46
-rw-r--r--  src/server/apis/google/GoogleApiServerUtils.ts            32
-rw-r--r--  src/server/apis/google/GooglePhotosUploadUtils.ts         147
-rw-r--r--  src/server/apis/google/existing_uploads.json              0
-rw-r--r--  src/server/credentials/google_docs_token.json             8
-rw-r--r--  src/server/database.ts                                    402
-rw-r--r--  src/server/index.ts                                       49
16 files changed, 465 insertions, 739 deletions
diff --git a/src/Utils.ts b/src/Utils.ts
index 6489eff77..5f06b5cec 100644
--- a/src/Utils.ts
+++ b/src/Utils.ts
@@ -4,6 +4,7 @@ import { Socket } from 'socket.io';
import { Message } from './server/Message';
import { RouteStore } from './server/RouteStore';
import requestPromise = require('request-promise');
+import { CurrentUserUtils } from './server/authentication/models/current_user_utils';
export class Utils {
@@ -292,12 +293,13 @@ export namespace JSONUtils {
}
-export function PostToServer(relativeRoute: string, body: any) {
+export function PostToServer(relativeRoute: string, body?: any) {
+ body = { userId: CurrentUserUtils.id, ...body };
let options = {
method: "POST",
uri: Utils.prepend(relativeRoute),
json: true,
- body: body
+ body
};
return requestPromise.post(options);
} \ No newline at end of file
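For reference, a minimal sketch (not part of this diff) of how a caller is expected to use the reworked PostToServer: the current user's id is now spread into every request body by default, and an explicit body remains optional. The import paths and chosen route below are illustrative.

// Hypothetical client-side caller; PostToServer now injects { userId: CurrentUserUtils.id }
// into the body, so routes that only need the user's id can be called with no body argument.
import { PostToServer } from "../Utils";
import { RouteStore } from "../server/RouteStore";

async function fetchGooglePhotosToken() {
    // json: true in PostToServer's options means the resolved value is the parsed response body
    const token = await PostToServer(RouteStore.googlePhotosAccessToken);
    return token;
}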
diff --git a/src/client/apis/google_docs/GooglePhotosClientUtils.ts b/src/client/apis/google_docs/GooglePhotosClientUtils.ts
index 559b8fd6a..c45a49f1a 100644
--- a/src/client/apis/google_docs/GooglePhotosClientUtils.ts
+++ b/src/client/apis/google_docs/GooglePhotosClientUtils.ts
@@ -12,18 +12,12 @@ import { FormattedTextBox } from "../../views/nodes/FormattedTextBox";
import { Docs, DocumentOptions } from "../../documents/Documents";
import { NewMediaItemResult, MediaItem } from "../../../server/apis/google/SharedTypes";
import { AssertionError } from "assert";
-import { List } from "../../../new_fields/List";
-import { listSpec } from "../../../new_fields/Schema";
import { DocumentView } from "../../views/nodes/DocumentView";
import { DocumentManager } from "../../util/DocumentManager";
export namespace GooglePhotos {
- const endpoint = async () => {
- const getToken = Utils.prepend(RouteStore.googlePhotosAccessToken);
- const token = await (await fetch(getToken)).text();
- return new Photos(token);
- };
+ const endpoint = async () => new Photos(await PostToServer(RouteStore.googlePhotosAccessToken));
export enum MediaType {
ALL_MEDIA = 'ALL_MEDIA',
diff --git a/src/client/views/Main.tsx b/src/client/views/Main.tsx
index 70d2235e6..3bd898ac0 100644
--- a/src/client/views/Main.tsx
+++ b/src/client/views/Main.tsx
@@ -7,6 +7,9 @@ import { Cast } from "../../new_fields/Types";
import { Doc, DocListCastAsync } from "../../new_fields/Doc";
import { List } from "../../new_fields/List";
import { DocServer } from "../DocServer";
+import { AssignAllExtensions } from "../../extensions/General/Extensions";
+
+AssignAllExtensions();
let swapDocs = async () => {
let oldDoc = await Cast(CurrentUserUtils.UserDocument.linkManagerDoc, Doc);
diff --git a/src/extensions/ArrayExtensions.ts b/src/extensions/ArrayExtensions.ts
index ca407862b..422a10dbc 100644
--- a/src/extensions/ArrayExtensions.ts
+++ b/src/extensions/ArrayExtensions.ts
@@ -1,318 +1,13 @@
-interface Array<T> {
- lastElement(): T;
-}
-
-// interface BatchContext {
-// completedBatches: number;
-// remainingBatches: number;
-// }
-
-// interface ExecutorResult<A> {
-// updated: A;
-// makeNextBatch: boolean;
-// }
-
-// interface PredicateBatcherCommon<A> {
-// initial: A;
-// persistAccumulator?: boolean;
-// }
-
-// interface Interval {
-// magnitude: number;
-// unit: typeof module.exports.TimeUnit;
-// }
-
-// type BatchConverterSync<I, O> = (batch: I[], context: BatchContext) => O[];
-// type BatchConverterAsync<I, O> = (batch: I[], context: BatchContext) => Promise<O[]>;
-// type BatchConverter<I, O> = BatchConverterSync<I, O> | BatchConverterAsync<I, O>;
-
-// type BatchHandlerSync<I> = (batch: I[], context: BatchContext) => void;
-// type BatchHandlerAsync<I> = (batch: I[], context: BatchContext) => Promise<void>;
-// type BatchHandler<I> = BatchHandlerSync<I> | BatchHandlerAsync<I>;
-
-// type BatcherSync<I, A> = FixedBatcher | PredicateBatcherSync<I, A>;
-// type BatcherAsync<I, A> = PredicateBatcherAsync<I, A>;
-// type Batcher<I, A> = BatcherSync<I, A> | BatcherAsync<I, A>;
-
-// type FixedBatcher = { batchSize: number } | { batchCount: number, mode?: typeof module.exports.Mode };
-// type PredicateBatcherSync<I, A> = PredicateBatcherCommon<A> & { executor: (element: I, accumulator: A) => ExecutorResult<A> };
-// type PredicateBatcherAsync<I, A> = PredicateBatcherCommon<A> & { executorAsync: (element: I, accumulator: A) => Promise<ExecutorResult<A>> };
-
-
-// module.exports.Mode = {
-// Balanced: 0,
-// Even: 1
-// };
-
-// module.exports.TimeUnit = {
-// Milliseconds: 0,
-// Seconds: 1,
-// Minutes: 2
-// };
-
-// module.exports.Assign = function () {
-
-// Array.prototype.fixedBatch = function <T>(batcher: FixedBatcher): T[][] {
-// const batches: T[][] = [];
-// const length = this.length;
-// let i = 0;
-// if ("batchSize" in batcher) {
-// const { batchSize } = batcher;
-// while (i < this.length) {
-// const cap = Math.min(i + batchSize, length);
-// batches.push(this.slice(i, i = cap));
-// }
-// } else if ("batchCount" in batcher) {
-// let { batchCount, mode } = batcher;
-// const resolved = mode || module.exports.Mode.Balanced;
-// if (batchCount < 1) {
-// throw new Error("Batch count must be a positive integer!");
-// }
-// if (batchCount === 1) {
-// return [this];
-// }
-// if (batchCount >= this.length) {
-// return this.map((element: T) => [element]);
-// }
+function Assign() {
-// let length = this.length;
-// let size: number;
+ Array.prototype.lastElement = function <T>() {
+ if (!this.length) {
+ return undefined;
+ }
+ const last: T = this[this.length - 1];
+ return last;
+ };
-// if (length % batchCount === 0) {
-// size = Math.floor(length / batchCount);
-// while (i < length) {
-// batches.push(this.slice(i, i += size));
-// }
-// } else if (resolved === module.exports.Mode.Balanced) {
-// while (i < length) {
-// size = Math.ceil((length - i) / batchCount--);
-// batches.push(this.slice(i, i += size));
-// }
-// } else {
-// batchCount--;
-// size = Math.floor(length / batchCount);
-// if (length % size === 0) {
-// size--;
-// }
-// while (i < size * batchCount) {
-// batches.push(this.slice(i, i += size));
-// }
-// batches.push(this.slice(size * batchCount));
-// }
-// }
-// return batches;
-// };
-
-// Array.prototype.predicateBatch = function <T, A>(batcher: PredicateBatcherSync<T, A>): T[][] {
-// const batches: T[][] = [];
-// let batch: T[] = [];
-// const { executor, initial, persistAccumulator } = batcher;
-// let accumulator = initial;
-// for (let element of this) {
-// const { updated, makeNextBatch } = executor(element, accumulator);
-// accumulator = updated;
-// if (!makeNextBatch) {
-// batch.push(element);
-// } else {
-// batches.push(batch);
-// batch = [element];
-// if (!persistAccumulator) {
-// accumulator = initial;
-// }
-// }
-// }
-// batches.push(batch);
-// return batches;
-// };
-
-// Array.prototype.predicateBatchAsync = async function <T, A>(batcher: BatcherAsync<T, A>): Promise<T[][]> {
-// const batches: T[][] = [];
-// let batch: T[] = [];
-// const { executorAsync, initial, persistAccumulator } = batcher;
-// let accumulator: A = initial;
-// for (let element of this) {
-// const { updated, makeNextBatch } = await executorAsync(element, accumulator);
-// accumulator = updated;
-// if (!makeNextBatch) {
-// batch.push(element);
-// } else {
-// batches.push(batch);
-// batch = [element];
-// if (!persistAccumulator) {
-// accumulator = initial;
-// }
-// }
-// }
-// batches.push(batch);
-// return batches;
-// };
-
-// Array.prototype.batch = function <T, A>(batcher: BatcherSync<T, A>): T[][] {
-// if ("executor" in batcher) {
-// return this.predicateBatch(batcher);
-// } else {
-// return this.fixedBatch(batcher);
-// }
-// };
-
-// Array.prototype.batchAsync = async function <T, A>(batcher: Batcher<T, A>): Promise<T[][]> {
-// if ("executorAsync" in batcher) {
-// return this.predicateBatchAsync(batcher);
-// } else {
-// return this.batch(batcher);
-// }
-// };
-
-// Array.prototype.batchedForEach = function <I, A>(batcher: BatcherSync<I, A>, handler: BatchHandlerSync<I>): void {
-// if (this.length) {
-// let completed = 0;
-// const batches = this.batch(batcher);
-// const quota = batches.length;
-// for (let batch of batches) {
-// const context: BatchContext = {
-// completedBatches: completed,
-// remainingBatches: quota - completed,
-// };
-// handler(batch, context);
-// completed++;
-// }
-// }
-// };
-
-// Array.prototype.batchedMap = function <I, O, A>(batcher: BatcherSync<I, A>, handler: BatchConverterSync<I, O>): O[] {
-// if (!this.length) {
-// return [];
-// }
-// let collector: O[] = [];
-// let completed = 0;
-// const batches = this.batch(batcher);
-// const quota = batches.length;
-// for (let batch of batches) {
-// const context: BatchContext = {
-// completedBatches: completed,
-// remainingBatches: quota - completed,
-// };
-// collector.push(...handler(batch, context));
-// completed++;
-// }
-// return collector;
-// };
-
-// Array.prototype.batchedForEachAsync = async function <I, A>(batcher: Batcher<I, A>, handler: BatchHandler<I>): Promise<void> {
-// if (this.length) {
-// let completed = 0;
-// const batches = await this.batchAsync(batcher);
-// const quota = batches.length;
-// for (let batch of batches) {
-// const context: BatchContext = {
-// completedBatches: completed,
-// remainingBatches: quota - completed,
-// };
-// await handler(batch, context);
-// completed++;
-// }
-// }
-// };
-
-// Array.prototype.batchedMapAsync = async function <I, O, A>(batcher: Batcher<I, A>, handler: BatchConverter<I, O>): Promise<O[]> {
-// if (!this.length) {
-// return [];
-// }
-// let collector: O[] = [];
-// let completed = 0;
-// const batches = await this.batchAsync(batcher);
-// const quota = batches.length;
-// for (let batch of batches) {
-// const context: BatchContext = {
-// completedBatches: completed,
-// remainingBatches: quota - completed,
-// };
-// collector.push(...(await handler(batch, context)));
-// completed++;
-// }
-// return collector;
-// };
-
-// Array.prototype.batchedForEachInterval = async function <I, A>(batcher: Batcher<I, A>, handler: BatchHandler<I>, interval: Interval): Promise<void> {
-// if (!this.length) {
-// return;
-// }
-// const batches = await this.batchAsync(batcher);
-// const quota = batches.length;
-// return new Promise<void>(async resolve => {
-// const iterator = batches[Symbol.iterator]();
-// let completed = 0;
-// while (true) {
-// const next = iterator.next();
-// await new Promise<void>(resolve => {
-// setTimeout(async () => {
-// const batch = next.value;
-// const context: BatchContext = {
-// completedBatches: completed,
-// remainingBatches: quota - completed,
-// };
-// await handler(batch, context);
-// resolve();
-// }, convert(interval));
-// });
-// if (++completed === quota) {
-// break;
-// }
-// }
-// resolve();
-// });
-// };
-
-// Array.prototype.batchedMapInterval = async function <I, O, A>(batcher: Batcher<I, A>, handler: BatchConverter<I, O>, interval: Interval): Promise<O[]> {
-// if (!this.length) {
-// return [];
-// }
-// let collector: O[] = [];
-// const batches = await this.batchAsync(batcher);
-// const quota = batches.length;
-// return new Promise<O[]>(async resolve => {
-// const iterator = batches[Symbol.iterator]();
-// let completed = 0;
-// while (true) {
-// const next = iterator.next();
-// await new Promise<void>(resolve => {
-// setTimeout(async () => {
-// const batch = next.value;
-// const context: BatchContext = {
-// completedBatches: completed,
-// remainingBatches: quota - completed,
-// };
-// collector.push(...(await handler(batch, context)));
-// resolve();
-// }, convert(interval));
-// });
-// if (++completed === quota) {
-// resolve(collector);
-// break;
-// }
-// }
-// });
-// };
-
-Array.prototype.lastElement = function <T>() {
- if (!this.length) {
- return undefined;
- }
- const last: T = this[this.length - 1];
- return last;
-};
-
-// };
+}
-// const convert = (interval: Interval) => {
-// const { magnitude, unit } = interval;
-// switch (unit) {
-// default:
-// case module.exports.TimeUnit.Milliseconds:
-// return magnitude;
-// case module.exports.TimeUnit.Seconds:
-// return magnitude * 1000;
-// case module.exports.TimeUnit.Minutes:
-// return magnitude * 1000 * 60;
-// }
-// }; \ No newline at end of file
+export { Assign }; \ No newline at end of file
diff --git a/src/extensions/Extensions.ts b/src/extensions/Extensions.ts
deleted file mode 100644
index 1391140b9..000000000
--- a/src/extensions/Extensions.ts
+++ /dev/null
@@ -1,7 +0,0 @@
-const ArrayExtensions = require("./ArrayExtensions");
-const StringExtensions = require("./StringExtensions");
-
-module.exports.AssignExtensions = function () {
- // ArrayExtensions.Assign();
- StringExtensions.Assign();
-}; \ No newline at end of file
diff --git a/src/extensions/General/Extensions.ts b/src/extensions/General/Extensions.ts
new file mode 100644
index 000000000..4b6d05d5f
--- /dev/null
+++ b/src/extensions/General/Extensions.ts
@@ -0,0 +1,9 @@
+import { Assign as ArrayAssign } from "../ArrayExtensions";
+import { Assign as StringAssign } from "../StringExtensions";
+
+function AssignAllExtensions() {
+ ArrayAssign();
+ StringAssign();
+}
+
+export { AssignAllExtensions }; \ No newline at end of file
diff --git a/src/extensions/General/ExtensionsTypings.ts b/src/extensions/General/ExtensionsTypings.ts
new file mode 100644
index 000000000..370157ed0
--- /dev/null
+++ b/src/extensions/General/ExtensionsTypings.ts
@@ -0,0 +1,8 @@
+interface Array<T> {
+ lastElement(): T;
+}
+
+interface String {
+ removeTrailingNewlines(): string;
+ hasNewline(): boolean;
+} \ No newline at end of file
diff --git a/src/extensions/StringExtensions.ts b/src/extensions/StringExtensions.ts
index 4cdbdebf7..2c76e56c8 100644
--- a/src/extensions/StringExtensions.ts
+++ b/src/extensions/StringExtensions.ts
@@ -1,9 +1,4 @@
-interface String {
- removeTrailingNewlines(): string;
- hasNewline(): boolean;
-}
-
-module.exports.Assign = function () {
+function Assign() {
String.prototype.removeTrailingNewlines = function () {
let sliced = this;
@@ -17,4 +12,6 @@ module.exports.Assign = function () {
return this.endsWith("\n");
};
-}; \ No newline at end of file
+}
+
+export { Assign }; \ No newline at end of file
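A short usage sketch (illustrative, not part of the diff) tying the refactored extension modules together: AssignAllExtensions() patches the prototypes once at startup, as Main.tsx now does, while ExtensionsTypings.ts makes the added members visible to the compiler.

import { AssignAllExtensions } from "./extensions/General/Extensions";

AssignAllExtensions();

const last = [1, 2, 3].lastElement();                  // 3; undefined on an empty array
const trimmed = "body of text\n\n".removeTrailingNewlines();
const endsWithNewline = "single line\n".hasNewline();  // true: the string ends with "\n"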
diff --git a/src/server/DashUploadUtils.ts b/src/server/DashUploadUtils.ts
new file mode 100644
index 000000000..66874e96c
--- /dev/null
+++ b/src/server/DashUploadUtils.ts
@@ -0,0 +1,143 @@
+import * as fs from 'fs';
+import { Utils } from '../Utils';
+import * as path from 'path';
+import { Opt } from '../new_fields/Doc';
+import * as sharp from 'sharp';
+import request = require('request-promise');
+
+const uploadDirectory = path.join(__dirname, './public/files/');
+
+export namespace DashUploadUtils {
+
+ export interface Size {
+ width: number;
+ suffix: string;
+ }
+
+ export const Sizes: { [size: string]: Size } = {
+ SMALL: { width: 100, suffix: "_s" },
+ MEDIUM: { width: 400, suffix: "_m" },
+ LARGE: { width: 900, suffix: "_l" },
+ };
+
+ const gifs = [".gif"];
+ const pngs = [".png"];
+ const jpgs = [".jpg", ".jpeg"];
+ const imageFormats = [...pngs, ...jpgs, ...gifs];
+ const videoFormats = [".mov", ".mp4"];
+
+ const size = "content-length";
+ const type = "content-type";
+
+ export interface UploadInformation {
+ mediaPaths: string[];
+ fileNames: { [key: string]: string };
+ contentSize?: number;
+ contentType?: string;
+ }
+
+ const generate = (prefix: string, url: string) => `${prefix}upload_${Utils.GenerateGuid()}${path.extname(url).toLowerCase()}`;
+ const sanitize = (filename: string) => filename.replace(/\s+/g, "_");
+
+ export interface InspectionResults {
+ isLocal: boolean;
+ stream: any;
+ normalizedUrl: string;
+ contentSize?: number;
+ contentType?: string;
+ }
+
+ export const InspectImage = async (url: string): Promise<InspectionResults> => {
+ const { isLocal, stream, normalized: normalizedUrl } = classify(url);
+ const results = {
+ isLocal,
+ stream,
+ normalizedUrl
+ };
+ if (isLocal) {
+ return results;
+ }
+ const metadata = (await new Promise<any>((resolve, reject) => {
+ request.head(url, async (error, res) => {
+ if (error) {
+ return reject(error);
+ }
+ resolve(res);
+ });
+ })).headers;
+ return {
+ contentSize: parseInt(metadata[size]),
+ contentType: metadata[type],
+ ...results
+ };
+ };
+
+ export const UploadImage = async (metadata: InspectionResults, filename?: string, prefix = ""): Promise<Opt<UploadInformation>> => {
+ const { isLocal, stream, normalizedUrl, contentSize, contentType } = metadata;
+ const resolved = filename ? sanitize(filename) : generate(prefix, normalizedUrl);
+ let extension = path.extname(normalizedUrl) || path.extname(resolved);
+ extension && (extension = extension.toLowerCase());
+ let information: UploadInformation = {
+ mediaPaths: [],
+ fileNames: { clean: resolved },
+ contentSize,
+ contentType,
+ };
+ return new Promise<UploadInformation>(async (resolve, reject) => {
+ const resizers = [
+ { resizer: sharp().rotate(), suffix: "_o" },
+ ...Object.values(Sizes).map(size => ({
+ resizer: sharp().resize(size.width, undefined, { withoutEnlargement: true }).rotate(),
+ suffix: size.suffix
+ }))
+ ];
+ let nonVisual = false;
+ if (pngs.includes(extension)) {
+ resizers.forEach(element => element.resizer = element.resizer.png());
+ } else if (jpgs.includes(extension)) {
+ resizers.forEach(element => element.resizer = element.resizer.jpeg());
+ } else if (![...imageFormats, ...videoFormats].includes(extension.toLowerCase())) {
+ nonVisual = true;
+ }
+ if (imageFormats.includes(extension)) {
+ for (let resizer of resizers) {
+ const suffix = resizer.suffix;
+ let mediaPath: string;
+ await new Promise<void>(resolve => {
+ const filename = resolved.substring(0, resolved.length - extension.length) + suffix + extension;
+ information.mediaPaths.push(mediaPath = uploadDirectory + filename);
+ information.fileNames[suffix] = filename;
+ stream(normalizedUrl).pipe(resizer.resizer).pipe(fs.createWriteStream(mediaPath))
+ .on('close', resolve)
+ .on('error', reject);
+ });
+ }
+ }
+ if (!isLocal || nonVisual) {
+ await new Promise<void>(resolve => {
+ stream(normalizedUrl).pipe(fs.createWriteStream(uploadDirectory + resolved)).on('close', resolve);
+ });
+ }
+ resolve(information);
+ });
+ };
+
+ const classify = (url: string) => {
+ const isLocal = /Dash-Web(\\|\/)src(\\|\/)server(\\|\/)public(\\|\/)files/g.test(url);
+ return {
+ isLocal,
+ stream: isLocal ? fs.createReadStream : request,
+ normalized: isLocal ? path.normalize(url) : url
+ };
+ };
+
+ export const createIfNotExists = async (path: string) => {
+ if (await new Promise<boolean>(resolve => fs.exists(path, resolve))) {
+ return true;
+ }
+ return new Promise<boolean>(resolve => fs.mkdir(path, error => resolve(error === null)));
+ };
+
+ export const Destroy = (mediaPath: string) => new Promise<boolean>(resolve => fs.unlink(mediaPath, error => resolve(error === null)));
+
+} \ No newline at end of file
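A minimal sketch (assumed usage, not part of the diff) of the call sequence this extracted module is designed around: inspect first to classify the source and read its headers, then upload, resizing where the format allows, into the server's files directory.

import { DashUploadUtils } from "./server/DashUploadUtils";

async function mirrorImage(url: string) {
    const metadata = await DashUploadUtils.InspectImage(url);
    // second argument (filename) is optional; the third is a prefix such as "google_photos_"
    const upload = await DashUploadUtils.UploadImage(metadata, undefined, "google_photos_");
    if (upload) {
        // fileNames maps "clean" (and, for recognized image formats, each size suffix) to a written file
        console.log(upload.mediaPaths, upload.contentSize, upload.contentType);
    }
    return upload;
}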
diff --git a/src/server/apis/google/CustomizedWrapper/filters.js b/src/server/apis/google/CustomizedWrapper/filters.js
deleted file mode 100644
index 576a90b75..000000000
--- a/src/server/apis/google/CustomizedWrapper/filters.js
+++ /dev/null
@@ -1,46 +0,0 @@
-'use strict';
-
-const DateFilter = require('../common/date_filter');
-const MediaTypeFilter = require('./media_type_filter');
-const ContentFilter = require('./content_filter');
-
-class Filters {
- constructor(includeArchivedMedia = false) {
- this.includeArchivedMedia = includeArchivedMedia;
- }
-
- setDateFilter(dateFilter) {
- this.dateFilter = dateFilter;
- return this;
- }
-
- setContentFilter(contentFilter) {
- this.contentFilter = contentFilter;
- return this;
- }
-
- setMediaTypeFilter(mediaTypeFilter) {
- this.mediaTypeFilter = mediaTypeFilter;
- return this;
- }
-
- setIncludeArchivedMedia(includeArchivedMedia) {
- this.includeArchivedMedia = includeArchivedMedia;
- return this;
- }
-
- toJSON() {
- return {
- dateFilter: this.dateFilter instanceof DateFilter ? this.dateFilter.toJSON() : this.dateFilter,
- mediaTypeFilter: this.mediaTypeFilter instanceof MediaTypeFilter ?
- this.mediaTypeFilter.toJSON() :
- this.mediaTypeFilter,
- contentFilter: this.contentFilter instanceof ContentFilter ?
- this.contentFilter.toJSON() :
- this.contentFilter,
- includeArchivedMedia: this.includeArchivedMedia
- };
- }
-}
-
-module.exports = Filters; \ No newline at end of file
diff --git a/src/server/apis/google/GoogleApiServerUtils.ts b/src/server/apis/google/GoogleApiServerUtils.ts
index e0bd8a800..684a8081b 100644
--- a/src/server/apis/google/GoogleApiServerUtils.ts
+++ b/src/server/apis/google/GoogleApiServerUtils.ts
@@ -8,7 +8,7 @@ import { GaxiosResponse } from "gaxios";
import request = require('request-promise');
import * as qs from 'query-string';
import Photos = require('googlephotos');
-
+import { Database } from "../../database";
/**
* Server side authentication for Google Api queries.
*/
@@ -35,9 +35,9 @@ export namespace GoogleApiServerUtils {
Slides = "Slides"
}
- export interface CredentialPaths {
+ export interface CredentialInformation {
credentialsPath: string;
- tokenPath: string;
+ userId: string;
}
export type ApiResponse = Promise<GaxiosResponse>;
@@ -48,7 +48,7 @@ export namespace GoogleApiServerUtils {
export type Endpoint = { get: ApiHandler, create: ApiHandler, batchUpdate: ApiHandler };
export type EndpointParameters = GlobalOptions & { version: "v1" };
- export const GetEndpoint = (sector: string, paths: CredentialPaths) => {
+ export const GetEndpoint = (sector: string, paths: CredentialInformation) => {
return new Promise<Opt<Endpoint>>(resolve => {
RetrieveCredentials(paths).then(authentication => {
let routed: Opt<Endpoint>;
@@ -66,28 +66,28 @@ export namespace GoogleApiServerUtils {
});
};
- export const RetrieveCredentials = (paths: CredentialPaths) => {
+ export const RetrieveCredentials = (information: CredentialInformation) => {
return new Promise<TokenResult>((resolve, reject) => {
- readFile(paths.credentialsPath, async (err, credentials) => {
+ readFile(information.credentialsPath, async (err, credentials) => {
if (err) {
reject(err);
return console.log('Error loading client secret file:', err);
}
- authorize(parseBuffer(credentials), paths.tokenPath).then(resolve, reject);
+ authorize(parseBuffer(credentials), information.userId).then(resolve, reject);
});
});
};
- export const RetrieveAccessToken = (paths: CredentialPaths) => {
+ export const RetrieveAccessToken = (information: CredentialInformation) => {
return new Promise<string>((resolve, reject) => {
- RetrieveCredentials(paths).then(
+ RetrieveCredentials(information).then(
credentials => resolve(credentials.token.access_token!),
error => reject(`Error: unable to authenticate Google Photos API request.\n${error}`)
);
});
};
- export const RetrievePhotosEndpoint = (paths: CredentialPaths) => {
+ export const RetrievePhotosEndpoint = (paths: CredentialInformation) => {
return new Promise<any>((resolve, reject) => {
RetrieveAccessToken(paths).then(
token => resolve(new Photos(token)),
@@ -101,20 +101,20 @@ export namespace GoogleApiServerUtils {
* Create an OAuth2 client with the given credentials, and returns the promise resolving to the authenticated client
* @param {Object} credentials The authorization client credentials.
*/
- export function authorize(credentials: any, token_path: string): Promise<TokenResult> {
+ export function authorize(credentials: any, userId: string): Promise<TokenResult> {
const { client_secret, client_id, redirect_uris } = credentials.installed;
const oAuth2Client = new google.auth.OAuth2(
client_id, client_secret, redirect_uris[0]);
return new Promise<TokenResult>((resolve, reject) => {
- readFile(token_path, (err, token) => {
- // Check if we have previously stored a token.
- if (err) {
- return getNewToken(oAuth2Client, token_path).then(resolve, reject);
+ Database.Auxiliary.FetchGoogleAuthenticationToken(userId).then(token => {
+ // Check if we have previously stored a token for this userId.
+ if (!token) {
+ return getNewToken(oAuth2Client, userId).then(resolve, reject);
}
let parsed: Credentials = parseBuffer(token);
if (parsed.expiry_date! < new Date().getTime()) {
- return refreshToken(parsed, client_id, client_secret, oAuth2Client, token_path).then(resolve, reject);
+ return refreshToken(parsed, client_id, client_secret, oAuth2Client, userId).then(resolve, reject);
}
oAuth2Client.setCredentials(parsed);
resolve({ token: parsed, client: oAuth2Client });
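A sketch of how the revised flow is meant to be invoked server-side (illustrative values; credentialsPath is whatever index.ts already supplies): the client-secret JSON still lives on disk, but the cached OAuth token is now looked up per user in Mongo instead of being read from tokenPath.

import { GoogleApiServerUtils } from "./apis/google/GoogleApiServerUtils";

const information: GoogleApiServerUtils.CredentialInformation = {
    credentialsPath: "./credentials/google_docs_credentials.json", // assumed location
    userId: "<id forwarded by PostToServer in req.body.userId>"    // hypothetical value
};

GoogleApiServerUtils.RetrieveAccessToken(information).then(
    token => console.log(`Bearer ${token}`),
    error => console.error(error)
);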
diff --git a/src/server/apis/google/GooglePhotosUploadUtils.ts b/src/server/apis/google/GooglePhotosUploadUtils.ts
index 4dc252577..507a868a3 100644
--- a/src/server/apis/google/GooglePhotosUploadUtils.ts
+++ b/src/server/apis/google/GooglePhotosUploadUtils.ts
@@ -1,16 +1,10 @@
import request = require('request-promise');
import { GoogleApiServerUtils } from './GoogleApiServerUtils';
-import * as fs from 'fs';
-import { Utils } from '../../../Utils';
import * as path from 'path';
-import { Opt } from '../../../new_fields/Doc';
-import * as sharp from 'sharp';
import { MediaItemCreationResult } from './SharedTypes';
import { NewMediaItem } from "../../index";
import BatchedArray, { FixedBatcher, TimeUnit, Interval } from "array-batcher";
-const uploadDirectory = path.join(__dirname, "../../public/files/");
-
export namespace GooglePhotosUploadUtils {
export interface Paths {
@@ -31,12 +25,9 @@ export namespace GooglePhotosUploadUtils {
});
let Bearer: string;
- let Paths: Paths;
- export const initialize = async (paths: Paths) => {
- Paths = paths;
- const { tokenPath, credentialsPath } = paths;
- const token = await GoogleApiServerUtils.RetrieveAccessToken({ tokenPath, credentialsPath });
+ export const initialize = async (information: GoogleApiServerUtils.CredentialInformation) => {
+ const token = await GoogleApiServerUtils.RetrieveAccessToken(information);
Bearer = `Bearer ${token}`;
};
@@ -87,138 +78,4 @@ export namespace GooglePhotosUploadUtils {
return { newMediaItemResults };
};
-}
-
-export namespace DownloadUtils {
-
- export interface Size {
- width: number;
- suffix: string;
- }
-
- export const Sizes: { [size: string]: Size } = {
- SMALL: { width: 100, suffix: "_s" },
- MEDIUM: { width: 400, suffix: "_m" },
- LARGE: { width: 900, suffix: "_l" },
- };
-
- const gifs = [".gif"];
- const pngs = [".png"];
- const jpgs = [".jpg", ".jpeg"];
- const imageFormats = [...pngs, ...jpgs, ...gifs];
- const videoFormats = [".mov", ".mp4"];
-
- const size = "content-length";
- const type = "content-type";
-
- export interface UploadInformation {
- mediaPaths: string[];
- fileNames: { [key: string]: string };
- contentSize?: number;
- contentType?: string;
- }
-
- const generate = (prefix: string, url: string) => `${prefix}upload_${Utils.GenerateGuid()}${path.extname(url).toLowerCase()}`;
- const sanitize = (filename: string) => filename.replace(/\s+/g, "_");
-
- export interface InspectionResults {
- isLocal: boolean;
- stream: any;
- normalizedUrl: string;
- contentSize?: number;
- contentType?: string;
- }
-
- export const InspectImage = async (url: string): Promise<InspectionResults> => {
- const { isLocal, stream, normalized: normalizedUrl } = classify(url);
- const results = {
- isLocal,
- stream,
- normalizedUrl
- };
- if (isLocal) {
- return results;
- }
- const metadata = (await new Promise<any>((resolve, reject) => {
- request.head(url, async (error, res) => {
- if (error) {
- return reject(error);
- }
- resolve(res);
- });
- })).headers;
- return {
- contentSize: parseInt(metadata[size]),
- contentType: metadata[type],
- ...results
- };
- };
-
- export const UploadImage = async (metadata: InspectionResults, filename?: string, prefix = ""): Promise<Opt<UploadInformation>> => {
- const { isLocal, stream, normalizedUrl, contentSize, contentType } = metadata;
- const resolved = filename ? sanitize(filename) : generate(prefix, normalizedUrl);
- let extension = path.extname(normalizedUrl) || path.extname(resolved);
- extension && (extension = extension.toLowerCase());
- let information: UploadInformation = {
- mediaPaths: [],
- fileNames: { clean: resolved },
- contentSize,
- contentType,
- };
- return new Promise<UploadInformation>(async (resolve, reject) => {
- const resizers = [
- { resizer: sharp().rotate(), suffix: "_o" },
- ...Object.values(Sizes).map(size => ({
- resizer: sharp().resize(size.width, undefined, { withoutEnlargement: true }).rotate(),
- suffix: size.suffix
- }))
- ];
- let nonVisual = false;
- if (pngs.includes(extension)) {
- resizers.forEach(element => element.resizer = element.resizer.png());
- } else if (jpgs.includes(extension)) {
- resizers.forEach(element => element.resizer = element.resizer.jpeg());
- } else if (![...imageFormats, ...videoFormats].includes(extension.toLowerCase())) {
- nonVisual = true;
- }
- if (imageFormats.includes(extension)) {
- for (let resizer of resizers) {
- const suffix = resizer.suffix;
- let mediaPath: string;
- await new Promise<void>(resolve => {
- const filename = resolved.substring(0, resolved.length - extension.length) + suffix + extension;
- information.mediaPaths.push(mediaPath = uploadDirectory + filename);
- information.fileNames[suffix] = filename;
- stream(normalizedUrl).pipe(resizer.resizer).pipe(fs.createWriteStream(mediaPath))
- .on('close', resolve)
- .on('error', reject);
- });
- }
- }
- if (!isLocal || nonVisual) {
- await new Promise<void>(resolve => {
- stream(normalizedUrl).pipe(fs.createWriteStream(uploadDirectory + resolved)).on('close', resolve);
- });
- }
- resolve(information);
- });
- };
-
- const classify = (url: string) => {
- const isLocal = /Dash-Web(\\|\/)src(\\|\/)server(\\|\/)public(\\|\/)files/g.test(url);
- return {
- isLocal,
- stream: isLocal ? fs.createReadStream : request,
- normalized: isLocal ? path.normalize(url) : url
- };
- };
-
- export const createIfNotExists = async (path: string) => {
- if (await new Promise<boolean>(resolve => fs.exists(path, resolve))) {
- return true;
- }
- return new Promise<boolean>(resolve => fs.mkdir(path, error => resolve(error === null)));
- };
-
- export const Destroy = (mediaPath: string) => new Promise<boolean>(resolve => fs.unlink(mediaPath, error => resolve(error === null)));
} \ No newline at end of file
diff --git a/src/server/apis/google/existing_uploads.json b/src/server/apis/google/existing_uploads.json
deleted file mode 100644
index e69de29bb..000000000
--- a/src/server/apis/google/existing_uploads.json
+++ /dev/null
diff --git a/src/server/credentials/google_docs_token.json b/src/server/credentials/google_docs_token.json
index ee44c3f30..8bd62bdfa 100644
--- a/src/server/credentials/google_docs_token.json
+++ b/src/server/credentials/google_docs_token.json
@@ -1 +1,7 @@
-{"access_token":"ya29.GlyKBznz91v_qb8RYt4PT40Hp106N08Yk64UjMAKllBsIqJQEzBkxLbB5q5paydywHzguQYSNup5fT7ojJTDU4CMZdPbPKGcjQz17w_CospcG-8Buz94KZptvlQ_pQ","refresh_token":"1/HTv_xFHszu2Nf3iiFrUTaeKzC_Vp2-6bpIB06xW_WHI","scope":"https://www.googleapis.com/auth/presentations.readonly https://www.googleapis.com/auth/documents.readonly https://www.googleapis.com/auth/drive.file https://www.googleapis.com/auth/documents https://www.googleapis.com/auth/photoslibrary https://www.googleapis.com/auth/photoslibrary.appendonly https://www.googleapis.com/auth/drive https://www.googleapis.com/auth/presentations https://www.googleapis.com/auth/photoslibrary.sharing","token_type":"Bearer","expiry_date":1569093749804} \ No newline at end of file
+{
+ "access_token": "ya29.ImCOBwXgckGbyHNLMX7r-13B5VDgxfzF5mQ7lFJ0FX5GF5EuAPBBN5_ijLnNLC4yw4xtFjJOkEtKiYr-60OIm4oOnowEJpZMyRGxFMy_Q8MTnzDpeN-7Di_baUzcu7m_KWM",
+ "refresh_token": "1/HTv_xFHszu2Nf3iiFrUTaeKzC_Vp2-6bpIB06xW_WHI",
+ "scope": "https://www.googleapis.com/auth/presentations.readonly https://www.googleapis.com/auth/documents.readonly https://www.googleapis.com/auth/drive.file https://www.googleapis.com/auth/documents https://www.googleapis.com/auth/photoslibrary https://www.googleapis.com/auth/photoslibrary.appendonly https://www.googleapis.com/auth/drive https://www.googleapis.com/auth/presentations https://www.googleapis.com/auth/photoslibrary.sharing",
+ "token_type": "Bearer",
+ "expiry_date": 1569366907812
+} \ No newline at end of file
diff --git a/src/server/database.ts b/src/server/database.ts
index a7254fb0c..ce29478ad 100644
--- a/src/server/database.ts
+++ b/src/server/database.ts
@@ -1,209 +1,267 @@
import * as mongodb from 'mongodb';
import { Transferable } from './Message';
+import { Opt } from '../new_fields/Doc';
+import { Utils } from '../Utils';
+import { DashUploadUtils } from './DashUploadUtils';
-export class Database {
- public static DocumentsCollection = 'documents';
- public static Instance = new Database();
- private MongoClient = mongodb.MongoClient;
- private url = 'mongodb://localhost:27017/Dash';
- private currentWrites: { [id: string]: Promise<void> } = {};
- private db?: mongodb.Db;
- private onConnect: (() => void)[] = [];
-
- constructor() {
- this.MongoClient.connect(this.url, (err, client) => {
- this.db = client.db();
- this.onConnect.forEach(fn => fn());
- });
- }
+export namespace Database {
- public update(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateWriteOpResult) => void, upsert = true, collectionName = Database.DocumentsCollection) {
- if (this.db) {
- let collection = this.db.collection(collectionName);
- const prom = this.currentWrites[id];
- let newProm: Promise<void>;
- const run = (): Promise<void> => {
- return new Promise<void>(resolve => {
- collection.updateOne({ _id: id }, value, { upsert }
- , (err, res) => {
- if (this.currentWrites[id] === newProm) {
- delete this.currentWrites[id];
- }
- resolve();
- callback(err, res);
- });
- });
- };
- newProm = prom ? prom.then(run) : run();
- this.currentWrites[id] = newProm;
- } else {
- this.onConnect.push(() => this.update(id, value, callback, upsert, collectionName));
- }
- }
+ class Database {
+ public static DocumentsCollection = 'documents';
+ private MongoClient = mongodb.MongoClient;
+ private url = 'mongodb://localhost:27017/Dash';
+ private currentWrites: { [id: string]: Promise<void> } = {};
+ private db?: mongodb.Db;
+ private onConnect: (() => void)[] = [];
- public replace(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateWriteOpResult) => void, upsert = true, collectionName = Database.DocumentsCollection) {
- if (this.db) {
- let collection = this.db.collection(collectionName);
- const prom = this.currentWrites[id];
- let newProm: Promise<void>;
- const run = (): Promise<void> => {
- return new Promise<void>(resolve => {
- collection.replaceOne({ _id: id }, value, { upsert }
- , (err, res) => {
- if (this.currentWrites[id] === newProm) {
- delete this.currentWrites[id];
- }
- resolve();
- callback(err, res);
- });
- });
- };
- newProm = prom ? prom.then(run) : run();
- this.currentWrites[id] = newProm;
- } else {
- this.onConnect.push(() => this.replace(id, value, callback, upsert, collectionName));
+ constructor() {
+ this.MongoClient.connect(this.url, (err, client) => {
+ this.db = client.db();
+ this.onConnect.forEach(fn => fn());
+ });
}
- }
- public delete(query: any, collectionName?: string): Promise<mongodb.DeleteWriteOpResultObject>;
- public delete(id: string, collectionName?: string): Promise<mongodb.DeleteWriteOpResultObject>;
- public delete(id: any, collectionName = Database.DocumentsCollection) {
- if (typeof id === "string") {
- id = { _id: id };
- }
- if (this.db) {
- const db = this.db;
- return new Promise(res => db.collection(collectionName).deleteMany(id, (err, result) => res(result)));
- } else {
- return new Promise(res => this.onConnect.push(() => res(this.delete(id, collectionName))));
+ public update(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateWriteOpResult) => void, upsert = true, collectionName = Database.DocumentsCollection) {
+ if (this.db) {
+ let collection = this.db.collection(collectionName);
+ const prom = this.currentWrites[id];
+ let newProm: Promise<void>;
+ const run = (): Promise<void> => {
+ return new Promise<void>(resolve => {
+ collection.updateOne({ _id: id }, value, { upsert }
+ , (err, res) => {
+ if (this.currentWrites[id] === newProm) {
+ delete this.currentWrites[id];
+ }
+ resolve();
+ callback(err, res);
+ });
+ });
+ };
+ newProm = prom ? prom.then(run) : run();
+ this.currentWrites[id] = newProm;
+ } else {
+ this.onConnect.push(() => this.update(id, value, callback, upsert, collectionName));
+ }
}
- }
- public deleteAll(collectionName = Database.DocumentsCollection): Promise<any> {
- return new Promise(res => {
+ public replace(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateWriteOpResult) => void, upsert = true, collectionName = Database.DocumentsCollection) {
if (this.db) {
- this.db.collection(collectionName).deleteMany({}, res);
+ let collection = this.db.collection(collectionName);
+ const prom = this.currentWrites[id];
+ let newProm: Promise<void>;
+ const run = (): Promise<void> => {
+ return new Promise<void>(resolve => {
+ collection.replaceOne({ _id: id }, value, { upsert }
+ , (err, res) => {
+ if (this.currentWrites[id] === newProm) {
+ delete this.currentWrites[id];
+ }
+ resolve();
+ callback(err, res);
+ });
+ });
+ };
+ newProm = prom ? prom.then(run) : run();
+ this.currentWrites[id] = newProm;
} else {
- this.onConnect.push(() => this.db && this.db.collection(collectionName).deleteMany({}, res));
+ this.onConnect.push(() => this.replace(id, value, callback, upsert, collectionName));
}
- });
- }
+ }
- public insert(value: any, collectionName = Database.DocumentsCollection) {
- if (this.db) {
- if ("id" in value) {
- value._id = value.id;
- delete value.id;
+ public delete(query: any, collectionName?: string): Promise<mongodb.DeleteWriteOpResultObject>;
+ public delete(id: string, collectionName?: string): Promise<mongodb.DeleteWriteOpResultObject>;
+ public delete(id: any, collectionName = Database.DocumentsCollection) {
+ if (typeof id === "string") {
+ id = { _id: id };
+ }
+ if (this.db) {
+ const db = this.db;
+ return new Promise(res => db.collection(collectionName).deleteMany(id, (err, result) => res(result)));
+ } else {
+ return new Promise(res => this.onConnect.push(() => res(this.delete(id, collectionName))));
}
- const id = value._id;
- const collection = this.db.collection(collectionName);
- const prom = this.currentWrites[id];
- let newProm: Promise<void>;
- const run = (): Promise<void> => {
- return new Promise<void>(resolve => {
- collection.insertOne(value, (err, res) => {
- if (this.currentWrites[id] === newProm) {
- delete this.currentWrites[id];
- }
- resolve();
- });
- });
- };
- newProm = prom ? prom.then(run) : run();
- this.currentWrites[id] = newProm;
- } else {
- this.onConnect.push(() => this.insert(value, collectionName));
}
- }
- public getDocument(id: string, fn: (result?: Transferable) => void, collectionName = Database.DocumentsCollection) {
- if (this.db) {
- this.db.collection(collectionName).findOne({ _id: id }, (err, result) => {
- if (result) {
- result.id = result._id;
- delete result._id;
- fn(result);
+ public async deleteAll(collectionName = Database.DocumentsCollection, persist = true): Promise<any> {
+ return new Promise(resolve => {
+ const executor = async (database: mongodb.Db) => {
+ if (persist) {
+ await database.collection(collectionName).deleteMany({});
+ } else {
+ await database.dropCollection(collectionName);
+ }
+ resolve();
+ };
+ if (this.db) {
+ executor(this.db);
} else {
- fn(undefined);
+ this.onConnect.push(() => this.db && executor(this.db));
}
});
- } else {
- this.onConnect.push(() => this.getDocument(id, fn, collectionName));
}
- }
- public getDocuments(ids: string[], fn: (result: Transferable[]) => void, collectionName = Database.DocumentsCollection) {
- if (this.db) {
- this.db.collection(collectionName).find({ _id: { "$in": ids } }).toArray((err, docs) => {
- if (err) {
- console.log(err.message);
- console.log(err.errmsg);
+ public async insert(value: any, collectionName = Database.DocumentsCollection) {
+ if (this.db) {
+ if ("id" in value) {
+ value._id = value.id;
+ delete value.id;
}
- fn(docs.map(doc => {
- doc.id = doc._id;
- delete doc._id;
- return doc;
- }));
- });
- } else {
- this.onConnect.push(() => this.getDocuments(ids, fn, collectionName));
+ const id = value._id;
+ const collection = this.db.collection(collectionName);
+ const prom = this.currentWrites[id];
+ let newProm: Promise<void>;
+ const run = (): Promise<void> => {
+ return new Promise<void>(resolve => {
+ collection.insertOne(value, (err, res) => {
+ if (this.currentWrites[id] === newProm) {
+ delete this.currentWrites[id];
+ }
+ resolve();
+ });
+ });
+ };
+ newProm = prom ? prom.then(run) : run();
+ this.currentWrites[id] = newProm;
+ return newProm;
+ } else {
+ this.onConnect.push(() => this.insert(value, collectionName));
+ }
}
- }
- public async visit(ids: string[], fn: (result: any) => string[], collectionName = "newDocuments"): Promise<void> {
- if (this.db) {
- const visited = new Set<string>();
- while (ids.length) {
- const count = Math.min(ids.length, 1000);
- const index = ids.length - count;
- const fetchIds = ids.splice(index, count).filter(id => !visited.has(id));
- if (!fetchIds.length) {
- continue;
- }
- const docs = await new Promise<{ [key: string]: any }[]>(res => Database.Instance.getDocuments(fetchIds, res, "newDocuments"));
- for (const doc of docs) {
- const id = doc.id;
- visited.add(id);
- ids.push(...fn(doc));
+ public getDocument(id: string, fn: (result?: Transferable) => void, collectionName = Database.DocumentsCollection) {
+ if (this.db) {
+ this.db.collection(collectionName).findOne({ _id: id }, (err, result) => {
+ if (result) {
+ result.id = result._id;
+ delete result._id;
+ fn(result);
+ } else {
+ fn(undefined);
+ }
+ });
+ } else {
+ this.onConnect.push(() => this.getDocument(id, fn, collectionName));
+ }
+ }
+
+ public getDocuments(ids: string[], fn: (result: Transferable[]) => void, collectionName = Database.DocumentsCollection) {
+ if (this.db) {
+ this.db.collection(collectionName).find({ _id: { "$in": ids } }).toArray((err, docs) => {
+ if (err) {
+ console.log(err.message);
+ console.log(err.errmsg);
+ }
+ fn(docs.map(doc => {
+ doc.id = doc._id;
+ delete doc._id;
+ return doc;
+ }));
+ });
+ } else {
+ this.onConnect.push(() => this.getDocuments(ids, fn, collectionName));
+ }
+ }
+
+ public async visit(ids: string[], fn: (result: any) => string[], collectionName = "newDocuments"): Promise<void> {
+ if (this.db) {
+ const visited = new Set<string>();
+ while (ids.length) {
+ const count = Math.min(ids.length, 1000);
+ const index = ids.length - count;
+ const fetchIds = ids.splice(index, count).filter(id => !visited.has(id));
+ if (!fetchIds.length) {
+ continue;
+ }
+ const docs = await new Promise<{ [key: string]: any }[]>(res => Instance.getDocuments(fetchIds, res, "newDocuments"));
+ for (const doc of docs) {
+ const id = doc.id;
+ visited.add(id);
+ ids.push(...fn(doc));
+ }
}
+
+ } else {
+ return new Promise(res => {
+ this.onConnect.push(() => {
+ this.visit(ids, fn, collectionName);
+ res();
+ });
+ });
}
+ }
- } else {
- return new Promise(res => {
- this.onConnect.push(() => {
- this.visit(ids, fn, collectionName);
- res();
+ public query(query: { [key: string]: any }, projection?: { [key: string]: 0 | 1 }, collectionName = "newDocuments"): Promise<mongodb.Cursor> {
+ if (this.db) {
+ let cursor = this.db.collection(collectionName).find(query);
+ if (projection) {
+ cursor = cursor.project(projection);
+ }
+ return Promise.resolve<mongodb.Cursor>(cursor);
+ } else {
+ return new Promise<mongodb.Cursor>(res => {
+ this.onConnect.push(() => res(this.query(query, projection, collectionName)));
});
- });
+ }
}
- }
- public query(query: { [key: string]: any }, projection?: { [key: string]: 0 | 1 }, collectionName = "newDocuments"): Promise<mongodb.Cursor> {
- if (this.db) {
- let cursor = this.db.collection(collectionName).find(query);
- if (projection) {
- cursor = cursor.project(projection);
+ public updateMany(query: any, update: any, collectionName = "newDocuments") {
+ if (this.db) {
+ const db = this.db;
+ return new Promise<mongodb.WriteOpResult>(res => db.collection(collectionName).update(query, update, (_, result) => res(result)));
+ } else {
+ return new Promise<mongodb.WriteOpResult>(res => {
+ this.onConnect.push(() => this.updateMany(query, update, collectionName).then(res));
+ });
}
- return Promise.resolve<mongodb.Cursor>(cursor);
- } else {
- return new Promise<mongodb.Cursor>(res => {
- this.onConnect.push(() => res(this.query(query, projection, collectionName)));
- });
}
- }
- public updateMany(query: any, update: any, collectionName = "newDocuments") {
- if (this.db) {
- const db = this.db;
- return new Promise<mongodb.WriteOpResult>(res => db.collection(collectionName).update(query, update, (_, result) => res(result)));
- } else {
- return new Promise<mongodb.WriteOpResult>(res => {
- this.onConnect.push(() => this.updateMany(query, update, collectionName).then(res));
- });
+ public print() {
+ console.log("db says hi!");
}
}
- public print() {
- console.log("db says hi!");
+ export const Instance = new Database();
+
+ export namespace Auxiliary {
+
+ export enum AuxiliaryCollections {
+ GooglePhotosUploadHistory = "UploadedFromGooglePhotos"
+ }
+
+ const GoogleAuthentication = "GoogleAuthentication";
+
+ const SanitizedSingletonQuery = async (query: { [key: string]: any }, collection: string) => {
+ const cursor = await Instance.query(query, undefined, collection);
+ const existing = (await cursor.toArray())[0];
+ if (existing) {
+ delete existing._id;
+ }
+ return existing;
+ };
+
+ export const QueryUploadHistory = async (contentSize: number): Promise<Opt<DashUploadUtils.UploadInformation>> => {
+ return SanitizedSingletonQuery({ contentSize }, AuxiliaryCollections.GooglePhotosUploadHistory);
+ };
+
+ export const LogUpload = async (information: DashUploadUtils.UploadInformation) => {
+ const bundle = {
+ _id: Utils.GenerateDeterministicGuid(String(information.contentSize!)),
+ ...information
+ };
+ return Instance.insert(bundle, AuxiliaryCollections.GooglePhotosUploadHistory);
+ };
+
+ export const DeleteAll = async (persist = false) => {
+ const collectionNames = Object.values(AuxiliaryCollections);
+ const pendingDeletions = collectionNames.map(name => Instance.deleteAll(name, persist));
+ return Promise.all(pendingDeletions);
+ };
+
+ export const FetchGoogleAuthenticationToken = async (userId: string) => {
+ return SanitizedSingletonQuery({ userId }, GoogleAuthentication);
+ };
+
}
-}
+
+} \ No newline at end of file
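For context, a sketch (mirroring the index.ts changes below, with a hypothetical helper name) of the deduplication flow the new Auxiliary namespace enables: uploads are keyed by a deterministic guid derived from contentSize, so a media item already mirrored once is simply reused.

import { Database } from "./server/database";
import { DashUploadUtils } from "./server/DashUploadUtils";

async function uploadOnce(metadata: DashUploadUtils.InspectionResults, filename?: string) {
    const found = await Database.Auxiliary.QueryUploadHistory(metadata.contentSize!);
    if (found) {
        return found; // previously uploaded: reuse the recorded paths
    }
    const upload = await DashUploadUtils.UploadImage(metadata, filename);
    if (upload) {
        await Database.Auxiliary.LogUpload(upload);
    }
    return upload;
}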
diff --git a/src/server/index.ts b/src/server/index.ts
index 4c4cb84d6..386ecce4d 100644
--- a/src/server/index.ts
+++ b/src/server/index.ts
@@ -41,13 +41,14 @@ var AdmZip = require('adm-zip');
import * as YoutubeApi from "./apis/youtube/youtubeApiSample";
import { Response } from 'express-serve-static-core';
import { GoogleApiServerUtils } from "./apis/google/GoogleApiServerUtils";
-import { GooglePhotosUploadUtils, DownloadUtils as UploadUtils } from './apis/google/GooglePhotosUploadUtils';
+import { GooglePhotosUploadUtils } from './apis/google/GooglePhotosUploadUtils';
const MongoStore = require('connect-mongo')(session);
const mongoose = require('mongoose');
const probe = require("probe-image-size");
import * as qs from 'query-string';
import { Opt } from '../new_fields/Doc';
import BatchedArray, { TimeUnit } from "array-batcher";
+import { DashUploadUtils } from './DashUploadUtils';
const download = (url: string, dest: fs.PathLike) => request.get(url).pipe(fs.createWriteStream(dest));
let youtubeApiKey: string;
@@ -581,8 +582,8 @@ app.post(
for (const key in files) {
const { type, path: location, name } = files[key];
const filename = path.basename(location);
- const metadata = await UploadUtils.InspectImage(uploadDirectory + filename);
- await UploadUtils.UploadImage(metadata, filename).catch(() => console.log(`Unable to process ${filename}`));
+ const metadata = await DashUploadUtils.InspectImage(uploadDirectory + filename);
+ await DashUploadUtils.UploadImage(metadata, filename).catch(() => console.log(`Unable to process ${filename}`));
results.push({ name, type, path: `/files/${filename}` });
}
_success(res, results);
@@ -809,7 +810,7 @@ const EndpointHandlerMap = new Map<GoogleApiServerUtils.Action, GoogleApiServerU
app.post(RouteStore.googleDocs + "/:sector/:action", (req, res) => {
let sector: GoogleApiServerUtils.Service = req.params.sector as GoogleApiServerUtils.Service;
let action: GoogleApiServerUtils.Action = req.params.action as GoogleApiServerUtils.Action;
- GoogleApiServerUtils.GetEndpoint(GoogleApiServerUtils.Service[sector], { credentialsPath, tokenPath }).then(endpoint => {
+ GoogleApiServerUtils.GetEndpoint(GoogleApiServerUtils.Service[sector], { credentialsPath, userId: req.body.userId }).then(endpoint => {
let handler = EndpointHandlerMap.get(action);
if (endpoint && handler) {
let execute = handler(endpoint, req.body).then(
@@ -823,7 +824,7 @@ app.post(RouteStore.googleDocs + "/:sector/:action", (req, res) => {
});
});
-app.get(RouteStore.googlePhotosAccessToken, (req, res) => GoogleApiServerUtils.RetrieveAccessToken({ credentialsPath, tokenPath }).then(token => res.send(token)));
+app.get(RouteStore.googlePhotosAccessToken, (req, res) => GoogleApiServerUtils.RetrieveAccessToken({ credentialsPath, userId: req.body.userId }).then(token => res.send(token)));
const tokenError = "Unable to successfully upload bytes for all images!";
const mediaError = "Unable to convert all uploaded bytes to media items!";
@@ -836,12 +837,13 @@ export interface NewMediaItem {
}
app.post(RouteStore.googlePhotosMediaUpload, async (req, res) => {
- const mediaInput: GooglePhotosUploadUtils.MediaInput[] = req.body.media;
- await GooglePhotosUploadUtils.initialize({ uploadDirectory, credentialsPath, tokenPath });
+ const { userId, media } = req.body;
+
+ await GooglePhotosUploadUtils.initialize({ credentialsPath, userId });
let failed = 0;
- const newMediaItems = await BatchedArray.from(mediaInput, { batchSize: 25 }).batchedMapInterval(
+ const newMediaItems = await BatchedArray.from<GooglePhotosUploadUtils.MediaInput>(media, { batchSize: 25 }).batchedMapInterval(
async (batch: GooglePhotosUploadUtils.MediaInput[]) => {
const newMediaItems: NewMediaItem[] = [];
for (let element of batch) {
@@ -879,31 +881,36 @@ const prefix = "google_photos_";
const downloadError = "Encountered an error while executing downloads.";
const requestError = "Unable to execute download: the body's media items were malformed.";
-app.get("/gapiCleanup", (req, res) => {
- write_text_file(file, "");
+app.get("/deleteWithAux", async (req, res) => {
+ await Database.Auxiliary.DeleteAll();
res.redirect(RouteStore.delete);
});
-const file = "./apis/google/existing_uploads.json";
+const UploadError = (count: number) => `Unable to upload ${count} images to Dash's server`;
app.post(RouteStore.googlePhotosMediaDownload, async (req, res) => {
const contents: { mediaItems: MediaItem[] } = req.body;
+ let failed = 0;
if (contents) {
- const completed: Opt<UploadUtils.UploadInformation>[] = [];
- const content = await read_text_file(file);
- let existing = content.length ? JSON.parse(content) : {};
+ const completed: Opt<DashUploadUtils.UploadInformation>[] = [];
for (let item of contents.mediaItems) {
- const { contentSize, ...attributes } = await UploadUtils.InspectImage(item.baseUrl);
- const found: UploadUtils.UploadInformation = existing[contentSize!];
+ const { contentSize, ...attributes } = await DashUploadUtils.InspectImage(item.baseUrl);
+ const found: Opt<DashUploadUtils.UploadInformation> = await Database.Auxiliary.QueryUploadHistory(contentSize!);
if (!found) {
- const upload = await UploadUtils.UploadImage({ contentSize, ...attributes }, item.filename, prefix).catch(error => _error(res, downloadError, error));
- upload && completed.push(existing[contentSize!] = upload);
+ const upload = await DashUploadUtils.UploadImage({ contentSize, ...attributes }, item.filename, prefix).catch(error => _error(res, downloadError, error));
+ if (upload) {
+ completed.push(upload);
+ await Database.Auxiliary.LogUpload(upload);
+ } else {
+ failed++;
+ }
} else {
completed.push(found);
}
}
- await write_text_file(file, JSON.stringify(existing));
- _success(res, completed);
- return;
+ if (failed) {
+ return _error(res, UploadError(failed));
+ }
+ return _success(res, completed);
}
_invalid(res, requestError);
});
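Finally, a small sketch of the array-batcher pattern used in the googlePhotosMediaUpload route above; the interval argument's shape ({ magnitude, unit }) is assumed from the library's exported TimeUnit/Interval types rather than shown in this diff.

import BatchedArray, { TimeUnit } from "array-batcher";

async function demo() {
    const doubled = await BatchedArray.from([1, 2, 3, 4, 5], { batchSize: 2 }).batchedMapInterval(
        async (batch: number[]) => batch.map(n => n * 2),
        { magnitude: 100, unit: TimeUnit.Milliseconds } // assumed interval shape
    );
    return doubled; // [2, 4, 6, 8, 10]
}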