From 6b6488be27a71d9dba0ae5959284ae9a18ae9230 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Thu, 19 Sep 2019 14:13:59 -0400 Subject: extensions fixes and tracking albums --- src/extensions/ArrayExtensions.ts | 341 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 341 insertions(+) create mode 100644 src/extensions/ArrayExtensions.ts (limited to 'src/extensions/ArrayExtensions.ts') diff --git a/src/extensions/ArrayExtensions.ts b/src/extensions/ArrayExtensions.ts new file mode 100644 index 000000000..1190aa48c --- /dev/null +++ b/src/extensions/ArrayExtensions.ts @@ -0,0 +1,341 @@ +interface BatchContext { + completedBatches: number; + remainingBatches: number; +} +type BatchConverterSync = (batch: I[], context: BatchContext) => O[]; +type BatchHandlerSync = (batch: I[], context: BatchContext) => void; +type BatchConverterAsync = (batch: I[], context: BatchContext) => Promise; +type BatchHandlerAsync = (batch: I[], context: BatchContext) => Promise; +type BatchConverter = BatchConverterSync | BatchConverterAsync; +type BatchHandler = BatchHandlerSync | BatchHandlerAsync; +type FixedBatcher = { batchSize: number } | { batchCount: number, mode?: Mode }; +interface ExecutorResult { + updated: A; + makeNextBatch: boolean; +} +interface PredicateBatcher { + executor: (element: I, accumulator: A) => ExecutorResult; + initial: A; + persistAccumulator?: boolean; +} +interface PredicateBatcherAsyncInterface { + executor: (element: I, accumulator: A) => Promise>; + initial: A; + persistAccumulator?: boolean; +} +type PredicateBatcherAsync = PredicateBatcher | PredicateBatcherAsyncInterface; +type Batcher = FixedBatcher | PredicateBatcher; +type BatcherAsync = Batcher | PredicateBatcherAsync; + +enum TimeUnit { + Milliseconds, + Seconds, + Minutes +} + +interface Interval { + magnitude: number; + unit: TimeUnit; +} + +enum Mode { + Balanced, + Even +} + +const convert = (interval: Interval) => { + const { magnitude, unit } = interval; + switch (unit) { + default: + case TimeUnit.Milliseconds: + return magnitude; + case TimeUnit.Seconds: + return magnitude * 1000; + case TimeUnit.Minutes: + return magnitude * 1000 * 60; + } +}; + +interface Array { + fixedBatch(batcher: FixedBatcher): T[][]; + predicateBatch(batcher: PredicateBatcher): T[][]; + predicateBatchAsync(batcher: PredicateBatcherAsync): Promise; + batch(batcher: Batcher): T[][]; + batchAsync(batcher: BatcherAsync): Promise; + + batchedForEach(batcher: Batcher, handler: BatchHandlerSync): void; + batchedMap(batcher: Batcher, handler: BatchConverterSync): O[]; + + batchedForEachAsync(batcher: Batcher, handler: BatchHandler): Promise; + batchedMapAsync(batcher: Batcher, handler: BatchConverter): Promise; + + batchedForEachInterval(batcher: Batcher, handler: BatchHandler, interval: Interval): Promise; + batchedMapInterval(batcher: Batcher, handler: BatchConverter, interval: Interval): Promise; + + lastElement(): T; +} + +module.exports.AssignArrayExtensions = function () { + Array.prototype.fixedBatch = module.exports.fixedBatch; + Array.prototype.predicateBatch = module.exports.predicateBatch; + Array.prototype.predicateBatchAsync = module.exports.predicateBatchAsync; + Array.prototype.batch = module.exports.batch; + Array.prototype.batchAsync = module.exports.batchAsync; + Array.prototype.batchedForEach = module.exports.batchedForEach; + Array.prototype.batchedMap = module.exports.batchedMap; + Array.prototype.batchedForEachAsync = module.exports.batchedForEachAsync; + Array.prototype.batchedMapAsync = module.exports.batchedMapAsync; + 
Array.prototype.batchedForEachInterval = module.exports.batchedForEachInterval; + Array.prototype.batchedMapInterval = module.exports.batchedMapInterval; + Array.prototype.lastElement = module.exports.lastElement; +}; + +module.exports.fixedBatch = function (batcher: FixedBatcher): T[][] { + const batches: T[][] = []; + const length = this.length; + let i = 0; + if ("batchSize" in batcher) { + const { batchSize } = batcher; + while (i < this.length) { + const cap = Math.min(i + batchSize, length); + batches.push(this.slice(i, i = cap)); + } + } else if ("batchCount" in batcher) { + let { batchCount, mode } = batcher; + const resolved = mode || Mode.Balanced; + if (batchCount < 1) { + throw new Error("Batch count must be a positive integer!"); + } + if (batchCount === 1) { + return [this]; + } + if (batchCount >= this.length) { + return this.map((element: T) => [element]); + } + + let length = this.length; + let size: number; + + if (length % batchCount === 0) { + size = Math.floor(length / batchCount); + while (i < length) { + batches.push(this.slice(i, i += size)); + } + } else if (resolved === Mode.Balanced) { + while (i < length) { + size = Math.ceil((length - i) / batchCount--); + batches.push(this.slice(i, i += size)); + } + } else { + batchCount--; + size = Math.floor(length / batchCount); + if (length % size === 0) { + size--; + } + while (i < size * batchCount) { + batches.push(this.slice(i, i += size)); + } + batches.push(this.slice(size * batchCount)); + } + } + return batches; +}; + +module.exports.predicateBatch = function (batcher: PredicateBatcher): T[][] { + const batches: T[][] = []; + let batch: T[] = []; + const { executor, initial, persistAccumulator } = batcher; + let accumulator = initial; + for (let element of this) { + const { updated, makeNextBatch } = executor(element, accumulator); + accumulator = updated; + if (!makeNextBatch) { + batch.push(element); + } else { + batches.push(batch); + batch = [element]; + if (!persistAccumulator) { + accumulator = initial; + } + } + } + batches.push(batch); + return batches; +}; + +module.exports.predicateBatchAsync = async function (batcher: PredicateBatcherAsync): Promise { + const batches: T[][] = []; + let batch: T[] = []; + const { executor, initial, persistAccumulator } = batcher; + let accumulator: A = initial; + for (let element of this) { + const { updated, makeNextBatch } = await executor(element, accumulator); + accumulator = updated; + if (!makeNextBatch) { + batch.push(element); + } else { + batches.push(batch); + batch = [element]; + if (!persistAccumulator) { + accumulator = initial; + } + } + } + batches.push(batch); + return batches; +}; + +module.exports.batch = function (batcher: Batcher): T[][] { + if ("executor" in batcher) { + return this.predicateBatch(batcher); + } else { + return this.fixedBatch(batcher); + } +}; + +module.exports.batchAsync = async function (batcher: BatcherAsync): Promise { + if ("executor" in batcher) { + return this.predicateBatchAsync(batcher); + } else { + return this.fixedBatch(batcher); + } +}; + +module.exports.batchedForEach = function (batcher: Batcher, handler: BatchHandlerSync): void { + if (this.length) { + let completed = 0; + const batches = this.batch(batcher); + const quota = batches.length; + for (let batch of batches) { + const context: BatchContext = { + completedBatches: completed, + remainingBatches: quota - completed, + }; + handler(batch, context); + completed++; + } + } +}; + +module.exports.batchedMap = function (batcher: Batcher, handler: BatchConverterSync): 
O[] { + if (!this.length) { + return []; + } + let collector: O[] = []; + let completed = 0; + const batches = this.batch(batcher); + const quota = batches.length; + for (let batch of batches) { + const context: BatchContext = { + completedBatches: completed, + remainingBatches: quota - completed, + }; + collector.push(...handler(batch, context)); + completed++; + } + return collector; +}; + +module.exports.batchedForEachAsync = async function (batcher: BatcherAsync, handler: BatchHandler): Promise { + if (this.length) { + let completed = 0; + const batches = await this.batchAsync(batcher); + const quota = batches.length; + for (let batch of batches) { + const context: BatchContext = { + completedBatches: completed, + remainingBatches: quota - completed, + }; + await handler(batch, context); + completed++; + } + } +}; + +module.exports.batchedMapAsync = async function (batcher: BatcherAsync, handler: BatchConverter): Promise { + if (!this.length) { + return []; + } + let collector: O[] = []; + let completed = 0; + const batches = await this.batchAsync(batcher); + const quota = batches.length; + for (let batch of batches) { + const context: BatchContext = { + completedBatches: completed, + remainingBatches: quota - completed, + }; + collector.push(...(await handler(batch, context))); + completed++; + } + return collector; +}; + +module.exports.batchedForEachInterval = async function (batcher: BatcherAsync, handler: BatchHandler, interval: Interval): Promise { + if (!this.length) { + return; + } + const batches = await this.batchAsync(batcher); + const quota = batches.length; + return new Promise(async resolve => { + const iterator = batches[Symbol.iterator](); + let completed = 0; + while (true) { + const next = iterator.next(); + await new Promise(resolve => { + setTimeout(async () => { + const batch = next.value; + const context: BatchContext = { + completedBatches: completed, + remainingBatches: quota - completed, + }; + await handler(batch, context); + resolve(); + }, convert(interval)); + }); + if (++completed === quota) { + break; + } + } + resolve(); + }); +}; + +module.exports.batchedMapInterval = async function (batcher: BatcherAsync, handler: BatchConverter, interval: Interval): Promise { + if (!this.length) { + return []; + } + let collector: O[] = []; + const batches = await this.batchAsync(batcher); + const quota = batches.length; + return new Promise(async resolve => { + const iterator = batches[Symbol.iterator](); + let completed = 0; + while (true) { + const next = iterator.next(); + await new Promise(resolve => { + setTimeout(async () => { + const batch = next.value; + const context: BatchContext = { + completedBatches: completed, + remainingBatches: quota - completed, + }; + collector.push(...(await handler(batch, context))); + resolve(); + }, convert(interval)); + }); + if (++completed === quota) { + resolve(collector); + break; + } + } + }); +}; + +module.exports.lastElement = function () { + if (!this.length) { + return undefined; + } + const last: T = this[this.length - 1]; + return last; +}; -- cgit v1.2.3-70-g09d2 From 3fb212eb601dbaff28a61100fc0478b1c706a3b8 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Thu, 19 Sep 2019 16:30:14 -0400 Subject: organized and genericized --- src/extensions/ArrayExtensions.ts | 537 +++++++++++----------- src/extensions/Extensions.ts | 4 +- src/extensions/StringExtensions.ts | 2 +- src/server/apis/google/GooglePhotosUploadUtils.ts | 6 +- src/server/index.ts | 8 +- 5 files changed, 270 insertions(+), 287 deletions(-) (limited to 
'src/extensions/ArrayExtensions.ts') diff --git a/src/extensions/ArrayExtensions.ts b/src/extensions/ArrayExtensions.ts index 1190aa48c..70508f6df 100644 --- a/src/extensions/ArrayExtensions.ts +++ b/src/extensions/ArrayExtensions.ts @@ -1,61 +1,3 @@ -interface BatchContext { - completedBatches: number; - remainingBatches: number; -} -type BatchConverterSync = (batch: I[], context: BatchContext) => O[]; -type BatchHandlerSync = (batch: I[], context: BatchContext) => void; -type BatchConverterAsync = (batch: I[], context: BatchContext) => Promise; -type BatchHandlerAsync = (batch: I[], context: BatchContext) => Promise; -type BatchConverter = BatchConverterSync | BatchConverterAsync; -type BatchHandler = BatchHandlerSync | BatchHandlerAsync; -type FixedBatcher = { batchSize: number } | { batchCount: number, mode?: Mode }; -interface ExecutorResult { - updated: A; - makeNextBatch: boolean; -} -interface PredicateBatcher { - executor: (element: I, accumulator: A) => ExecutorResult; - initial: A; - persistAccumulator?: boolean; -} -interface PredicateBatcherAsyncInterface { - executor: (element: I, accumulator: A) => Promise>; - initial: A; - persistAccumulator?: boolean; -} -type PredicateBatcherAsync = PredicateBatcher | PredicateBatcherAsyncInterface; -type Batcher = FixedBatcher | PredicateBatcher; -type BatcherAsync = Batcher | PredicateBatcherAsync; - -enum TimeUnit { - Milliseconds, - Seconds, - Minutes -} - -interface Interval { - magnitude: number; - unit: TimeUnit; -} - -enum Mode { - Balanced, - Even -} - -const convert = (interval: Interval) => { - const { magnitude, unit } = interval; - switch (unit) { - default: - case TimeUnit.Milliseconds: - return magnitude; - case TimeUnit.Seconds: - return magnitude * 1000; - case TimeUnit.Minutes: - return magnitude * 1000 * 60; - } -}; - interface Array { fixedBatch(batcher: FixedBatcher): T[][]; predicateBatch(batcher: PredicateBatcher): T[][]; @@ -75,134 +17,185 @@ interface Array { lastElement(): T; } -module.exports.AssignArrayExtensions = function () { - Array.prototype.fixedBatch = module.exports.fixedBatch; - Array.prototype.predicateBatch = module.exports.predicateBatch; - Array.prototype.predicateBatchAsync = module.exports.predicateBatchAsync; - Array.prototype.batch = module.exports.batch; - Array.prototype.batchAsync = module.exports.batchAsync; - Array.prototype.batchedForEach = module.exports.batchedForEach; - Array.prototype.batchedMap = module.exports.batchedMap; - Array.prototype.batchedForEachAsync = module.exports.batchedForEachAsync; - Array.prototype.batchedMapAsync = module.exports.batchedMapAsync; - Array.prototype.batchedForEachInterval = module.exports.batchedForEachInterval; - Array.prototype.batchedMapInterval = module.exports.batchedMapInterval; - Array.prototype.lastElement = module.exports.lastElement; +interface BatchContext { + completedBatches: number; + remainingBatches: number; +} + +interface ExecutorResult { + updated: A; + makeNextBatch: boolean; +} + +interface PredicateBatcherCommon { + initial: A; + persistAccumulator?: boolean; +} + +interface Interval { + magnitude: number; + unit: typeof module.exports.TimeUnit; +} + +type BatchConverterSync = (batch: I[], context: BatchContext) => O[]; +type BatchHandlerSync = (batch: I[], context: BatchContext) => void; +type BatchConverterAsync = (batch: I[], context: BatchContext) => Promise; +type BatchHandlerAsync = (batch: I[], context: BatchContext) => Promise; +type BatchConverter = BatchConverterSync | BatchConverterAsync; +type BatchHandler = 
BatchHandlerSync | BatchHandlerAsync; + +type FixedBatcher = { batchSize: number } | { batchCount: number, mode?: typeof module.exports.Mode }; +type PredicateBatcher = PredicateBatcherCommon & { executor: (element: I, accumulator: A) => ExecutorResult }; +type PredicateBatcherAsync = PredicateBatcherCommon & { executor: (element: I, accumulator: A) => ExecutorResult | Promise> }; + +type Batcher = FixedBatcher | PredicateBatcher; +type BatcherAsync = Batcher | PredicateBatcherAsync; + +module.exports.Mode = { + Balanced: 0, + Even: 1 }; -module.exports.fixedBatch = function (batcher: FixedBatcher): T[][] { - const batches: T[][] = []; - const length = this.length; - let i = 0; - if ("batchSize" in batcher) { - const { batchSize } = batcher; - while (i < this.length) { - const cap = Math.min(i + batchSize, length); - batches.push(this.slice(i, i = cap)); - } - } else if ("batchCount" in batcher) { - let { batchCount, mode } = batcher; - const resolved = mode || Mode.Balanced; - if (batchCount < 1) { - throw new Error("Batch count must be a positive integer!"); - } - if (batchCount === 1) { - return [this]; - } - if (batchCount >= this.length) { - return this.map((element: T) => [element]); - } +module.exports.TimeUnit = { + Milliseconds: 0, + Seconds: 1, + Minutes: 2 +}; - let length = this.length; - let size: number; +module.exports.Assign = function () { - if (length % batchCount === 0) { - size = Math.floor(length / batchCount); - while (i < length) { - batches.push(this.slice(i, i += size)); + Array.prototype.fixedBatch = function (batcher: FixedBatcher): T[][] { + const batches: T[][] = []; + const length = this.length; + let i = 0; + if ("batchSize" in batcher) { + const { batchSize } = batcher; + while (i < this.length) { + const cap = Math.min(i + batchSize, length); + batches.push(this.slice(i, i = cap)); } - } else if (resolved === Mode.Balanced) { - while (i < length) { - size = Math.ceil((length - i) / batchCount--); - batches.push(this.slice(i, i += size)); + } else if ("batchCount" in batcher) { + let { batchCount, mode } = batcher; + const resolved = mode || module.exports.Mode.Balanced; + if (batchCount < 1) { + throw new Error("Batch count must be a positive integer!"); } - } else { - batchCount--; - size = Math.floor(length / batchCount); - if (length % size === 0) { - size--; + if (batchCount === 1) { + return [this]; } - while (i < size * batchCount) { - batches.push(this.slice(i, i += size)); + if (batchCount >= this.length) { + return this.map((element: T) => [element]); + } + + let length = this.length; + let size: number; + + if (length % batchCount === 0) { + size = Math.floor(length / batchCount); + while (i < length) { + batches.push(this.slice(i, i += size)); + } + } else if (resolved === module.exports.Mode.Balanced) { + while (i < length) { + size = Math.ceil((length - i) / batchCount--); + batches.push(this.slice(i, i += size)); + } + } else { + batchCount--; + size = Math.floor(length / batchCount); + if (length % size === 0) { + size--; + } + while (i < size * batchCount) { + batches.push(this.slice(i, i += size)); + } + batches.push(this.slice(size * batchCount)); } - batches.push(this.slice(size * batchCount)); } - } - return batches; -}; + return batches; + }; -module.exports.predicateBatch = function (batcher: PredicateBatcher): T[][] { - const batches: T[][] = []; - let batch: T[] = []; - const { executor, initial, persistAccumulator } = batcher; - let accumulator = initial; - for (let element of this) { - const { updated, makeNextBatch } = 
executor(element, accumulator); - accumulator = updated; - if (!makeNextBatch) { - batch.push(element); - } else { - batches.push(batch); - batch = [element]; - if (!persistAccumulator) { - accumulator = initial; + Array.prototype.predicateBatch = function (batcher: PredicateBatcher): T[][] { + const batches: T[][] = []; + let batch: T[] = []; + const { executor, initial, persistAccumulator } = batcher; + let accumulator = initial; + for (let element of this) { + const { updated, makeNextBatch } = executor(element, accumulator); + accumulator = updated; + if (!makeNextBatch) { + batch.push(element); + } else { + batches.push(batch); + batch = [element]; + if (!persistAccumulator) { + accumulator = initial; + } } } - } - batches.push(batch); - return batches; -}; + batches.push(batch); + return batches; + }; -module.exports.predicateBatchAsync = async function (batcher: PredicateBatcherAsync): Promise { - const batches: T[][] = []; - let batch: T[] = []; - const { executor, initial, persistAccumulator } = batcher; - let accumulator: A = initial; - for (let element of this) { - const { updated, makeNextBatch } = await executor(element, accumulator); - accumulator = updated; - if (!makeNextBatch) { - batch.push(element); - } else { - batches.push(batch); - batch = [element]; - if (!persistAccumulator) { - accumulator = initial; + Array.prototype.predicateBatchAsync = async function (batcher: PredicateBatcherAsync): Promise { + const batches: T[][] = []; + let batch: T[] = []; + const { executor, initial, persistAccumulator } = batcher; + let accumulator: A = initial; + for (let element of this) { + const { updated, makeNextBatch } = await executor(element, accumulator); + accumulator = updated; + if (!makeNextBatch) { + batch.push(element); + } else { + batches.push(batch); + batch = [element]; + if (!persistAccumulator) { + accumulator = initial; + } } } - } - batches.push(batch); - return batches; -}; + batches.push(batch); + return batches; + }; -module.exports.batch = function (batcher: Batcher): T[][] { - if ("executor" in batcher) { - return this.predicateBatch(batcher); - } else { - return this.fixedBatch(batcher); - } -}; + Array.prototype.batch = function (batcher: Batcher): T[][] { + if ("executor" in batcher) { + return this.predicateBatch(batcher); + } else { + return this.fixedBatch(batcher); + } + }; -module.exports.batchAsync = async function (batcher: BatcherAsync): Promise { - if ("executor" in batcher) { - return this.predicateBatchAsync(batcher); - } else { - return this.fixedBatch(batcher); - } -}; + Array.prototype.batchAsync = async function (batcher: BatcherAsync): Promise { + if ("executor" in batcher) { + return this.predicateBatchAsync(batcher); + } else { + return this.fixedBatch(batcher); + } + }; + + Array.prototype.batchedForEach = function (batcher: Batcher, handler: BatchHandlerSync): void { + if (this.length) { + let completed = 0; + const batches = this.batch(batcher); + const quota = batches.length; + for (let batch of batches) { + const context: BatchContext = { + completedBatches: completed, + remainingBatches: quota - completed, + }; + handler(batch, context); + completed++; + } + } + }; -module.exports.batchedForEach = function (batcher: Batcher, handler: BatchHandlerSync): void { - if (this.length) { + Array.prototype.batchedMap = function (batcher: Batcher, handler: BatchConverterSync): O[] { + if (!this.length) { + return []; + } + let collector: O[] = []; let completed = 0; const batches = this.batch(batcher); const quota = batches.length; @@ -211,33 
+204,33 @@ module.exports.batchedForEach = function (batcher: Batcher, handler: completedBatches: completed, remainingBatches: quota - completed, }; - handler(batch, context); + collector.push(...handler(batch, context)); completed++; } - } -}; + return collector; + }; -module.exports.batchedMap = function (batcher: Batcher, handler: BatchConverterSync): O[] { - if (!this.length) { - return []; - } - let collector: O[] = []; - let completed = 0; - const batches = this.batch(batcher); - const quota = batches.length; - for (let batch of batches) { - const context: BatchContext = { - completedBatches: completed, - remainingBatches: quota - completed, - }; - collector.push(...handler(batch, context)); - completed++; - } - return collector; -}; + Array.prototype.batchedForEachAsync = async function (batcher: BatcherAsync, handler: BatchHandler): Promise { + if (this.length) { + let completed = 0; + const batches = await this.batchAsync(batcher); + const quota = batches.length; + for (let batch of batches) { + const context: BatchContext = { + completedBatches: completed, + remainingBatches: quota - completed, + }; + await handler(batch, context); + completed++; + } + } + }; -module.exports.batchedForEachAsync = async function (batcher: BatcherAsync, handler: BatchHandler): Promise { - if (this.length) { + Array.prototype.batchedMapAsync = async function (batcher: BatcherAsync, handler: BatchConverter): Promise { + if (!this.length) { + return []; + } + let collector: O[] = []; let completed = 0; const batches = await this.batchAsync(batcher); const quota = batches.length; @@ -246,96 +239,92 @@ module.exports.batchedForEachAsync = async function (batcher: BatcherAsync completedBatches: completed, remainingBatches: quota - completed, }; - await handler(batch, context); + collector.push(...(await handler(batch, context))); completed++; } - } -}; + return collector; + }; -module.exports.batchedMapAsync = async function (batcher: BatcherAsync, handler: BatchConverter): Promise { - if (!this.length) { - return []; - } - let collector: O[] = []; - let completed = 0; - const batches = await this.batchAsync(batcher); - const quota = batches.length; - for (let batch of batches) { - const context: BatchContext = { - completedBatches: completed, - remainingBatches: quota - completed, - }; - collector.push(...(await handler(batch, context))); - completed++; - } - return collector; -}; - -module.exports.batchedForEachInterval = async function (batcher: BatcherAsync, handler: BatchHandler, interval: Interval): Promise { - if (!this.length) { - return; - } - const batches = await this.batchAsync(batcher); - const quota = batches.length; - return new Promise(async resolve => { - const iterator = batches[Symbol.iterator](); - let completed = 0; - while (true) { - const next = iterator.next(); - await new Promise(resolve => { - setTimeout(async () => { - const batch = next.value; - const context: BatchContext = { - completedBatches: completed, - remainingBatches: quota - completed, - }; - await handler(batch, context); - resolve(); - }, convert(interval)); - }); - if (++completed === quota) { - break; - } + Array.prototype.batchedForEachInterval = async function (batcher: BatcherAsync, handler: BatchHandler, interval: Interval): Promise { + if (!this.length) { + return; } - resolve(); - }); -}; + const batches = await this.batchAsync(batcher); + const quota = batches.length; + return new Promise(async resolve => { + const iterator = batches[Symbol.iterator](); + let completed = 0; + while (true) { + const next = 
iterator.next(); + await new Promise(resolve => { + setTimeout(async () => { + const batch = next.value; + const context: BatchContext = { + completedBatches: completed, + remainingBatches: quota - completed, + }; + await handler(batch, context); + resolve(); + }, convert(interval)); + }); + if (++completed === quota) { + break; + } + } + resolve(); + }); + }; -module.exports.batchedMapInterval = async function (batcher: BatcherAsync, handler: BatchConverter, interval: Interval): Promise { - if (!this.length) { - return []; - } - let collector: O[] = []; - const batches = await this.batchAsync(batcher); - const quota = batches.length; - return new Promise(async resolve => { - const iterator = batches[Symbol.iterator](); - let completed = 0; - while (true) { - const next = iterator.next(); - await new Promise(resolve => { - setTimeout(async () => { - const batch = next.value; - const context: BatchContext = { - completedBatches: completed, - remainingBatches: quota - completed, - }; - collector.push(...(await handler(batch, context))); - resolve(); - }, convert(interval)); - }); - if (++completed === quota) { - resolve(collector); - break; + Array.prototype.batchedMapInterval = async function (batcher: BatcherAsync, handler: BatchConverter, interval: Interval): Promise { + if (!this.length) { + return []; + } + let collector: O[] = []; + const batches = await this.batchAsync(batcher); + const quota = batches.length; + return new Promise(async resolve => { + const iterator = batches[Symbol.iterator](); + let completed = 0; + while (true) { + const next = iterator.next(); + await new Promise(resolve => { + setTimeout(async () => { + const batch = next.value; + const context: BatchContext = { + completedBatches: completed, + remainingBatches: quota - completed, + }; + collector.push(...(await handler(batch, context))); + resolve(); + }, convert(interval)); + }); + if (++completed === quota) { + resolve(collector); + break; + } } + }); + }; + + Array.prototype.lastElement = function () { + if (!this.length) { + return undefined; } - }); + const last: T = this[this.length - 1]; + return last; + }; + }; -module.exports.lastElement = function () { - if (!this.length) { - return undefined; +const convert = (interval: Interval) => { + const { magnitude, unit } = interval; + switch (unit) { + default: + case module.exports.Mode.TimeUnit.Milliseconds: + return magnitude; + case module.exports.Mode.TimeUnit.Seconds: + return magnitude * 1000; + case module.exports.Mode.TimeUnit.Minutes: + return magnitude * 1000 * 60; } - const last: T = this[this.length - 1]; - return last; -}; +}; \ No newline at end of file diff --git a/src/extensions/Extensions.ts b/src/extensions/Extensions.ts index 774236ea4..1bcebd0e2 100644 --- a/src/extensions/Extensions.ts +++ b/src/extensions/Extensions.ts @@ -2,6 +2,6 @@ const ArrayExtensions = require("./ArrayExtensions"); const StringExtensions = require("./StringExtensions"); module.exports.AssignExtensions = function () { - ArrayExtensions.Assign; - StringExtensions.Assign; + ArrayExtensions.Assign(); + StringExtensions.Assign(); }; \ No newline at end of file diff --git a/src/extensions/StringExtensions.ts b/src/extensions/StringExtensions.ts index 2ef31ec84..4cdbdebf7 100644 --- a/src/extensions/StringExtensions.ts +++ b/src/extensions/StringExtensions.ts @@ -3,7 +3,7 @@ interface String { hasNewline(): boolean; } -module.exports.AssignStringExtensions = function () { +module.exports.Assign = function () { String.prototype.removeTrailingNewlines = function () { let 
sliced = this; diff --git a/src/server/apis/google/GooglePhotosUploadUtils.ts b/src/server/apis/google/GooglePhotosUploadUtils.ts index 7eaf8a8b7..fc6772ffd 100644 --- a/src/server/apis/google/GooglePhotosUploadUtils.ts +++ b/src/server/apis/google/GooglePhotosUploadUtils.ts @@ -5,9 +5,9 @@ import { Utils } from '../../../Utils'; import * as path from 'path'; import { Opt } from '../../../new_fields/Doc'; import * as sharp from 'sharp'; -import { MediaItemCreationResult, NewMediaItemResult } from './SharedTypes'; -import { NewMediaItem } from '../..'; -import { TimeUnit } from "../../index"; +import { MediaItemCreationResult } from './SharedTypes'; +import { NewMediaItem } from "../../index"; +const { TimeUnit } = require("../../../extensions/ArrayExtensions"); const uploadDirectory = path.join(__dirname, "../../public/files/"); diff --git a/src/server/index.ts b/src/server/index.ts index 2d6c99d8a..1f411ade2 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -837,12 +837,6 @@ export interface NewMediaItem { }; } -export enum TimeUnit { - Milliseconds, - Seconds, - Minutes -} - app.post(RouteStore.googlePhotosMediaUpload, async (req, res) => { const mediaInput: GooglePhotosUploadUtils.MediaInput[] = req.body.media; await GooglePhotosUploadUtils.initialize({ uploadDirectory, credentialsPath, tokenPath }); @@ -865,7 +859,7 @@ app.post(RouteStore.googlePhotosMediaUpload, async (req, res) => { return newMediaItems; }; const batcher = { batchSize: 25 }; - const interval = { magnitude: 100, unit: TimeUnit.Milliseconds }; + const interval = { magnitude: 100, unit: ArrayExtensions.TimeUnit.Milliseconds }; const newMediaItems = await mediaInput.batchedMapInterval(batcher, dispatchUpload, interval); -- cgit v1.2.3-70-g09d2 From f5559e8de6184669a5583f2f3f1daf6becc5b733 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Thu, 19 Sep 2019 16:47:09 -0400 Subject: tweaks --- src/extensions/ArrayExtensions.ts | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) (limited to 'src/extensions/ArrayExtensions.ts') diff --git a/src/extensions/ArrayExtensions.ts b/src/extensions/ArrayExtensions.ts index 70508f6df..ec19117c9 100644 --- a/src/extensions/ArrayExtensions.ts +++ b/src/extensions/ArrayExtensions.ts @@ -1,6 +1,6 @@ interface Array { fixedBatch(batcher: FixedBatcher): T[][]; - predicateBatch(batcher: PredicateBatcher): T[][]; + predicateBatch(batcher: PredicateBatcherSync): T[][]; predicateBatchAsync(batcher: PredicateBatcherAsync): Promise; batch(batcher: Batcher): T[][]; batchAsync(batcher: BatcherAsync): Promise; @@ -27,7 +27,7 @@ interface ExecutorResult { makeNextBatch: boolean; } -interface PredicateBatcherCommon { +interface PredicateBatcherCommon { initial: A; persistAccumulator?: boolean; } @@ -45,11 +45,12 @@ type BatchConverter = BatchConverterSync | BatchConverterAsync type BatchHandler = BatchHandlerSync | BatchHandlerAsync; type FixedBatcher = { batchSize: number } | { batchCount: number, mode?: typeof module.exports.Mode }; -type PredicateBatcher = PredicateBatcherCommon & { executor: (element: I, accumulator: A) => ExecutorResult }; -type PredicateBatcherAsync = PredicateBatcherCommon & { executor: (element: I, accumulator: A) => ExecutorResult | Promise> }; +type PredicateBatcherSync = PredicateBatcherCommon & { executor: (element: I, accumulator: A) => ExecutorResult }; +type PredicateBatcherAsync = PredicateBatcherCommon & { executor: (element: I, accumulator: A) => Promise> }; -type Batcher = FixedBatcher | PredicateBatcher; -type BatcherAsync = Batcher | 
PredicateBatcherAsync; +type BatcherSync = FixedBatcher | PredicateBatcherSync; +type BatcherAsync = FixedBatcher | PredicateBatcherAsync; +type Batcher = BatcherSync | BatcherAsync; module.exports.Mode = { Balanced: 0, @@ -115,7 +116,7 @@ module.exports.Assign = function () { return batches; }; - Array.prototype.predicateBatch = function (batcher: PredicateBatcher): T[][] { + Array.prototype.predicateBatch = function (batcher: PredicateBatcherSync): T[][] { const batches: T[][] = []; let batch: T[] = []; const { executor, initial, persistAccumulator } = batcher; @@ -159,7 +160,7 @@ module.exports.Assign = function () { return batches; }; - Array.prototype.batch = function (batcher: Batcher): T[][] { + Array.prototype.batch = function (batcher: BatcherSync): T[][] { if ("executor" in batcher) { return this.predicateBatch(batcher); } else { -- cgit v1.2.3-70-g09d2 From e2886348fd00ce270094dd33735fd1b5df631e38 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Thu, 19 Sep 2019 18:23:24 -0400 Subject: finalized type fixes --- src/extensions/ArrayExtensions.ts | 40 ++++++++++++++++++++------------------- 1 file changed, 21 insertions(+), 19 deletions(-) (limited to 'src/extensions/ArrayExtensions.ts') diff --git a/src/extensions/ArrayExtensions.ts b/src/extensions/ArrayExtensions.ts index ec19117c9..cc17a5207 100644 --- a/src/extensions/ArrayExtensions.ts +++ b/src/extensions/ArrayExtensions.ts @@ -3,7 +3,7 @@ interface Array { predicateBatch(batcher: PredicateBatcherSync): T[][]; predicateBatchAsync(batcher: PredicateBatcherAsync): Promise; batch(batcher: Batcher): T[][]; - batchAsync(batcher: BatcherAsync): Promise; + batchAsync(batcher: Batcher): Promise; batchedForEach(batcher: Batcher, handler: BatchHandlerSync): void; batchedMap(batcher: Batcher, handler: BatchConverterSync): O[]; @@ -38,19 +38,21 @@ interface Interval { } type BatchConverterSync = (batch: I[], context: BatchContext) => O[]; -type BatchHandlerSync = (batch: I[], context: BatchContext) => void; type BatchConverterAsync = (batch: I[], context: BatchContext) => Promise; -type BatchHandlerAsync = (batch: I[], context: BatchContext) => Promise; type BatchConverter = BatchConverterSync | BatchConverterAsync; + +type BatchHandlerSync = (batch: I[], context: BatchContext) => void; +type BatchHandlerAsync = (batch: I[], context: BatchContext) => Promise; type BatchHandler = BatchHandlerSync | BatchHandlerAsync; +type BatcherSync = FixedBatcher | PredicateBatcherSync; +type BatcherAsync = PredicateBatcherAsync; +type Batcher = BatcherSync | BatcherAsync; + type FixedBatcher = { batchSize: number } | { batchCount: number, mode?: typeof module.exports.Mode }; type PredicateBatcherSync = PredicateBatcherCommon & { executor: (element: I, accumulator: A) => ExecutorResult }; -type PredicateBatcherAsync = PredicateBatcherCommon & { executor: (element: I, accumulator: A) => Promise> }; +type PredicateBatcherAsync = PredicateBatcherCommon & { executorAsync: (element: I, accumulator: A) => Promise> }; -type BatcherSync = FixedBatcher | PredicateBatcherSync; -type BatcherAsync = FixedBatcher | PredicateBatcherAsync; -type Batcher = BatcherSync | BatcherAsync; module.exports.Mode = { Balanced: 0, @@ -138,13 +140,13 @@ module.exports.Assign = function () { return batches; }; - Array.prototype.predicateBatchAsync = async function (batcher: PredicateBatcherAsync): Promise { + Array.prototype.predicateBatchAsync = async function (batcher: BatcherAsync): Promise { const batches: T[][] = []; let batch: T[] = []; - const { executor, initial, 
persistAccumulator } = batcher; + const { executorAsync, initial, persistAccumulator } = batcher; let accumulator: A = initial; for (let element of this) { - const { updated, makeNextBatch } = await executor(element, accumulator); + const { updated, makeNextBatch } = await executorAsync(element, accumulator); accumulator = updated; if (!makeNextBatch) { batch.push(element); @@ -168,15 +170,15 @@ module.exports.Assign = function () { } }; - Array.prototype.batchAsync = async function (batcher: BatcherAsync): Promise { - if ("executor" in batcher) { + Array.prototype.batchAsync = async function (batcher: Batcher): Promise { + if ("executorAsync" in batcher) { return this.predicateBatchAsync(batcher); } else { - return this.fixedBatch(batcher); + return this.batch(batcher); } }; - Array.prototype.batchedForEach = function (batcher: Batcher, handler: BatchHandlerSync): void { + Array.prototype.batchedForEach = function (batcher: BatcherSync, handler: BatchHandlerSync): void { if (this.length) { let completed = 0; const batches = this.batch(batcher); @@ -192,7 +194,7 @@ module.exports.Assign = function () { } }; - Array.prototype.batchedMap = function (batcher: Batcher, handler: BatchConverterSync): O[] { + Array.prototype.batchedMap = function (batcher: BatcherSync, handler: BatchConverterSync): O[] { if (!this.length) { return []; } @@ -211,7 +213,7 @@ module.exports.Assign = function () { return collector; }; - Array.prototype.batchedForEachAsync = async function (batcher: BatcherAsync, handler: BatchHandler): Promise { + Array.prototype.batchedForEachAsync = async function (batcher: Batcher, handler: BatchHandler): Promise { if (this.length) { let completed = 0; const batches = await this.batchAsync(batcher); @@ -227,7 +229,7 @@ module.exports.Assign = function () { } }; - Array.prototype.batchedMapAsync = async function (batcher: BatcherAsync, handler: BatchConverter): Promise { + Array.prototype.batchedMapAsync = async function (batcher: Batcher, handler: BatchConverter): Promise { if (!this.length) { return []; } @@ -246,7 +248,7 @@ module.exports.Assign = function () { return collector; }; - Array.prototype.batchedForEachInterval = async function (batcher: BatcherAsync, handler: BatchHandler, interval: Interval): Promise { + Array.prototype.batchedForEachInterval = async function (batcher: Batcher, handler: BatchHandler, interval: Interval): Promise { if (!this.length) { return; } @@ -276,7 +278,7 @@ module.exports.Assign = function () { }); }; - Array.prototype.batchedMapInterval = async function (batcher: BatcherAsync, handler: BatchConverter, interval: Interval): Promise { + Array.prototype.batchedMapInterval = async function (batcher: Batcher, handler: BatchConverter, interval: Interval): Promise { if (!this.length) { return []; } -- cgit v1.2.3-70-g09d2 From 23a204acddaea801a5ea94fe4c61184c8d7b82e4 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Thu, 19 Sep 2019 18:39:07 -0400 Subject: final type fixes --- src/extensions/ArrayExtensions.ts | 6 +++--- src/server/credentials/google_docs_token.json | 2 +- src/server/index.ts | 2 ++ 3 files changed, 6 insertions(+), 4 deletions(-) (limited to 'src/extensions/ArrayExtensions.ts') diff --git a/src/extensions/ArrayExtensions.ts b/src/extensions/ArrayExtensions.ts index cc17a5207..097f2109d 100644 --- a/src/extensions/ArrayExtensions.ts +++ b/src/extensions/ArrayExtensions.ts @@ -323,11 +323,11 @@ const convert = (interval: Interval) => { const { magnitude, unit } = interval; switch (unit) { default: - case 
module.exports.Mode.TimeUnit.Milliseconds: + case module.exports.TimeUnit.Milliseconds: return magnitude; - case module.exports.Mode.TimeUnit.Seconds: + case module.exports.TimeUnit.Seconds: return magnitude * 1000; - case module.exports.Mode.TimeUnit.Minutes: + case module.exports.TimeUnit.Minutes: return magnitude * 1000 * 60; } }; \ No newline at end of file diff --git a/src/server/credentials/google_docs_token.json b/src/server/credentials/google_docs_token.json index a476498a8..7c49eed43 100644 --- a/src/server/credentials/google_docs_token.json +++ b/src/server/credentials/google_docs_token.json @@ -1 +1 @@ -{"access_token":"ya29.ImCIBw01ZtZwR2NI608a-TfejTTGAzAWICqX9QdfNcLHo4upydH3tvpR7l5YmEbyuH2CHjHSQW2QKAPU_zXSpGAo_ZjQE5iRqsP_VdlSDVCS_NyabpHNL5m-0tmdyZJ8Qoc","refresh_token":"1/HTv_xFHszu2Nf3iiFrUTaeKzC_Vp2-6bpIB06xW_WHI","scope":"https://www.googleapis.com/auth/presentations.readonly https://www.googleapis.com/auth/documents.readonly https://www.googleapis.com/auth/drive.file https://www.googleapis.com/auth/documents https://www.googleapis.com/auth/photoslibrary https://www.googleapis.com/auth/photoslibrary.appendonly https://www.googleapis.com/auth/drive https://www.googleapis.com/auth/presentations https://www.googleapis.com/auth/photoslibrary.sharing","token_type":"Bearer","expiry_date":1568919703546} \ No newline at end of file +{"access_token":"ya29.ImCJB1Y8Z8vgUH4vyYA9xwqvLg281kOQKfA8_AGs_EqF1VKQVWfZsMoYkPJN3QwJmIUxlzTO1N-ehUGIxu0Jq3kKR-zzW7rQIMgeQu32OHogK4kvFxpM7l7RNYRw_9x22I0","refresh_token":"1/HTv_xFHszu2Nf3iiFrUTaeKzC_Vp2-6bpIB06xW_WHI","scope":"https://www.googleapis.com/auth/presentations.readonly https://www.googleapis.com/auth/documents.readonly https://www.googleapis.com/auth/drive.file https://www.googleapis.com/auth/documents https://www.googleapis.com/auth/photoslibrary https://www.googleapis.com/auth/photoslibrary.appendonly https://www.googleapis.com/auth/drive https://www.googleapis.com/auth/presentations https://www.googleapis.com/auth/photoslibrary.sharing","token_type":"Bearer","expiry_date":1568935635717} \ No newline at end of file diff --git a/src/server/index.ts b/src/server/index.ts index 1f411ade2..e3e4221cf 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -48,6 +48,8 @@ const probe = require("probe-image-size"); import * as qs from 'query-string'; import { Opt } from '../new_fields/Doc'; const Extensions = require("../extensions/Extensions"); +const ArrayExtensions = require("../extensions/ArrayExtensions"); + const download = (url: string, dest: fs.PathLike) => request.get(url).pipe(fs.createWriteStream(dest)); let youtubeApiKey: string; -- cgit v1.2.3-70-g09d2 From 51d1972a2833c25222cd9c2aa7ba87ab56d1f6cf Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Thu, 19 Sep 2019 18:57:35 -0400 Subject: tweaked --- src/extensions/ArrayExtensions.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) (limited to 'src/extensions/ArrayExtensions.ts') diff --git a/src/extensions/ArrayExtensions.ts b/src/extensions/ArrayExtensions.ts index 097f2109d..872f107a7 100644 --- a/src/extensions/ArrayExtensions.ts +++ b/src/extensions/ArrayExtensions.ts @@ -2,11 +2,11 @@ interface Array { fixedBatch(batcher: FixedBatcher): T[][]; predicateBatch(batcher: PredicateBatcherSync): T[][]; predicateBatchAsync(batcher: PredicateBatcherAsync): Promise; - batch(batcher: Batcher): T[][]; + batch(batcher: BatcherSync): T[][]; batchAsync(batcher: Batcher): Promise; - batchedForEach(batcher: Batcher, handler: BatchHandlerSync): void; - batchedMap(batcher: Batcher, 
handler: BatchConverterSync): O[]; + batchedForEach(batcher: BatcherSync, handler: BatchHandlerSync): void; + batchedMap(batcher: BatcherSync, handler: BatchConverterSync): O[]; batchedForEachAsync(batcher: Batcher, handler: BatchHandler): Promise; batchedMapAsync(batcher: Batcher, handler: BatchConverter): Promise; -- cgit v1.2.3-70-g09d2 From d66b51213e448d5f4f37781389af488a3ac744c4 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Fri, 20 Sep 2019 05:10:21 -0400 Subject: factored out extensions into npm module --- package.json | 1 + .../util/Import & Export/DirectoryImportBox.tsx | 7 +- src/client/views/Main.tsx | 3 - src/extensions/ArrayExtensions.ts | 639 ++++++++++----------- src/extensions/Extensions.ts | 2 +- src/server/apis/google/GooglePhotosUploadUtils.ts | 8 +- src/server/credentials/google_docs_token.json | 2 +- src/server/index.ts | 10 +- 8 files changed, 326 insertions(+), 346 deletions(-) (limited to 'src/extensions/ArrayExtensions.ts') diff --git a/package.json b/package.json index f869713ba..b20c31a7a 100644 --- a/package.json +++ b/package.json @@ -114,6 +114,7 @@ "@types/youtube": "0.0.38", "adm-zip": "^0.4.13", "archiver": "^3.0.3", + "array-batcher": "^1.0.2", "async": "^2.6.2", "babel-runtime": "^6.26.0", "bcrypt-nodejs": "0.0.3", diff --git a/src/client/util/Import & Export/DirectoryImportBox.tsx b/src/client/util/Import & Export/DirectoryImportBox.tsx index e3958e3a4..762302bc8 100644 --- a/src/client/util/Import & Export/DirectoryImportBox.tsx +++ b/src/client/util/Import & Export/DirectoryImportBox.tsx @@ -20,6 +20,7 @@ import { listSpec } from "../../../new_fields/Schema"; import { GooglePhotos } from "../../apis/google_docs/GooglePhotosClientUtils"; import { SchemaHeaderField } from "../../../new_fields/SchemaHeaderField"; import "./DirectoryImportBox.scss"; +import { batchedMapAsync } from "array-batcher"; const unsupported = ["text/html", "text/plain"]; interface FileResponse { @@ -103,7 +104,7 @@ export default class DirectoryImportBox extends React.Component runInAction(() => this.phase = `Internal: uploading ${this.quota - this.completed} files to Dash...`); - const uploads = await validated.batchedMapAsync({ batchSize: 15 }, async batch => { + const uploads = await batchedMapAsync(validated, { batchSize: 15 }, async batch => { const formData = new FormData(); const parameters = { method: 'POST', body: formData }; @@ -113,9 +114,9 @@ export default class DirectoryImportBox extends React.Component formData.append(Utils.GenerateGuid(), file); }); - const responses = (await fetch(RouteStore.upload, parameters)).json(); + const responses = await (await fetch(RouteStore.upload, parameters)).json(); runInAction(() => this.completed += batch.length); - return responses; + return responses as FileResponse[]; }); await Promise.all(uploads.map(async upload => { diff --git a/src/client/views/Main.tsx b/src/client/views/Main.tsx index 53912550c..70d2235e6 100644 --- a/src/client/views/Main.tsx +++ b/src/client/views/Main.tsx @@ -7,9 +7,6 @@ import { Cast } from "../../new_fields/Types"; import { Doc, DocListCastAsync } from "../../new_fields/Doc"; import { List } from "../../new_fields/List"; import { DocServer } from "../DocServer"; -const Extensions = require("../../extensions/Extensions"); - -Extensions.AssignExtensions(); let swapDocs = async () => { let oldDoc = await Cast(CurrentUserUtils.UserDocument.linkManagerDoc, Doc); diff --git a/src/extensions/ArrayExtensions.ts b/src/extensions/ArrayExtensions.ts index 872f107a7..ca407862b 100644 --- 
a/src/extensions/ArrayExtensions.ts +++ b/src/extensions/ArrayExtensions.ts @@ -1,333 +1,318 @@ interface Array { - fixedBatch(batcher: FixedBatcher): T[][]; - predicateBatch(batcher: PredicateBatcherSync): T[][]; - predicateBatchAsync(batcher: PredicateBatcherAsync): Promise; - batch(batcher: BatcherSync): T[][]; - batchAsync(batcher: Batcher): Promise; - - batchedForEach(batcher: BatcherSync, handler: BatchHandlerSync): void; - batchedMap(batcher: BatcherSync, handler: BatchConverterSync): O[]; - - batchedForEachAsync(batcher: Batcher, handler: BatchHandler): Promise; - batchedMapAsync(batcher: Batcher, handler: BatchConverter): Promise; - - batchedForEachInterval(batcher: Batcher, handler: BatchHandler, interval: Interval): Promise; - batchedMapInterval(batcher: Batcher, handler: BatchConverter, interval: Interval): Promise; - lastElement(): T; } -interface BatchContext { - completedBatches: number; - remainingBatches: number; -} - -interface ExecutorResult { - updated: A; - makeNextBatch: boolean; -} - -interface PredicateBatcherCommon { - initial: A; - persistAccumulator?: boolean; -} - -interface Interval { - magnitude: number; - unit: typeof module.exports.TimeUnit; -} - -type BatchConverterSync = (batch: I[], context: BatchContext) => O[]; -type BatchConverterAsync = (batch: I[], context: BatchContext) => Promise; -type BatchConverter = BatchConverterSync | BatchConverterAsync; - -type BatchHandlerSync = (batch: I[], context: BatchContext) => void; -type BatchHandlerAsync = (batch: I[], context: BatchContext) => Promise; -type BatchHandler = BatchHandlerSync | BatchHandlerAsync; - -type BatcherSync = FixedBatcher | PredicateBatcherSync; -type BatcherAsync = PredicateBatcherAsync; -type Batcher = BatcherSync | BatcherAsync; - -type FixedBatcher = { batchSize: number } | { batchCount: number, mode?: typeof module.exports.Mode }; -type PredicateBatcherSync = PredicateBatcherCommon & { executor: (element: I, accumulator: A) => ExecutorResult }; -type PredicateBatcherAsync = PredicateBatcherCommon & { executorAsync: (element: I, accumulator: A) => Promise> }; - - -module.exports.Mode = { - Balanced: 0, - Even: 1 -}; - -module.exports.TimeUnit = { - Milliseconds: 0, - Seconds: 1, - Minutes: 2 -}; - -module.exports.Assign = function () { - - Array.prototype.fixedBatch = function (batcher: FixedBatcher): T[][] { - const batches: T[][] = []; - const length = this.length; - let i = 0; - if ("batchSize" in batcher) { - const { batchSize } = batcher; - while (i < this.length) { - const cap = Math.min(i + batchSize, length); - batches.push(this.slice(i, i = cap)); - } - } else if ("batchCount" in batcher) { - let { batchCount, mode } = batcher; - const resolved = mode || module.exports.Mode.Balanced; - if (batchCount < 1) { - throw new Error("Batch count must be a positive integer!"); - } - if (batchCount === 1) { - return [this]; - } - if (batchCount >= this.length) { - return this.map((element: T) => [element]); - } - - let length = this.length; - let size: number; - - if (length % batchCount === 0) { - size = Math.floor(length / batchCount); - while (i < length) { - batches.push(this.slice(i, i += size)); - } - } else if (resolved === module.exports.Mode.Balanced) { - while (i < length) { - size = Math.ceil((length - i) / batchCount--); - batches.push(this.slice(i, i += size)); - } - } else { - batchCount--; - size = Math.floor(length / batchCount); - if (length % size === 0) { - size--; - } - while (i < size * batchCount) { - batches.push(this.slice(i, i += size)); - } - 
batches.push(this.slice(size * batchCount)); - } - } - return batches; - }; - - Array.prototype.predicateBatch = function (batcher: PredicateBatcherSync): T[][] { - const batches: T[][] = []; - let batch: T[] = []; - const { executor, initial, persistAccumulator } = batcher; - let accumulator = initial; - for (let element of this) { - const { updated, makeNextBatch } = executor(element, accumulator); - accumulator = updated; - if (!makeNextBatch) { - batch.push(element); - } else { - batches.push(batch); - batch = [element]; - if (!persistAccumulator) { - accumulator = initial; - } - } - } - batches.push(batch); - return batches; - }; - - Array.prototype.predicateBatchAsync = async function (batcher: BatcherAsync): Promise { - const batches: T[][] = []; - let batch: T[] = []; - const { executorAsync, initial, persistAccumulator } = batcher; - let accumulator: A = initial; - for (let element of this) { - const { updated, makeNextBatch } = await executorAsync(element, accumulator); - accumulator = updated; - if (!makeNextBatch) { - batch.push(element); - } else { - batches.push(batch); - batch = [element]; - if (!persistAccumulator) { - accumulator = initial; - } - } - } - batches.push(batch); - return batches; - }; - - Array.prototype.batch = function (batcher: BatcherSync): T[][] { - if ("executor" in batcher) { - return this.predicateBatch(batcher); - } else { - return this.fixedBatch(batcher); - } - }; - - Array.prototype.batchAsync = async function (batcher: Batcher): Promise { - if ("executorAsync" in batcher) { - return this.predicateBatchAsync(batcher); - } else { - return this.batch(batcher); - } - }; - - Array.prototype.batchedForEach = function (batcher: BatcherSync, handler: BatchHandlerSync): void { - if (this.length) { - let completed = 0; - const batches = this.batch(batcher); - const quota = batches.length; - for (let batch of batches) { - const context: BatchContext = { - completedBatches: completed, - remainingBatches: quota - completed, - }; - handler(batch, context); - completed++; - } - } - }; - - Array.prototype.batchedMap = function (batcher: BatcherSync, handler: BatchConverterSync): O[] { - if (!this.length) { - return []; - } - let collector: O[] = []; - let completed = 0; - const batches = this.batch(batcher); - const quota = batches.length; - for (let batch of batches) { - const context: BatchContext = { - completedBatches: completed, - remainingBatches: quota - completed, - }; - collector.push(...handler(batch, context)); - completed++; - } - return collector; - }; - - Array.prototype.batchedForEachAsync = async function (batcher: Batcher, handler: BatchHandler): Promise { - if (this.length) { - let completed = 0; - const batches = await this.batchAsync(batcher); - const quota = batches.length; - for (let batch of batches) { - const context: BatchContext = { - completedBatches: completed, - remainingBatches: quota - completed, - }; - await handler(batch, context); - completed++; - } - } - }; - - Array.prototype.batchedMapAsync = async function (batcher: Batcher, handler: BatchConverter): Promise { - if (!this.length) { - return []; - } - let collector: O[] = []; - let completed = 0; - const batches = await this.batchAsync(batcher); - const quota = batches.length; - for (let batch of batches) { - const context: BatchContext = { - completedBatches: completed, - remainingBatches: quota - completed, - }; - collector.push(...(await handler(batch, context))); - completed++; - } - return collector; - }; - - Array.prototype.batchedForEachInterval = async function 
(batcher: Batcher, handler: BatchHandler, interval: Interval): Promise { - if (!this.length) { - return; - } - const batches = await this.batchAsync(batcher); - const quota = batches.length; - return new Promise(async resolve => { - const iterator = batches[Symbol.iterator](); - let completed = 0; - while (true) { - const next = iterator.next(); - await new Promise(resolve => { - setTimeout(async () => { - const batch = next.value; - const context: BatchContext = { - completedBatches: completed, - remainingBatches: quota - completed, - }; - await handler(batch, context); - resolve(); - }, convert(interval)); - }); - if (++completed === quota) { - break; - } - } - resolve(); - }); - }; - - Array.prototype.batchedMapInterval = async function (batcher: Batcher, handler: BatchConverter, interval: Interval): Promise { - if (!this.length) { - return []; - } - let collector: O[] = []; - const batches = await this.batchAsync(batcher); - const quota = batches.length; - return new Promise(async resolve => { - const iterator = batches[Symbol.iterator](); - let completed = 0; - while (true) { - const next = iterator.next(); - await new Promise(resolve => { - setTimeout(async () => { - const batch = next.value; - const context: BatchContext = { - completedBatches: completed, - remainingBatches: quota - completed, - }; - collector.push(...(await handler(batch, context))); - resolve(); - }, convert(interval)); - }); - if (++completed === quota) { - resolve(collector); - break; - } - } - }); - }; - - Array.prototype.lastElement = function () { - if (!this.length) { - return undefined; - } - const last: T = this[this.length - 1]; - return last; - }; - +// interface BatchContext { +// completedBatches: number; +// remainingBatches: number; +// } + +// interface ExecutorResult { +// updated: A; +// makeNextBatch: boolean; +// } + +// interface PredicateBatcherCommon { +// initial: A; +// persistAccumulator?: boolean; +// } + +// interface Interval { +// magnitude: number; +// unit: typeof module.exports.TimeUnit; +// } + +// type BatchConverterSync = (batch: I[], context: BatchContext) => O[]; +// type BatchConverterAsync = (batch: I[], context: BatchContext) => Promise; +// type BatchConverter = BatchConverterSync | BatchConverterAsync; + +// type BatchHandlerSync = (batch: I[], context: BatchContext) => void; +// type BatchHandlerAsync = (batch: I[], context: BatchContext) => Promise; +// type BatchHandler = BatchHandlerSync | BatchHandlerAsync; + +// type BatcherSync = FixedBatcher | PredicateBatcherSync; +// type BatcherAsync = PredicateBatcherAsync; +// type Batcher = BatcherSync | BatcherAsync; + +// type FixedBatcher = { batchSize: number } | { batchCount: number, mode?: typeof module.exports.Mode }; +// type PredicateBatcherSync = PredicateBatcherCommon & { executor: (element: I, accumulator: A) => ExecutorResult }; +// type PredicateBatcherAsync = PredicateBatcherCommon & { executorAsync: (element: I, accumulator: A) => Promise> }; + + +// module.exports.Mode = { +// Balanced: 0, +// Even: 1 +// }; + +// module.exports.TimeUnit = { +// Milliseconds: 0, +// Seconds: 1, +// Minutes: 2 +// }; + +// module.exports.Assign = function () { + +// Array.prototype.fixedBatch = function (batcher: FixedBatcher): T[][] { +// const batches: T[][] = []; +// const length = this.length; +// let i = 0; +// if ("batchSize" in batcher) { +// const { batchSize } = batcher; +// while (i < this.length) { +// const cap = Math.min(i + batchSize, length); +// batches.push(this.slice(i, i = cap)); +// } +// } else if 
("batchCount" in batcher) { +// let { batchCount, mode } = batcher; +// const resolved = mode || module.exports.Mode.Balanced; +// if (batchCount < 1) { +// throw new Error("Batch count must be a positive integer!"); +// } +// if (batchCount === 1) { +// return [this]; +// } +// if (batchCount >= this.length) { +// return this.map((element: T) => [element]); +// } + +// let length = this.length; +// let size: number; + +// if (length % batchCount === 0) { +// size = Math.floor(length / batchCount); +// while (i < length) { +// batches.push(this.slice(i, i += size)); +// } +// } else if (resolved === module.exports.Mode.Balanced) { +// while (i < length) { +// size = Math.ceil((length - i) / batchCount--); +// batches.push(this.slice(i, i += size)); +// } +// } else { +// batchCount--; +// size = Math.floor(length / batchCount); +// if (length % size === 0) { +// size--; +// } +// while (i < size * batchCount) { +// batches.push(this.slice(i, i += size)); +// } +// batches.push(this.slice(size * batchCount)); +// } +// } +// return batches; +// }; + +// Array.prototype.predicateBatch = function (batcher: PredicateBatcherSync): T[][] { +// const batches: T[][] = []; +// let batch: T[] = []; +// const { executor, initial, persistAccumulator } = batcher; +// let accumulator = initial; +// for (let element of this) { +// const { updated, makeNextBatch } = executor(element, accumulator); +// accumulator = updated; +// if (!makeNextBatch) { +// batch.push(element); +// } else { +// batches.push(batch); +// batch = [element]; +// if (!persistAccumulator) { +// accumulator = initial; +// } +// } +// } +// batches.push(batch); +// return batches; +// }; + +// Array.prototype.predicateBatchAsync = async function (batcher: BatcherAsync): Promise { +// const batches: T[][] = []; +// let batch: T[] = []; +// const { executorAsync, initial, persistAccumulator } = batcher; +// let accumulator: A = initial; +// for (let element of this) { +// const { updated, makeNextBatch } = await executorAsync(element, accumulator); +// accumulator = updated; +// if (!makeNextBatch) { +// batch.push(element); +// } else { +// batches.push(batch); +// batch = [element]; +// if (!persistAccumulator) { +// accumulator = initial; +// } +// } +// } +// batches.push(batch); +// return batches; +// }; + +// Array.prototype.batch = function (batcher: BatcherSync): T[][] { +// if ("executor" in batcher) { +// return this.predicateBatch(batcher); +// } else { +// return this.fixedBatch(batcher); +// } +// }; + +// Array.prototype.batchAsync = async function (batcher: Batcher): Promise { +// if ("executorAsync" in batcher) { +// return this.predicateBatchAsync(batcher); +// } else { +// return this.batch(batcher); +// } +// }; + +// Array.prototype.batchedForEach = function (batcher: BatcherSync, handler: BatchHandlerSync): void { +// if (this.length) { +// let completed = 0; +// const batches = this.batch(batcher); +// const quota = batches.length; +// for (let batch of batches) { +// const context: BatchContext = { +// completedBatches: completed, +// remainingBatches: quota - completed, +// }; +// handler(batch, context); +// completed++; +// } +// } +// }; + +// Array.prototype.batchedMap = function (batcher: BatcherSync, handler: BatchConverterSync): O[] { +// if (!this.length) { +// return []; +// } +// let collector: O[] = []; +// let completed = 0; +// const batches = this.batch(batcher); +// const quota = batches.length; +// for (let batch of batches) { +// const context: BatchContext = { +// completedBatches: completed, 
+// remainingBatches: quota - completed, +// }; +// collector.push(...handler(batch, context)); +// completed++; +// } +// return collector; +// }; + +// Array.prototype.batchedForEachAsync = async function (batcher: Batcher, handler: BatchHandler): Promise { +// if (this.length) { +// let completed = 0; +// const batches = await this.batchAsync(batcher); +// const quota = batches.length; +// for (let batch of batches) { +// const context: BatchContext = { +// completedBatches: completed, +// remainingBatches: quota - completed, +// }; +// await handler(batch, context); +// completed++; +// } +// } +// }; + +// Array.prototype.batchedMapAsync = async function (batcher: Batcher, handler: BatchConverter): Promise { +// if (!this.length) { +// return []; +// } +// let collector: O[] = []; +// let completed = 0; +// const batches = await this.batchAsync(batcher); +// const quota = batches.length; +// for (let batch of batches) { +// const context: BatchContext = { +// completedBatches: completed, +// remainingBatches: quota - completed, +// }; +// collector.push(...(await handler(batch, context))); +// completed++; +// } +// return collector; +// }; + +// Array.prototype.batchedForEachInterval = async function (batcher: Batcher, handler: BatchHandler, interval: Interval): Promise { +// if (!this.length) { +// return; +// } +// const batches = await this.batchAsync(batcher); +// const quota = batches.length; +// return new Promise(async resolve => { +// const iterator = batches[Symbol.iterator](); +// let completed = 0; +// while (true) { +// const next = iterator.next(); +// await new Promise(resolve => { +// setTimeout(async () => { +// const batch = next.value; +// const context: BatchContext = { +// completedBatches: completed, +// remainingBatches: quota - completed, +// }; +// await handler(batch, context); +// resolve(); +// }, convert(interval)); +// }); +// if (++completed === quota) { +// break; +// } +// } +// resolve(); +// }); +// }; + +// Array.prototype.batchedMapInterval = async function (batcher: Batcher, handler: BatchConverter, interval: Interval): Promise { +// if (!this.length) { +// return []; +// } +// let collector: O[] = []; +// const batches = await this.batchAsync(batcher); +// const quota = batches.length; +// return new Promise(async resolve => { +// const iterator = batches[Symbol.iterator](); +// let completed = 0; +// while (true) { +// const next = iterator.next(); +// await new Promise(resolve => { +// setTimeout(async () => { +// const batch = next.value; +// const context: BatchContext = { +// completedBatches: completed, +// remainingBatches: quota - completed, +// }; +// collector.push(...(await handler(batch, context))); +// resolve(); +// }, convert(interval)); +// }); +// if (++completed === quota) { +// resolve(collector); +// break; +// } +// } +// }); +// }; + +Array.prototype.lastElement = function () { + if (!this.length) { + return undefined; + } + const last: T = this[this.length - 1]; + return last; }; -const convert = (interval: Interval) => { - const { magnitude, unit } = interval; - switch (unit) { - default: - case module.exports.TimeUnit.Milliseconds: - return magnitude; - case module.exports.TimeUnit.Seconds: - return magnitude * 1000; - case module.exports.TimeUnit.Minutes: - return magnitude * 1000 * 60; - } -}; \ No newline at end of file +// }; + +// const convert = (interval: Interval) => { +// const { magnitude, unit } = interval; +// switch (unit) { +// default: +// case module.exports.TimeUnit.Milliseconds: +// return magnitude; +// 
case module.exports.TimeUnit.Seconds: +// return magnitude * 1000; +// case module.exports.TimeUnit.Minutes: +// return magnitude * 1000 * 60; +// } +// }; \ No newline at end of file diff --git a/src/extensions/Extensions.ts b/src/extensions/Extensions.ts index 1bcebd0e2..1391140b9 100644 --- a/src/extensions/Extensions.ts +++ b/src/extensions/Extensions.ts @@ -2,6 +2,6 @@ const ArrayExtensions = require("./ArrayExtensions"); const StringExtensions = require("./StringExtensions"); module.exports.AssignExtensions = function () { - ArrayExtensions.Assign(); + // ArrayExtensions.Assign(); StringExtensions.Assign(); }; \ No newline at end of file diff --git a/src/server/apis/google/GooglePhotosUploadUtils.ts b/src/server/apis/google/GooglePhotosUploadUtils.ts index fc6772ffd..29575763c 100644 --- a/src/server/apis/google/GooglePhotosUploadUtils.ts +++ b/src/server/apis/google/GooglePhotosUploadUtils.ts @@ -7,7 +7,7 @@ import { Opt } from '../../../new_fields/Doc'; import * as sharp from 'sharp'; import { MediaItemCreationResult } from './SharedTypes'; import { NewMediaItem } from "../../index"; -const { TimeUnit } = require("../../../extensions/ArrayExtensions"); +import { batchedMapInterval, FixedBatcher, TimeUnit, Interval } from "array-batcher"; const uploadDirectory = path.join(__dirname, "../../public/files/"); @@ -81,10 +81,10 @@ export namespace GooglePhotosUploadUtils { }); })).newMediaItemResults; }; - const batcher = { batchSize: 50 }; - const interval = { magnitude: 100, unit: TimeUnit.Milliseconds }; + const batcher: FixedBatcher = { batchSize: 50 }; + const interval: Interval = { magnitude: 100, unit: TimeUnit.Milliseconds }; - const newMediaItemResults = await newMediaItems.batchedMapInterval(batcher, createFromUploadTokens, interval); + const newMediaItemResults = await batchedMapInterval(newMediaItems, batcher, createFromUploadTokens, interval); return { newMediaItemResults }; }; diff --git a/src/server/credentials/google_docs_token.json b/src/server/credentials/google_docs_token.json index 7c49eed43..cdea139a3 100644 --- a/src/server/credentials/google_docs_token.json +++ b/src/server/credentials/google_docs_token.json @@ -1 +1 @@ -{"access_token":"ya29.ImCJB1Y8Z8vgUH4vyYA9xwqvLg281kOQKfA8_AGs_EqF1VKQVWfZsMoYkPJN3QwJmIUxlzTO1N-ehUGIxu0Jq3kKR-zzW7rQIMgeQu32OHogK4kvFxpM7l7RNYRw_9x22I0","refresh_token":"1/HTv_xFHszu2Nf3iiFrUTaeKzC_Vp2-6bpIB06xW_WHI","scope":"https://www.googleapis.com/auth/presentations.readonly https://www.googleapis.com/auth/documents.readonly https://www.googleapis.com/auth/drive.file https://www.googleapis.com/auth/documents https://www.googleapis.com/auth/photoslibrary https://www.googleapis.com/auth/photoslibrary.appendonly https://www.googleapis.com/auth/drive https://www.googleapis.com/auth/presentations https://www.googleapis.com/auth/photoslibrary.sharing","token_type":"Bearer","expiry_date":1568935635717} \ No newline at end of file +{"access_token":"ya29.ImCJB_jd-XlGcIHAHgN2Zl3BWQ6sMHdeMMuRxU6sPCbAYIT8hXws-WDmQf65ZY1f-0d3y7HcCcuOxtZJ_0IcBb1-yIBxiOf3VJWPmvjGiJQq_mANGVSSmsBHhqpIaYkeQN0","refresh_token":"1/HTv_xFHszu2Nf3iiFrUTaeKzC_Vp2-6bpIB06xW_WHI","scope":"https://www.googleapis.com/auth/presentations.readonly https://www.googleapis.com/auth/documents.readonly https://www.googleapis.com/auth/drive.file https://www.googleapis.com/auth/documents https://www.googleapis.com/auth/photoslibrary https://www.googleapis.com/auth/photoslibrary.appendonly https://www.googleapis.com/auth/drive https://www.googleapis.com/auth/presentations 
https://www.googleapis.com/auth/photoslibrary.sharing","token_type":"Bearer","expiry_date":1568973665276} \ No newline at end of file diff --git a/src/server/index.ts b/src/server/index.ts index e3e4221cf..e03079d66 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -47,9 +47,7 @@ const mongoose = require('mongoose'); const probe = require("probe-image-size"); import * as qs from 'query-string'; import { Opt } from '../new_fields/Doc'; -const Extensions = require("../extensions/Extensions"); -const ArrayExtensions = require("../extensions/ArrayExtensions"); - +import { batchedMapInterval, TimeUnit } from "array-batcher"; const download = (url: string, dest: fs.PathLike) => request.get(url).pipe(fs.createWriteStream(dest)); let youtubeApiKey: string; @@ -101,8 +99,6 @@ enum Method { POST } -Extensions.AssignExtensions(); - /** * Please invoke this function when adding a new route to Dash's server. * It ensures that any requests leading to or containing user-sensitive information @@ -861,9 +857,9 @@ app.post(RouteStore.googlePhotosMediaUpload, async (req, res) => { return newMediaItems; }; const batcher = { batchSize: 25 }; - const interval = { magnitude: 100, unit: ArrayExtensions.TimeUnit.Milliseconds }; + const interval = { magnitude: 100, unit: TimeUnit.Milliseconds }; - const newMediaItems = await mediaInput.batchedMapInterval(batcher, dispatchUpload, interval); + const newMediaItems = await batchedMapInterval(mediaInput, batcher, dispatchUpload, interval); if (failed) { return _error(res, tokenError); -- cgit v1.2.3-70-g09d2 From 9b27f1ace4655f71a67ad68e1f6f6bba82f41e46 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Tue, 24 Sep 2019 19:09:33 -0400 Subject: now store cache in mongodb collection, fixed extension as --- src/Utils.ts | 6 +- .../apis/google_docs/GooglePhotosClientUtils.ts | 8 +- src/client/views/Main.tsx | 3 + src/extensions/ArrayExtensions.ts | 325 +---------------- src/extensions/Extensions.ts | 7 - src/extensions/General/Extensions.ts | 9 + src/extensions/General/ExtensionsTypings.ts | 8 + src/extensions/StringExtensions.ts | 11 +- src/server/DashUploadUtils.ts | 143 ++++++++ .../apis/google/CustomizedWrapper/filters.js | 46 --- src/server/apis/google/GoogleApiServerUtils.ts | 32 +- src/server/apis/google/GooglePhotosUploadUtils.ts | 147 +------- src/server/apis/google/existing_uploads.json | 0 src/server/credentials/google_docs_token.json | 8 +- src/server/database.ts | 402 ++++++++++++--------- src/server/index.ts | 49 +-- 16 files changed, 465 insertions(+), 739 deletions(-) delete mode 100644 src/extensions/Extensions.ts create mode 100644 src/extensions/General/Extensions.ts create mode 100644 src/extensions/General/ExtensionsTypings.ts create mode 100644 src/server/DashUploadUtils.ts delete mode 100644 src/server/apis/google/CustomizedWrapper/filters.js delete mode 100644 src/server/apis/google/existing_uploads.json (limited to 'src/extensions/ArrayExtensions.ts') diff --git a/src/Utils.ts b/src/Utils.ts index a842f5a20..aa2998971 100644 --- a/src/Utils.ts +++ b/src/Utils.ts @@ -4,6 +4,7 @@ import { Socket } from 'socket.io'; import { Message } from './server/Message'; import { RouteStore } from './server/RouteStore'; import requestPromise = require('request-promise'); +import { CurrentUserUtils } from './server/authentication/models/current_user_utils'; export class Utils { @@ -293,12 +294,13 @@ export namespace JSONUtils { } -export function PostToServer(relativeRoute: string, body: any) { +export function PostToServer(relativeRoute: string, body?: 
any) { + body = { userId: CurrentUserUtils.id, ...body }; let options = { method: "POST", uri: Utils.prepend(relativeRoute), json: true, - body: body + body }; return requestPromise.post(options); } \ No newline at end of file diff --git a/src/client/apis/google_docs/GooglePhotosClientUtils.ts b/src/client/apis/google_docs/GooglePhotosClientUtils.ts index 671d05421..0e09ad85b 100644 --- a/src/client/apis/google_docs/GooglePhotosClientUtils.ts +++ b/src/client/apis/google_docs/GooglePhotosClientUtils.ts @@ -12,17 +12,11 @@ import { FormattedTextBox } from "../../views/nodes/FormattedTextBox"; import { Docs, DocumentOptions } from "../../documents/Documents"; import { NewMediaItemResult, MediaItem } from "../../../server/apis/google/SharedTypes"; import { AssertionError } from "assert"; -import { List } from "../../../new_fields/List"; -import { listSpec } from "../../../new_fields/Schema"; import { DocumentView } from "../../views/nodes/DocumentView"; export namespace GooglePhotos { - const endpoint = async () => { - const getToken = Utils.prepend(RouteStore.googlePhotosAccessToken); - const token = await (await fetch(getToken)).text(); - return new Photos(token); - }; + const endpoint = async () => new Photos(await PostToServer(RouteStore.googlePhotosAccessToken)); export enum MediaType { ALL_MEDIA = 'ALL_MEDIA', diff --git a/src/client/views/Main.tsx b/src/client/views/Main.tsx index 70d2235e6..3bd898ac0 100644 --- a/src/client/views/Main.tsx +++ b/src/client/views/Main.tsx @@ -7,6 +7,9 @@ import { Cast } from "../../new_fields/Types"; import { Doc, DocListCastAsync } from "../../new_fields/Doc"; import { List } from "../../new_fields/List"; import { DocServer } from "../DocServer"; +import { AssignAllExtensions } from "../../extensions/General/Extensions"; + +AssignAllExtensions(); let swapDocs = async () => { let oldDoc = await Cast(CurrentUserUtils.UserDocument.linkManagerDoc, Doc); diff --git a/src/extensions/ArrayExtensions.ts b/src/extensions/ArrayExtensions.ts index ca407862b..422a10dbc 100644 --- a/src/extensions/ArrayExtensions.ts +++ b/src/extensions/ArrayExtensions.ts @@ -1,318 +1,13 @@ -interface Array { - lastElement(): T; -} - -// interface BatchContext { -// completedBatches: number; -// remainingBatches: number; -// } - -// interface ExecutorResult { -// updated: A; -// makeNextBatch: boolean; -// } - -// interface PredicateBatcherCommon { -// initial: A; -// persistAccumulator?: boolean; -// } - -// interface Interval { -// magnitude: number; -// unit: typeof module.exports.TimeUnit; -// } - -// type BatchConverterSync = (batch: I[], context: BatchContext) => O[]; -// type BatchConverterAsync = (batch: I[], context: BatchContext) => Promise; -// type BatchConverter = BatchConverterSync | BatchConverterAsync; - -// type BatchHandlerSync = (batch: I[], context: BatchContext) => void; -// type BatchHandlerAsync = (batch: I[], context: BatchContext) => Promise; -// type BatchHandler = BatchHandlerSync | BatchHandlerAsync; - -// type BatcherSync = FixedBatcher | PredicateBatcherSync; -// type BatcherAsync = PredicateBatcherAsync; -// type Batcher = BatcherSync | BatcherAsync; - -// type FixedBatcher = { batchSize: number } | { batchCount: number, mode?: typeof module.exports.Mode }; -// type PredicateBatcherSync = PredicateBatcherCommon & { executor: (element: I, accumulator: A) => ExecutorResult }; -// type PredicateBatcherAsync = PredicateBatcherCommon & { executorAsync: (element: I, accumulator: A) => Promise> }; - - -// module.exports.Mode = { -// Balanced: 0, -// Even: 1 
-// }; - -// module.exports.TimeUnit = { -// Milliseconds: 0, -// Seconds: 1, -// Minutes: 2 -// }; - -// module.exports.Assign = function () { - -// Array.prototype.fixedBatch = function (batcher: FixedBatcher): T[][] { -// const batches: T[][] = []; -// const length = this.length; -// let i = 0; -// if ("batchSize" in batcher) { -// const { batchSize } = batcher; -// while (i < this.length) { -// const cap = Math.min(i + batchSize, length); -// batches.push(this.slice(i, i = cap)); -// } -// } else if ("batchCount" in batcher) { -// let { batchCount, mode } = batcher; -// const resolved = mode || module.exports.Mode.Balanced; -// if (batchCount < 1) { -// throw new Error("Batch count must be a positive integer!"); -// } -// if (batchCount === 1) { -// return [this]; -// } -// if (batchCount >= this.length) { -// return this.map((element: T) => [element]); -// } +function Assign() { -// let length = this.length; -// let size: number; + Array.prototype.lastElement = function () { + if (!this.length) { + return undefined; + } + const last: T = this[this.length - 1]; + return last; + }; -// if (length % batchCount === 0) { -// size = Math.floor(length / batchCount); -// while (i < length) { -// batches.push(this.slice(i, i += size)); -// } -// } else if (resolved === module.exports.Mode.Balanced) { -// while (i < length) { -// size = Math.ceil((length - i) / batchCount--); -// batches.push(this.slice(i, i += size)); -// } -// } else { -// batchCount--; -// size = Math.floor(length / batchCount); -// if (length % size === 0) { -// size--; -// } -// while (i < size * batchCount) { -// batches.push(this.slice(i, i += size)); -// } -// batches.push(this.slice(size * batchCount)); -// } -// } -// return batches; -// }; - -// Array.prototype.predicateBatch = function (batcher: PredicateBatcherSync): T[][] { -// const batches: T[][] = []; -// let batch: T[] = []; -// const { executor, initial, persistAccumulator } = batcher; -// let accumulator = initial; -// for (let element of this) { -// const { updated, makeNextBatch } = executor(element, accumulator); -// accumulator = updated; -// if (!makeNextBatch) { -// batch.push(element); -// } else { -// batches.push(batch); -// batch = [element]; -// if (!persistAccumulator) { -// accumulator = initial; -// } -// } -// } -// batches.push(batch); -// return batches; -// }; - -// Array.prototype.predicateBatchAsync = async function (batcher: BatcherAsync): Promise { -// const batches: T[][] = []; -// let batch: T[] = []; -// const { executorAsync, initial, persistAccumulator } = batcher; -// let accumulator: A = initial; -// for (let element of this) { -// const { updated, makeNextBatch } = await executorAsync(element, accumulator); -// accumulator = updated; -// if (!makeNextBatch) { -// batch.push(element); -// } else { -// batches.push(batch); -// batch = [element]; -// if (!persistAccumulator) { -// accumulator = initial; -// } -// } -// } -// batches.push(batch); -// return batches; -// }; - -// Array.prototype.batch = function (batcher: BatcherSync): T[][] { -// if ("executor" in batcher) { -// return this.predicateBatch(batcher); -// } else { -// return this.fixedBatch(batcher); -// } -// }; - -// Array.prototype.batchAsync = async function (batcher: Batcher): Promise { -// if ("executorAsync" in batcher) { -// return this.predicateBatchAsync(batcher); -// } else { -// return this.batch(batcher); -// } -// }; - -// Array.prototype.batchedForEach = function (batcher: BatcherSync, handler: BatchHandlerSync): void { -// if (this.length) { -// let 
completed = 0; -// const batches = this.batch(batcher); -// const quota = batches.length; -// for (let batch of batches) { -// const context: BatchContext = { -// completedBatches: completed, -// remainingBatches: quota - completed, -// }; -// handler(batch, context); -// completed++; -// } -// } -// }; - -// Array.prototype.batchedMap = function (batcher: BatcherSync, handler: BatchConverterSync): O[] { -// if (!this.length) { -// return []; -// } -// let collector: O[] = []; -// let completed = 0; -// const batches = this.batch(batcher); -// const quota = batches.length; -// for (let batch of batches) { -// const context: BatchContext = { -// completedBatches: completed, -// remainingBatches: quota - completed, -// }; -// collector.push(...handler(batch, context)); -// completed++; -// } -// return collector; -// }; - -// Array.prototype.batchedForEachAsync = async function (batcher: Batcher, handler: BatchHandler): Promise { -// if (this.length) { -// let completed = 0; -// const batches = await this.batchAsync(batcher); -// const quota = batches.length; -// for (let batch of batches) { -// const context: BatchContext = { -// completedBatches: completed, -// remainingBatches: quota - completed, -// }; -// await handler(batch, context); -// completed++; -// } -// } -// }; - -// Array.prototype.batchedMapAsync = async function (batcher: Batcher, handler: BatchConverter): Promise { -// if (!this.length) { -// return []; -// } -// let collector: O[] = []; -// let completed = 0; -// const batches = await this.batchAsync(batcher); -// const quota = batches.length; -// for (let batch of batches) { -// const context: BatchContext = { -// completedBatches: completed, -// remainingBatches: quota - completed, -// }; -// collector.push(...(await handler(batch, context))); -// completed++; -// } -// return collector; -// }; - -// Array.prototype.batchedForEachInterval = async function (batcher: Batcher, handler: BatchHandler, interval: Interval): Promise { -// if (!this.length) { -// return; -// } -// const batches = await this.batchAsync(batcher); -// const quota = batches.length; -// return new Promise(async resolve => { -// const iterator = batches[Symbol.iterator](); -// let completed = 0; -// while (true) { -// const next = iterator.next(); -// await new Promise(resolve => { -// setTimeout(async () => { -// const batch = next.value; -// const context: BatchContext = { -// completedBatches: completed, -// remainingBatches: quota - completed, -// }; -// await handler(batch, context); -// resolve(); -// }, convert(interval)); -// }); -// if (++completed === quota) { -// break; -// } -// } -// resolve(); -// }); -// }; - -// Array.prototype.batchedMapInterval = async function (batcher: Batcher, handler: BatchConverter, interval: Interval): Promise { -// if (!this.length) { -// return []; -// } -// let collector: O[] = []; -// const batches = await this.batchAsync(batcher); -// const quota = batches.length; -// return new Promise(async resolve => { -// const iterator = batches[Symbol.iterator](); -// let completed = 0; -// while (true) { -// const next = iterator.next(); -// await new Promise(resolve => { -// setTimeout(async () => { -// const batch = next.value; -// const context: BatchContext = { -// completedBatches: completed, -// remainingBatches: quota - completed, -// }; -// collector.push(...(await handler(batch, context))); -// resolve(); -// }, convert(interval)); -// }); -// if (++completed === quota) { -// resolve(collector); -// break; -// } -// } -// }); -// }; - 
-Array.prototype.lastElement = function () { - if (!this.length) { - return undefined; - } - const last: T = this[this.length - 1]; - return last; -}; - -// }; +} -// const convert = (interval: Interval) => { -// const { magnitude, unit } = interval; -// switch (unit) { -// default: -// case module.exports.TimeUnit.Milliseconds: -// return magnitude; -// case module.exports.TimeUnit.Seconds: -// return magnitude * 1000; -// case module.exports.TimeUnit.Minutes: -// return magnitude * 1000 * 60; -// } -// }; \ No newline at end of file +export { Assign }; \ No newline at end of file diff --git a/src/extensions/Extensions.ts b/src/extensions/Extensions.ts deleted file mode 100644 index 1391140b9..000000000 --- a/src/extensions/Extensions.ts +++ /dev/null @@ -1,7 +0,0 @@ -const ArrayExtensions = require("./ArrayExtensions"); -const StringExtensions = require("./StringExtensions"); - -module.exports.AssignExtensions = function () { - // ArrayExtensions.Assign(); - StringExtensions.Assign(); -}; \ No newline at end of file diff --git a/src/extensions/General/Extensions.ts b/src/extensions/General/Extensions.ts new file mode 100644 index 000000000..4b6d05d5f --- /dev/null +++ b/src/extensions/General/Extensions.ts @@ -0,0 +1,9 @@ +import { Assign as ArrayAssign } from "../ArrayExtensions"; +import { Assign as StringAssign } from "../StringExtensions"; + +function AssignAllExtensions() { + ArrayAssign(); + StringAssign(); +} + +export { AssignAllExtensions }; \ No newline at end of file diff --git a/src/extensions/General/ExtensionsTypings.ts b/src/extensions/General/ExtensionsTypings.ts new file mode 100644 index 000000000..370157ed0 --- /dev/null +++ b/src/extensions/General/ExtensionsTypings.ts @@ -0,0 +1,8 @@ +interface Array { + lastElement(): T; +} + +interface String { + removeTrailingNewlines(): string; + hasNewline(): boolean; +} \ No newline at end of file diff --git a/src/extensions/StringExtensions.ts b/src/extensions/StringExtensions.ts index 4cdbdebf7..2c76e56c8 100644 --- a/src/extensions/StringExtensions.ts +++ b/src/extensions/StringExtensions.ts @@ -1,9 +1,4 @@ -interface String { - removeTrailingNewlines(): string; - hasNewline(): boolean; -} - -module.exports.Assign = function () { +function Assign() { String.prototype.removeTrailingNewlines = function () { let sliced = this; @@ -17,4 +12,6 @@ module.exports.Assign = function () { return this.endsWith("\n"); }; -}; \ No newline at end of file +} + +export { Assign }; \ No newline at end of file diff --git a/src/server/DashUploadUtils.ts b/src/server/DashUploadUtils.ts new file mode 100644 index 000000000..66874e96c --- /dev/null +++ b/src/server/DashUploadUtils.ts @@ -0,0 +1,143 @@ +import * as fs from 'fs'; +import { Utils } from '../Utils'; +import * as path from 'path'; +import { Opt } from '../new_fields/Doc'; +import * as sharp from 'sharp'; +import request = require('request-promise'); + +const uploadDirectory = path.join(__dirname, './public/files/'); + +export namespace DashUploadUtils { + + export interface Size { + width: number; + suffix: string; + } + + export const Sizes: { [size: string]: Size } = { + SMALL: { width: 100, suffix: "_s" }, + MEDIUM: { width: 400, suffix: "_m" }, + LARGE: { width: 900, suffix: "_l" }, + }; + + const gifs = [".gif"]; + const pngs = [".png"]; + const jpgs = [".jpg", ".jpeg"]; + const imageFormats = [...pngs, ...jpgs, ...gifs]; + const videoFormats = [".mov", ".mp4"]; + + const size = "content-length"; + const type = "content-type"; + + export interface UploadInformation { + 
mediaPaths: string[]; + fileNames: { [key: string]: string }; + contentSize?: number; + contentType?: string; + } + + const generate = (prefix: string, url: string) => `${prefix}upload_${Utils.GenerateGuid()}${path.extname(url).toLowerCase()}`; + const sanitize = (filename: string) => filename.replace(/\s+/g, "_"); + + export interface InspectionResults { + isLocal: boolean; + stream: any; + normalizedUrl: string; + contentSize?: number; + contentType?: string; + } + + export const InspectImage = async (url: string): Promise => { + const { isLocal, stream, normalized: normalizedUrl } = classify(url); + const results = { + isLocal, + stream, + normalizedUrl + }; + if (isLocal) { + return results; + } + const metadata = (await new Promise((resolve, reject) => { + request.head(url, async (error, res) => { + if (error) { + return reject(error); + } + resolve(res); + }); + })).headers; + return { + contentSize: parseInt(metadata[size]), + contentType: metadata[type], + ...results + }; + }; + + export const UploadImage = async (metadata: InspectionResults, filename?: string, prefix = ""): Promise> => { + const { isLocal, stream, normalizedUrl, contentSize, contentType } = metadata; + const resolved = filename ? sanitize(filename) : generate(prefix, normalizedUrl); + let extension = path.extname(normalizedUrl) || path.extname(resolved); + extension && (extension = extension.toLowerCase()); + let information: UploadInformation = { + mediaPaths: [], + fileNames: { clean: resolved }, + contentSize, + contentType, + }; + return new Promise(async (resolve, reject) => { + const resizers = [ + { resizer: sharp().rotate(), suffix: "_o" }, + ...Object.values(Sizes).map(size => ({ + resizer: sharp().resize(size.width, undefined, { withoutEnlargement: true }).rotate(), + suffix: size.suffix + })) + ]; + let nonVisual = false; + if (pngs.includes(extension)) { + resizers.forEach(element => element.resizer = element.resizer.png()); + } else if (jpgs.includes(extension)) { + resizers.forEach(element => element.resizer = element.resizer.jpeg()); + } else if (![...imageFormats, ...videoFormats].includes(extension.toLowerCase())) { + nonVisual = true; + } + if (imageFormats.includes(extension)) { + for (let resizer of resizers) { + const suffix = resizer.suffix; + let mediaPath: string; + await new Promise(resolve => { + const filename = resolved.substring(0, resolved.length - extension.length) + suffix + extension; + information.mediaPaths.push(mediaPath = uploadDirectory + filename); + information.fileNames[suffix] = filename; + stream(normalizedUrl).pipe(resizer.resizer).pipe(fs.createWriteStream(mediaPath)) + .on('close', resolve) + .on('error', reject); + }); + } + } + if (!isLocal || nonVisual) { + await new Promise(resolve => { + stream(normalizedUrl).pipe(fs.createWriteStream(uploadDirectory + resolved)).on('close', resolve); + }); + } + resolve(information); + }); + }; + + const classify = (url: string) => { + const isLocal = /Dash-Web(\\|\/)src(\\|\/)server(\\|\/)public(\\|\/)files/g.test(url); + return { + isLocal, + stream: isLocal ? fs.createReadStream : request, + normalized: isLocal ? 
path.normalize(url) : url + }; + }; + + export const createIfNotExists = async (path: string) => { + if (await new Promise(resolve => fs.exists(path, resolve))) { + return true; + } + return new Promise(resolve => fs.mkdir(path, error => resolve(error === null))); + }; + + export const Destroy = (mediaPath: string) => new Promise(resolve => fs.unlink(mediaPath, error => resolve(error === null))); + +} \ No newline at end of file diff --git a/src/server/apis/google/CustomizedWrapper/filters.js b/src/server/apis/google/CustomizedWrapper/filters.js deleted file mode 100644 index 576a90b75..000000000 --- a/src/server/apis/google/CustomizedWrapper/filters.js +++ /dev/null @@ -1,46 +0,0 @@ -'use strict'; - -const DateFilter = require('../common/date_filter'); -const MediaTypeFilter = require('./media_type_filter'); -const ContentFilter = require('./content_filter'); - -class Filters { - constructor(includeArchivedMedia = false) { - this.includeArchivedMedia = includeArchivedMedia; - } - - setDateFilter(dateFilter) { - this.dateFilter = dateFilter; - return this; - } - - setContentFilter(contentFilter) { - this.contentFilter = contentFilter; - return this; - } - - setMediaTypeFilter(mediaTypeFilter) { - this.mediaTypeFilter = mediaTypeFilter; - return this; - } - - setIncludeArchivedMedia(includeArchivedMedia) { - this.includeArchivedMedia = includeArchivedMedia; - return this; - } - - toJSON() { - return { - dateFilter: this.dateFilter instanceof DateFilter ? this.dateFilter.toJSON() : this.dateFilter, - mediaTypeFilter: this.mediaTypeFilter instanceof MediaTypeFilter ? - this.mediaTypeFilter.toJSON() : - this.mediaTypeFilter, - contentFilter: this.contentFilter instanceof ContentFilter ? - this.contentFilter.toJSON() : - this.contentFilter, - includeArchivedMedia: this.includeArchivedMedia - }; - } -} - -module.exports = Filters; \ No newline at end of file diff --git a/src/server/apis/google/GoogleApiServerUtils.ts b/src/server/apis/google/GoogleApiServerUtils.ts index e0bd8a800..684a8081b 100644 --- a/src/server/apis/google/GoogleApiServerUtils.ts +++ b/src/server/apis/google/GoogleApiServerUtils.ts @@ -8,7 +8,7 @@ import { GaxiosResponse } from "gaxios"; import request = require('request-promise'); import * as qs from 'query-string'; import Photos = require('googlephotos'); - +import { Database } from "../../database"; /** * Server side authentication for Google Api queries. 
*/ @@ -35,9 +35,9 @@ export namespace GoogleApiServerUtils { Slides = "Slides" } - export interface CredentialPaths { + export interface CredentialInformation { credentialsPath: string; - tokenPath: string; + userId: string; } export type ApiResponse = Promise; @@ -48,7 +48,7 @@ export namespace GoogleApiServerUtils { export type Endpoint = { get: ApiHandler, create: ApiHandler, batchUpdate: ApiHandler }; export type EndpointParameters = GlobalOptions & { version: "v1" }; - export const GetEndpoint = (sector: string, paths: CredentialPaths) => { + export const GetEndpoint = (sector: string, paths: CredentialInformation) => { return new Promise>(resolve => { RetrieveCredentials(paths).then(authentication => { let routed: Opt; @@ -66,28 +66,28 @@ export namespace GoogleApiServerUtils { }); }; - export const RetrieveCredentials = (paths: CredentialPaths) => { + export const RetrieveCredentials = (information: CredentialInformation) => { return new Promise((resolve, reject) => { - readFile(paths.credentialsPath, async (err, credentials) => { + readFile(information.credentialsPath, async (err, credentials) => { if (err) { reject(err); return console.log('Error loading client secret file:', err); } - authorize(parseBuffer(credentials), paths.tokenPath).then(resolve, reject); + authorize(parseBuffer(credentials), information.userId).then(resolve, reject); }); }); }; - export const RetrieveAccessToken = (paths: CredentialPaths) => { + export const RetrieveAccessToken = (information: CredentialInformation) => { return new Promise((resolve, reject) => { - RetrieveCredentials(paths).then( + RetrieveCredentials(information).then( credentials => resolve(credentials.token.access_token!), error => reject(`Error: unable to authenticate Google Photos API request.\n${error}`) ); }); }; - export const RetrievePhotosEndpoint = (paths: CredentialPaths) => { + export const RetrievePhotosEndpoint = (paths: CredentialInformation) => { return new Promise((resolve, reject) => { RetrieveAccessToken(paths).then( token => resolve(new Photos(token)), @@ -101,20 +101,20 @@ export namespace GoogleApiServerUtils { * Create an OAuth2 client with the given credentials, and returns the promise resolving to the authenticated client * @param {Object} credentials The authorization client credentials. */ - export function authorize(credentials: any, token_path: string): Promise { + export function authorize(credentials: any, userId: string): Promise { const { client_secret, client_id, redirect_uris } = credentials.installed; const oAuth2Client = new google.auth.OAuth2( client_id, client_secret, redirect_uris[0]); return new Promise((resolve, reject) => { - readFile(token_path, (err, token) => { - // Check if we have previously stored a token. - if (err) { - return getNewToken(oAuth2Client, token_path).then(resolve, reject); + Database.Auxiliary.FetchGoogleAuthenticationToken(userId).then(token => { + // Check if we have previously stored a token for this userId. + if (!token) { + return getNewToken(oAuth2Client, userId).then(resolve, reject); } let parsed: Credentials = parseBuffer(token); if (parsed.expiry_date! 
< new Date().getTime()) { - return refreshToken(parsed, client_id, client_secret, oAuth2Client, token_path).then(resolve, reject); + return refreshToken(parsed, client_id, client_secret, oAuth2Client, userId).then(resolve, reject); } oAuth2Client.setCredentials(parsed); resolve({ token: parsed, client: oAuth2Client }); diff --git a/src/server/apis/google/GooglePhotosUploadUtils.ts b/src/server/apis/google/GooglePhotosUploadUtils.ts index 4dc252577..507a868a3 100644 --- a/src/server/apis/google/GooglePhotosUploadUtils.ts +++ b/src/server/apis/google/GooglePhotosUploadUtils.ts @@ -1,16 +1,10 @@ import request = require('request-promise'); import { GoogleApiServerUtils } from './GoogleApiServerUtils'; -import * as fs from 'fs'; -import { Utils } from '../../../Utils'; import * as path from 'path'; -import { Opt } from '../../../new_fields/Doc'; -import * as sharp from 'sharp'; import { MediaItemCreationResult } from './SharedTypes'; import { NewMediaItem } from "../../index"; import BatchedArray, { FixedBatcher, TimeUnit, Interval } from "array-batcher"; -const uploadDirectory = path.join(__dirname, "../../public/files/"); - export namespace GooglePhotosUploadUtils { export interface Paths { @@ -31,12 +25,9 @@ export namespace GooglePhotosUploadUtils { }); let Bearer: string; - let Paths: Paths; - export const initialize = async (paths: Paths) => { - Paths = paths; - const { tokenPath, credentialsPath } = paths; - const token = await GoogleApiServerUtils.RetrieveAccessToken({ tokenPath, credentialsPath }); + export const initialize = async (information: GoogleApiServerUtils.CredentialInformation) => { + const token = await GoogleApiServerUtils.RetrieveAccessToken(information); Bearer = `Bearer ${token}`; }; @@ -87,138 +78,4 @@ export namespace GooglePhotosUploadUtils { return { newMediaItemResults }; }; -} - -export namespace DownloadUtils { - - export interface Size { - width: number; - suffix: string; - } - - export const Sizes: { [size: string]: Size } = { - SMALL: { width: 100, suffix: "_s" }, - MEDIUM: { width: 400, suffix: "_m" }, - LARGE: { width: 900, suffix: "_l" }, - }; - - const gifs = [".gif"]; - const pngs = [".png"]; - const jpgs = [".jpg", ".jpeg"]; - const imageFormats = [...pngs, ...jpgs, ...gifs]; - const videoFormats = [".mov", ".mp4"]; - - const size = "content-length"; - const type = "content-type"; - - export interface UploadInformation { - mediaPaths: string[]; - fileNames: { [key: string]: string }; - contentSize?: number; - contentType?: string; - } - - const generate = (prefix: string, url: string) => `${prefix}upload_${Utils.GenerateGuid()}${path.extname(url).toLowerCase()}`; - const sanitize = (filename: string) => filename.replace(/\s+/g, "_"); - - export interface InspectionResults { - isLocal: boolean; - stream: any; - normalizedUrl: string; - contentSize?: number; - contentType?: string; - } - - export const InspectImage = async (url: string): Promise => { - const { isLocal, stream, normalized: normalizedUrl } = classify(url); - const results = { - isLocal, - stream, - normalizedUrl - }; - if (isLocal) { - return results; - } - const metadata = (await new Promise((resolve, reject) => { - request.head(url, async (error, res) => { - if (error) { - return reject(error); - } - resolve(res); - }); - })).headers; - return { - contentSize: parseInt(metadata[size]), - contentType: metadata[type], - ...results - }; - }; - - export const UploadImage = async (metadata: InspectionResults, filename?: string, prefix = ""): Promise> => { - const { isLocal, stream, 
normalizedUrl, contentSize, contentType } = metadata; - const resolved = filename ? sanitize(filename) : generate(prefix, normalizedUrl); - let extension = path.extname(normalizedUrl) || path.extname(resolved); - extension && (extension = extension.toLowerCase()); - let information: UploadInformation = { - mediaPaths: [], - fileNames: { clean: resolved }, - contentSize, - contentType, - }; - return new Promise(async (resolve, reject) => { - const resizers = [ - { resizer: sharp().rotate(), suffix: "_o" }, - ...Object.values(Sizes).map(size => ({ - resizer: sharp().resize(size.width, undefined, { withoutEnlargement: true }).rotate(), - suffix: size.suffix - })) - ]; - let nonVisual = false; - if (pngs.includes(extension)) { - resizers.forEach(element => element.resizer = element.resizer.png()); - } else if (jpgs.includes(extension)) { - resizers.forEach(element => element.resizer = element.resizer.jpeg()); - } else if (![...imageFormats, ...videoFormats].includes(extension.toLowerCase())) { - nonVisual = true; - } - if (imageFormats.includes(extension)) { - for (let resizer of resizers) { - const suffix = resizer.suffix; - let mediaPath: string; - await new Promise(resolve => { - const filename = resolved.substring(0, resolved.length - extension.length) + suffix + extension; - information.mediaPaths.push(mediaPath = uploadDirectory + filename); - information.fileNames[suffix] = filename; - stream(normalizedUrl).pipe(resizer.resizer).pipe(fs.createWriteStream(mediaPath)) - .on('close', resolve) - .on('error', reject); - }); - } - } - if (!isLocal || nonVisual) { - await new Promise(resolve => { - stream(normalizedUrl).pipe(fs.createWriteStream(uploadDirectory + resolved)).on('close', resolve); - }); - } - resolve(information); - }); - }; - - const classify = (url: string) => { - const isLocal = /Dash-Web(\\|\/)src(\\|\/)server(\\|\/)public(\\|\/)files/g.test(url); - return { - isLocal, - stream: isLocal ? fs.createReadStream : request, - normalized: isLocal ? 
path.normalize(url) : url - }; - }; - - export const createIfNotExists = async (path: string) => { - if (await new Promise(resolve => fs.exists(path, resolve))) { - return true; - } - return new Promise(resolve => fs.mkdir(path, error => resolve(error === null))); - }; - - export const Destroy = (mediaPath: string) => new Promise(resolve => fs.unlink(mediaPath, error => resolve(error === null))); } \ No newline at end of file diff --git a/src/server/apis/google/existing_uploads.json b/src/server/apis/google/existing_uploads.json deleted file mode 100644 index e69de29bb..000000000 diff --git a/src/server/credentials/google_docs_token.json b/src/server/credentials/google_docs_token.json index ee44c3f30..8bd62bdfa 100644 --- a/src/server/credentials/google_docs_token.json +++ b/src/server/credentials/google_docs_token.json @@ -1 +1,7 @@ -{"access_token":"ya29.GlyKBznz91v_qb8RYt4PT40Hp106N08Yk64UjMAKllBsIqJQEzBkxLbB5q5paydywHzguQYSNup5fT7ojJTDU4CMZdPbPKGcjQz17w_CospcG-8Buz94KZptvlQ_pQ","refresh_token":"1/HTv_xFHszu2Nf3iiFrUTaeKzC_Vp2-6bpIB06xW_WHI","scope":"https://www.googleapis.com/auth/presentations.readonly https://www.googleapis.com/auth/documents.readonly https://www.googleapis.com/auth/drive.file https://www.googleapis.com/auth/documents https://www.googleapis.com/auth/photoslibrary https://www.googleapis.com/auth/photoslibrary.appendonly https://www.googleapis.com/auth/drive https://www.googleapis.com/auth/presentations https://www.googleapis.com/auth/photoslibrary.sharing","token_type":"Bearer","expiry_date":1569093749804} \ No newline at end of file +{ + "access_token": "ya29.ImCOBwXgckGbyHNLMX7r-13B5VDgxfzF5mQ7lFJ0FX5GF5EuAPBBN5_ijLnNLC4yw4xtFjJOkEtKiYr-60OIm4oOnowEJpZMyRGxFMy_Q8MTnzDpeN-7Di_baUzcu7m_KWM", + "refresh_token": "1/HTv_xFHszu2Nf3iiFrUTaeKzC_Vp2-6bpIB06xW_WHI", + "scope": "https://www.googleapis.com/auth/presentations.readonly https://www.googleapis.com/auth/documents.readonly https://www.googleapis.com/auth/drive.file https://www.googleapis.com/auth/documents https://www.googleapis.com/auth/photoslibrary https://www.googleapis.com/auth/photoslibrary.appendonly https://www.googleapis.com/auth/drive https://www.googleapis.com/auth/presentations https://www.googleapis.com/auth/photoslibrary.sharing", + "token_type": "Bearer", + "expiry_date": 1569366907812 +} \ No newline at end of file diff --git a/src/server/database.ts b/src/server/database.ts index a7254fb0c..ce29478ad 100644 --- a/src/server/database.ts +++ b/src/server/database.ts @@ -1,209 +1,267 @@ import * as mongodb from 'mongodb'; import { Transferable } from './Message'; +import { Opt } from '../new_fields/Doc'; +import { Utils } from '../Utils'; +import { DashUploadUtils } from './DashUploadUtils'; -export class Database { - public static DocumentsCollection = 'documents'; - public static Instance = new Database(); - private MongoClient = mongodb.MongoClient; - private url = 'mongodb://localhost:27017/Dash'; - private currentWrites: { [id: string]: Promise } = {}; - private db?: mongodb.Db; - private onConnect: (() => void)[] = []; - - constructor() { - this.MongoClient.connect(this.url, (err, client) => { - this.db = client.db(); - this.onConnect.forEach(fn => fn()); - }); - } +export namespace Database { - public update(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateWriteOpResult) => void, upsert = true, collectionName = Database.DocumentsCollection) { - if (this.db) { - let collection = this.db.collection(collectionName); - const prom = this.currentWrites[id]; - let newProm: 
Promise; - const run = (): Promise => { - return new Promise(resolve => { - collection.updateOne({ _id: id }, value, { upsert } - , (err, res) => { - if (this.currentWrites[id] === newProm) { - delete this.currentWrites[id]; - } - resolve(); - callback(err, res); - }); - }); - }; - newProm = prom ? prom.then(run) : run(); - this.currentWrites[id] = newProm; - } else { - this.onConnect.push(() => this.update(id, value, callback, upsert, collectionName)); - } - } + class Database { + public static DocumentsCollection = 'documents'; + private MongoClient = mongodb.MongoClient; + private url = 'mongodb://localhost:27017/Dash'; + private currentWrites: { [id: string]: Promise } = {}; + private db?: mongodb.Db; + private onConnect: (() => void)[] = []; - public replace(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateWriteOpResult) => void, upsert = true, collectionName = Database.DocumentsCollection) { - if (this.db) { - let collection = this.db.collection(collectionName); - const prom = this.currentWrites[id]; - let newProm: Promise; - const run = (): Promise => { - return new Promise(resolve => { - collection.replaceOne({ _id: id }, value, { upsert } - , (err, res) => { - if (this.currentWrites[id] === newProm) { - delete this.currentWrites[id]; - } - resolve(); - callback(err, res); - }); - }); - }; - newProm = prom ? prom.then(run) : run(); - this.currentWrites[id] = newProm; - } else { - this.onConnect.push(() => this.replace(id, value, callback, upsert, collectionName)); + constructor() { + this.MongoClient.connect(this.url, (err, client) => { + this.db = client.db(); + this.onConnect.forEach(fn => fn()); + }); } - } - public delete(query: any, collectionName?: string): Promise; - public delete(id: string, collectionName?: string): Promise; - public delete(id: any, collectionName = Database.DocumentsCollection) { - if (typeof id === "string") { - id = { _id: id }; - } - if (this.db) { - const db = this.db; - return new Promise(res => db.collection(collectionName).deleteMany(id, (err, result) => res(result))); - } else { - return new Promise(res => this.onConnect.push(() => res(this.delete(id, collectionName)))); + public update(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateWriteOpResult) => void, upsert = true, collectionName = Database.DocumentsCollection) { + if (this.db) { + let collection = this.db.collection(collectionName); + const prom = this.currentWrites[id]; + let newProm: Promise; + const run = (): Promise => { + return new Promise(resolve => { + collection.updateOne({ _id: id }, value, { upsert } + , (err, res) => { + if (this.currentWrites[id] === newProm) { + delete this.currentWrites[id]; + } + resolve(); + callback(err, res); + }); + }); + }; + newProm = prom ? 
prom.then(run) : run(); + this.currentWrites[id] = newProm; + } else { + this.onConnect.push(() => this.update(id, value, callback, upsert, collectionName)); + } } - } - public deleteAll(collectionName = Database.DocumentsCollection): Promise { - return new Promise(res => { + public replace(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateWriteOpResult) => void, upsert = true, collectionName = Database.DocumentsCollection) { if (this.db) { - this.db.collection(collectionName).deleteMany({}, res); + let collection = this.db.collection(collectionName); + const prom = this.currentWrites[id]; + let newProm: Promise; + const run = (): Promise => { + return new Promise(resolve => { + collection.replaceOne({ _id: id }, value, { upsert } + , (err, res) => { + if (this.currentWrites[id] === newProm) { + delete this.currentWrites[id]; + } + resolve(); + callback(err, res); + }); + }); + }; + newProm = prom ? prom.then(run) : run(); + this.currentWrites[id] = newProm; } else { - this.onConnect.push(() => this.db && this.db.collection(collectionName).deleteMany({}, res)); + this.onConnect.push(() => this.replace(id, value, callback, upsert, collectionName)); } - }); - } + } - public insert(value: any, collectionName = Database.DocumentsCollection) { - if (this.db) { - if ("id" in value) { - value._id = value.id; - delete value.id; + public delete(query: any, collectionName?: string): Promise; + public delete(id: string, collectionName?: string): Promise; + public delete(id: any, collectionName = Database.DocumentsCollection) { + if (typeof id === "string") { + id = { _id: id }; + } + if (this.db) { + const db = this.db; + return new Promise(res => db.collection(collectionName).deleteMany(id, (err, result) => res(result))); + } else { + return new Promise(res => this.onConnect.push(() => res(this.delete(id, collectionName)))); } - const id = value._id; - const collection = this.db.collection(collectionName); - const prom = this.currentWrites[id]; - let newProm: Promise; - const run = (): Promise => { - return new Promise(resolve => { - collection.insertOne(value, (err, res) => { - if (this.currentWrites[id] === newProm) { - delete this.currentWrites[id]; - } - resolve(); - }); - }); - }; - newProm = prom ? 
prom.then(run) : run(); - this.currentWrites[id] = newProm; - } else { - this.onConnect.push(() => this.insert(value, collectionName)); } - } - public getDocument(id: string, fn: (result?: Transferable) => void, collectionName = Database.DocumentsCollection) { - if (this.db) { - this.db.collection(collectionName).findOne({ _id: id }, (err, result) => { - if (result) { - result.id = result._id; - delete result._id; - fn(result); + public async deleteAll(collectionName = Database.DocumentsCollection, persist = true): Promise { + return new Promise(resolve => { + const executor = async (database: mongodb.Db) => { + if (persist) { + await database.collection(collectionName).deleteMany({}); + } else { + await database.dropCollection(collectionName); + } + resolve(); + }; + if (this.db) { + executor(this.db); } else { - fn(undefined); + this.onConnect.push(() => this.db && executor(this.db)); } }); - } else { - this.onConnect.push(() => this.getDocument(id, fn, collectionName)); } - } - public getDocuments(ids: string[], fn: (result: Transferable[]) => void, collectionName = Database.DocumentsCollection) { - if (this.db) { - this.db.collection(collectionName).find({ _id: { "$in": ids } }).toArray((err, docs) => { - if (err) { - console.log(err.message); - console.log(err.errmsg); + public async insert(value: any, collectionName = Database.DocumentsCollection) { + if (this.db) { + if ("id" in value) { + value._id = value.id; + delete value.id; } - fn(docs.map(doc => { - doc.id = doc._id; - delete doc._id; - return doc; - })); - }); - } else { - this.onConnect.push(() => this.getDocuments(ids, fn, collectionName)); + const id = value._id; + const collection = this.db.collection(collectionName); + const prom = this.currentWrites[id]; + let newProm: Promise; + const run = (): Promise => { + return new Promise(resolve => { + collection.insertOne(value, (err, res) => { + if (this.currentWrites[id] === newProm) { + delete this.currentWrites[id]; + } + resolve(); + }); + }); + }; + newProm = prom ? 
prom.then(run) : run(); + this.currentWrites[id] = newProm; + return newProm; + } else { + this.onConnect.push(() => this.insert(value, collectionName)); + } } - } - public async visit(ids: string[], fn: (result: any) => string[], collectionName = "newDocuments"): Promise { - if (this.db) { - const visited = new Set(); - while (ids.length) { - const count = Math.min(ids.length, 1000); - const index = ids.length - count; - const fetchIds = ids.splice(index, count).filter(id => !visited.has(id)); - if (!fetchIds.length) { - continue; - } - const docs = await new Promise<{ [key: string]: any }[]>(res => Database.Instance.getDocuments(fetchIds, res, "newDocuments")); - for (const doc of docs) { - const id = doc.id; - visited.add(id); - ids.push(...fn(doc)); + public getDocument(id: string, fn: (result?: Transferable) => void, collectionName = Database.DocumentsCollection) { + if (this.db) { + this.db.collection(collectionName).findOne({ _id: id }, (err, result) => { + if (result) { + result.id = result._id; + delete result._id; + fn(result); + } else { + fn(undefined); + } + }); + } else { + this.onConnect.push(() => this.getDocument(id, fn, collectionName)); + } + } + + public getDocuments(ids: string[], fn: (result: Transferable[]) => void, collectionName = Database.DocumentsCollection) { + if (this.db) { + this.db.collection(collectionName).find({ _id: { "$in": ids } }).toArray((err, docs) => { + if (err) { + console.log(err.message); + console.log(err.errmsg); + } + fn(docs.map(doc => { + doc.id = doc._id; + delete doc._id; + return doc; + })); + }); + } else { + this.onConnect.push(() => this.getDocuments(ids, fn, collectionName)); + } + } + + public async visit(ids: string[], fn: (result: any) => string[], collectionName = "newDocuments"): Promise { + if (this.db) { + const visited = new Set(); + while (ids.length) { + const count = Math.min(ids.length, 1000); + const index = ids.length - count; + const fetchIds = ids.splice(index, count).filter(id => !visited.has(id)); + if (!fetchIds.length) { + continue; + } + const docs = await new Promise<{ [key: string]: any }[]>(res => Instance.getDocuments(fetchIds, res, "newDocuments")); + for (const doc of docs) { + const id = doc.id; + visited.add(id); + ids.push(...fn(doc)); + } } + + } else { + return new Promise(res => { + this.onConnect.push(() => { + this.visit(ids, fn, collectionName); + res(); + }); + }); } + } - } else { - return new Promise(res => { - this.onConnect.push(() => { - this.visit(ids, fn, collectionName); - res(); + public query(query: { [key: string]: any }, projection?: { [key: string]: 0 | 1 }, collectionName = "newDocuments"): Promise { + if (this.db) { + let cursor = this.db.collection(collectionName).find(query); + if (projection) { + cursor = cursor.project(projection); + } + return Promise.resolve(cursor); + } else { + return new Promise(res => { + this.onConnect.push(() => res(this.query(query, projection, collectionName))); }); - }); + } } - } - public query(query: { [key: string]: any }, projection?: { [key: string]: 0 | 1 }, collectionName = "newDocuments"): Promise { - if (this.db) { - let cursor = this.db.collection(collectionName).find(query); - if (projection) { - cursor = cursor.project(projection); + public updateMany(query: any, update: any, collectionName = "newDocuments") { + if (this.db) { + const db = this.db; + return new Promise(res => db.collection(collectionName).update(query, update, (_, result) => res(result))); + } else { + return new Promise(res => { + this.onConnect.push(() => 
this.updateMany(query, update, collectionName).then(res)); + }); } - return Promise.resolve(cursor); - } else { - return new Promise(res => { - this.onConnect.push(() => res(this.query(query, projection, collectionName))); - }); } - } - public updateMany(query: any, update: any, collectionName = "newDocuments") { - if (this.db) { - const db = this.db; - return new Promise(res => db.collection(collectionName).update(query, update, (_, result) => res(result))); - } else { - return new Promise(res => { - this.onConnect.push(() => this.updateMany(query, update, collectionName).then(res)); - }); + public print() { + console.log("db says hi!"); } } - public print() { - console.log("db says hi!"); + export const Instance = new Database(); + + export namespace Auxiliary { + + export enum AuxiliaryCollections { + GooglePhotosUploadHistory = "UploadedFromGooglePhotos" + } + + const GoogleAuthentication = "GoogleAuthentication"; + + const SanitizedSingletonQuery = async (query: { [key: string]: any }, collection: string) => { + const cursor = await Instance.query(query, undefined, collection); + const existing = (await cursor.toArray())[0]; + if (existing) { + delete existing._id; + } + return existing; + }; + + export const QueryUploadHistory = async (contentSize: number): Promise> => { + return SanitizedSingletonQuery({ contentSize }, AuxiliaryCollections.GooglePhotosUploadHistory); + }; + + export const LogUpload = async (information: DashUploadUtils.UploadInformation) => { + const bundle = { + _id: Utils.GenerateDeterministicGuid(String(information.contentSize!)), + ...information + }; + return Instance.insert(bundle, AuxiliaryCollections.GooglePhotosUploadHistory); + }; + + export const DeleteAll = async (persist = false) => { + const collectionNames = Object.values(AuxiliaryCollections); + const pendingDeletions = collectionNames.map(name => Instance.deleteAll(name, persist)); + return Promise.all(pendingDeletions); + }; + + export const FetchGoogleAuthenticationToken = async (userId: string) => { + return SanitizedSingletonQuery({ userId }, GoogleAuthentication); + }; + } -} + +} \ No newline at end of file diff --git a/src/server/index.ts b/src/server/index.ts index 4c4cb84d6..386ecce4d 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -41,13 +41,14 @@ var AdmZip = require('adm-zip'); import * as YoutubeApi from "./apis/youtube/youtubeApiSample"; import { Response } from 'express-serve-static-core'; import { GoogleApiServerUtils } from "./apis/google/GoogleApiServerUtils"; -import { GooglePhotosUploadUtils, DownloadUtils as UploadUtils } from './apis/google/GooglePhotosUploadUtils'; +import { GooglePhotosUploadUtils } from './apis/google/GooglePhotosUploadUtils'; const MongoStore = require('connect-mongo')(session); const mongoose = require('mongoose'); const probe = require("probe-image-size"); import * as qs from 'query-string'; import { Opt } from '../new_fields/Doc'; import BatchedArray, { TimeUnit } from "array-batcher"; +import { DashUploadUtils } from './DashUploadUtils'; const download = (url: string, dest: fs.PathLike) => request.get(url).pipe(fs.createWriteStream(dest)); let youtubeApiKey: string; @@ -581,8 +582,8 @@ app.post( for (const key in files) { const { type, path: location, name } = files[key]; const filename = path.basename(location); - const metadata = await UploadUtils.InspectImage(uploadDirectory + filename); - await UploadUtils.UploadImage(metadata, filename).catch(() => console.log(`Unable to process ${filename}`)); + const metadata = await 
DashUploadUtils.InspectImage(uploadDirectory + filename); + await DashUploadUtils.UploadImage(metadata, filename).catch(() => console.log(`Unable to process ${filename}`)); results.push({ name, type, path: `/files/${filename}` }); } _success(res, results); @@ -809,7 +810,7 @@ const EndpointHandlerMap = new Map { let sector: GoogleApiServerUtils.Service = req.params.sector as GoogleApiServerUtils.Service; let action: GoogleApiServerUtils.Action = req.params.action as GoogleApiServerUtils.Action; - GoogleApiServerUtils.GetEndpoint(GoogleApiServerUtils.Service[sector], { credentialsPath, tokenPath }).then(endpoint => { + GoogleApiServerUtils.GetEndpoint(GoogleApiServerUtils.Service[sector], { credentialsPath, userId: req.body.userId }).then(endpoint => { let handler = EndpointHandlerMap.get(action); if (endpoint && handler) { let execute = handler(endpoint, req.body).then( @@ -823,7 +824,7 @@ app.post(RouteStore.googleDocs + "/:sector/:action", (req, res) => { }); }); -app.get(RouteStore.googlePhotosAccessToken, (req, res) => GoogleApiServerUtils.RetrieveAccessToken({ credentialsPath, tokenPath }).then(token => res.send(token))); +app.get(RouteStore.googlePhotosAccessToken, (req, res) => GoogleApiServerUtils.RetrieveAccessToken({ credentialsPath, userId: req.body.userId }).then(token => res.send(token))); const tokenError = "Unable to successfully upload bytes for all images!"; const mediaError = "Unable to convert all uploaded bytes to media items!"; @@ -836,12 +837,13 @@ export interface NewMediaItem { } app.post(RouteStore.googlePhotosMediaUpload, async (req, res) => { - const mediaInput: GooglePhotosUploadUtils.MediaInput[] = req.body.media; - await GooglePhotosUploadUtils.initialize({ uploadDirectory, credentialsPath, tokenPath }); + const { userId, media } = req.body; + + await GooglePhotosUploadUtils.initialize({ credentialsPath, userId }); let failed = 0; - const newMediaItems = await BatchedArray.from(mediaInput, { batchSize: 25 }).batchedMapInterval( + const newMediaItems = await BatchedArray.from(media, { batchSize: 25 }).batchedMapInterval( async (batch: GooglePhotosUploadUtils.MediaInput[]) => { const newMediaItems: NewMediaItem[] = []; for (let element of batch) { @@ -879,31 +881,36 @@ const prefix = "google_photos_"; const downloadError = "Encountered an error while executing downloads."; const requestError = "Unable to execute download: the body's media items were malformed."; -app.get("/gapiCleanup", (req, res) => { - write_text_file(file, ""); +app.get("/deleteWithAux", async (req, res) => { + await Database.Auxiliary.DeleteAll(); res.redirect(RouteStore.delete); }); -const file = "./apis/google/existing_uploads.json"; +const UploadError = (count: number) => `Unable to upload ${count} images to Dash's server`; app.post(RouteStore.googlePhotosMediaDownload, async (req, res) => { const contents: { mediaItems: MediaItem[] } = req.body; + let failed = 0; if (contents) { - const completed: Opt[] = []; - const content = await read_text_file(file); - let existing = content.length ? 
JSON.parse(content) : {}; + const completed: Opt[] = []; for (let item of contents.mediaItems) { - const { contentSize, ...attributes } = await UploadUtils.InspectImage(item.baseUrl); - const found: UploadUtils.UploadInformation = existing[contentSize!]; + const { contentSize, ...attributes } = await DashUploadUtils.InspectImage(item.baseUrl); + const found: Opt = await Database.Auxiliary.QueryUploadHistory(contentSize!); if (!found) { - const upload = await UploadUtils.UploadImage({ contentSize, ...attributes }, item.filename, prefix).catch(error => _error(res, downloadError, error)); - upload && completed.push(existing[contentSize!] = upload); + const upload = await DashUploadUtils.UploadImage({ contentSize, ...attributes }, item.filename, prefix).catch(error => _error(res, downloadError, error)); + if (upload) { + completed.push(upload); + await Database.Auxiliary.LogUpload(upload); + } else { + failed++; + } } else { completed.push(found); } } - await write_text_file(file, JSON.stringify(existing)); - _success(res, completed); - return; + if (failed) { + return _error(res, UploadError(failed)); + } + return _success(res, completed); } _invalid(res, requestError); }); -- cgit v1.2.3-70-g09d2
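
A minimal usage sketch of the refactored extension pattern introduced in the second commit, assuming only what the hunks above show: AssignAllExtensions() from src/extensions/General/Extensions.ts patches Array.prototype.lastElement and the String helpers onto the global prototypes at runtime, while the ambient declarations in ExtensionsTypings.ts make them visible to the compiler. The sample data below is hypothetical.

// Called once at startup; the second patch does exactly this at the top of Main.tsx,
// before any code relies on the prototype extensions.
import { AssignAllExtensions } from "../../extensions/General/Extensions";

AssignAllExtensions();

// After assignment, the helpers declared in ExtensionsTypings.ts are available:
const tracks = ["intro", "verse", "chorus"];    // hypothetical sample data
const lastTrack = tracks.lastElement();         // "chorus"; undefined for an empty array

const note = "album uploaded\n";
if (note.hasNewline()) {
    console.log(note.removeTrailingNewlines()); // "album uploaded"
}

Keeping the runtime patching (each extension file's Assign()) separate from the compile-time declarations (ExtensionsTypings.ts) is what lets the batching logic move out to the external array-batcher package while lastElement and the String helpers stay local; the exact BatchedArray.from(...).batchedMapInterval(...) signature is only partially visible in the hunks above, so it is not reproduced here.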