Diffstat (limited to 'src')
-rw-r--r--  src/client/documents/Documents.ts                       |  12
-rw-r--r--  src/client/util/Import & Export/DirectoryImportBox.tsx  | 818
-rw-r--r--  src/client/views/nodes/DocumentContentsView.tsx         |   2
-rw-r--r--  src/server/ApiManagers/GooglePhotosManager.ts           | 614
-rw-r--r--  src/server/index.ts                                     |   4
5 files changed, 718 insertions, 732 deletions
diff --git a/src/client/documents/Documents.ts b/src/client/documents/Documents.ts
index 37196187b..1b39fdb41 100644
--- a/src/client/documents/Documents.ts
+++ b/src/client/documents/Documents.ts
@@ -20,7 +20,6 @@ import { YoutubeBox } from '../apis/youtube/YoutubeBox';
import { DocServer } from '../DocServer';
import { Networking } from '../Network';
import { DragManager, dropActionType } from '../util/DragManager';
-import { DirectoryImportBox } from '../util/Import & Export/DirectoryImportBox';
import { FollowLinkScript } from '../util/LinkFollower';
import { LinkManager } from '../util/LinkManager';
import { ScriptingGlobals } from '../util/ScriptingGlobals';
@@ -568,13 +567,6 @@ export namespace Docs {
},
],
[
- DocumentType.IMPORT,
- {
- layout: { view: DirectoryImportBox, dataField: defaultDataKey },
- options: { _height: 150 },
- },
- ],
- [
DocumentType.LINK,
{
layout: { view: LinkBox, dataField: 'link' },
@@ -1237,10 +1229,6 @@ export namespace Docs {
return ret;
}
- export function DirectoryImportDocument(options: DocumentOptions = {}) {
- return InstanceFromProto(Prototypes.get(DocumentType.IMPORT), new List<Doc>(), options);
- }
-
export type DocConfig = {
doc: Doc;
initialWidth?: number;
diff --git a/src/client/util/Import & Export/DirectoryImportBox.tsx b/src/client/util/Import & Export/DirectoryImportBox.tsx
index 55aac5eb0..b6dbea33a 100644
--- a/src/client/util/Import & Export/DirectoryImportBox.tsx
+++ b/src/client/util/Import & Export/DirectoryImportBox.tsx
@@ -1,439 +1,439 @@
-import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
-import { BatchedArray } from 'array-batcher';
-import { action, computed, IReactionDisposer, observable, reaction, runInAction } from 'mobx';
-import { observer } from 'mobx-react';
-import { extname } from 'path';
-import Measure, { ContentRect } from 'react-measure';
-import { Doc, DocListCast, DocListCastAsync, Opt } from '../../../fields/Doc';
-import { Id } from '../../../fields/FieldSymbols';
-import { List } from '../../../fields/List';
-import { listSpec } from '../../../fields/Schema';
-import { SchemaHeaderField } from '../../../fields/SchemaHeaderField';
-import { BoolCast, Cast, NumCast } from '../../../fields/Types';
-import { AcceptableMedia, Upload } from '../../../server/SharedMediaTypes';
-import { Utils } from '../../../Utils';
-import { GooglePhotos } from '../../apis/google_docs/GooglePhotosClientUtils';
-import { Docs, DocumentOptions, DocUtils } from '../../documents/Documents';
-import { DocumentType } from '../../documents/DocumentTypes';
-import { Networking } from '../../Network';
-import { FieldView, FieldViewProps } from '../../views/nodes/FieldView';
-import { DocumentManager } from '../DocumentManager';
-import './DirectoryImportBox.scss';
-import ImportMetadataEntry, { keyPlaceholder, valuePlaceholder } from './ImportMetadataEntry';
-import * as React from 'react';
+// import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
+// import { BatchedArray } from 'array-batcher';
+// import { action, computed, IReactionDisposer, observable, reaction, runInAction } from 'mobx';
+// import { observer } from 'mobx-react';
+// import { extname } from 'path';
+// import Measure, { ContentRect } from 'react-measure';
+// import { Doc, DocListCast, DocListCastAsync, Opt } from '../../../fields/Doc';
+// import { Id } from '../../../fields/FieldSymbols';
+// import { List } from '../../../fields/List';
+// import { listSpec } from '../../../fields/Schema';
+// import { SchemaHeaderField } from '../../../fields/SchemaHeaderField';
+// import { BoolCast, Cast, NumCast } from '../../../fields/Types';
+// import { AcceptableMedia, Upload } from '../../../server/SharedMediaTypes';
+// import { Utils } from '../../../Utils';
+// import { GooglePhotos } from '../../apis/google_docs/GooglePhotosClientUtils';
+// import { Docs, DocumentOptions, DocUtils } from '../../documents/Documents';
+// import { DocumentType } from '../../documents/DocumentTypes';
+// import { Networking } from '../../Network';
+// import { FieldView, FieldViewProps } from '../../views/nodes/FieldView';
+// import { DocumentManager } from '../DocumentManager';
+// import './DirectoryImportBox.scss';
+// import ImportMetadataEntry, { keyPlaceholder, valuePlaceholder } from './ImportMetadataEntry';
+// import * as React from 'react';
-const unsupported = ['text/html', 'text/plain'];
+// const unsupported = ['text/html', 'text/plain'];
-@observer
-export class DirectoryImportBox extends React.Component<FieldViewProps> {
- private selector = React.createRef<HTMLInputElement>();
- @observable private top = 0;
- @observable private left = 0;
- private dimensions = 50;
- @observable private phase = '';
- private disposer: Opt<IReactionDisposer>;
+// @observer
+// export class DirectoryImportBox extends React.Component<FieldViewProps> {
+// private selector = React.createRef<HTMLInputElement>();
+// @observable private top = 0;
+// @observable private left = 0;
+// private dimensions = 50;
+// @observable private phase = '';
+// private disposer: Opt<IReactionDisposer>;
- @observable private entries: ImportMetadataEntry[] = [];
+// @observable private entries: ImportMetadataEntry[] = [];
- @observable private quota = 1;
- @observable private completed = 0;
+// @observable private quota = 1;
+// @observable private completed = 0;
- @observable private uploading = false;
- @observable private removeHover = false;
+// @observable private uploading = false;
+// @observable private removeHover = false;
- public static LayoutString(fieldKey: string) {
- return FieldView.LayoutString(DirectoryImportBox, fieldKey);
- }
+// public static LayoutString(fieldKey: string) {
+// return FieldView.LayoutString(DirectoryImportBox, fieldKey);
+// }
- constructor(props: FieldViewProps) {
- super(props);
- const doc = this.props.Document;
- this.editingMetadata = this.editingMetadata || false;
- this.persistent = this.persistent || false;
- !Cast(doc.data, listSpec(Doc)) && (doc.data = new List<Doc>());
- }
+// constructor(props: FieldViewProps) {
+// super(props);
+// const doc = this.props.Document;
+// this.editingMetadata = this.editingMetadata || false;
+// this.persistent = this.persistent || false;
+// !Cast(doc.data, listSpec(Doc)) && (doc.data = new List<Doc>());
+// }
- @computed
- private get editingMetadata() {
- return BoolCast(this.props.Document.editingMetadata);
- }
+// @computed
+// private get editingMetadata() {
+// return BoolCast(this.props.Document.editingMetadata);
+// }
- private set editingMetadata(value: boolean) {
- this.props.Document.editingMetadata = value;
- }
+// private set editingMetadata(value: boolean) {
+// this.props.Document.editingMetadata = value;
+// }
- @computed
- private get persistent() {
- return BoolCast(this.props.Document.persistent);
- }
+// @computed
+// private get persistent() {
+// return BoolCast(this.props.Document.persistent);
+// }
- private set persistent(value: boolean) {
- this.props.Document.persistent = value;
- }
+// private set persistent(value: boolean) {
+// this.props.Document.persistent = value;
+// }
- handleSelection = async (e: React.ChangeEvent<HTMLInputElement>) => {
- runInAction(() => {
- this.uploading = true;
- this.phase = 'Initializing download...';
- });
+// handleSelection = async (e: React.ChangeEvent<HTMLInputElement>) => {
+// runInAction(() => {
+// this.uploading = true;
+// this.phase = 'Initializing download...';
+// });
- const docs: Doc[] = [];
+// const docs: Doc[] = [];
- const files = e.target.files;
- if (!files || files.length === 0) return;
+// const files = e.target.files;
+// if (!files || files.length === 0) return;
- const directory = (files.item(0) as any).webkitRelativePath.split('/', 1)[0];
+// const directory = (files.item(0) as any).webkitRelativePath.split('/', 1)[0];
- const validated: File[] = [];
- for (let i = 0; i < files.length; i++) {
- const file = files.item(i);
- if (file && !unsupported.includes(file.type)) {
- const ext = extname(file.name).toLowerCase();
- if (AcceptableMedia.imageFormats.includes(ext)) {
- validated.push(file);
- }
- }
- }
+// const validated: File[] = [];
+// for (let i = 0; i < files.length; i++) {
+// const file = files.item(i);
+// if (file && !unsupported.includes(file.type)) {
+// const ext = extname(file.name).toLowerCase();
+// if (AcceptableMedia.imageFormats.includes(ext)) {
+// validated.push(file);
+// }
+// }
+// }
- runInAction(() => {
- this.quota = validated.length;
- this.completed = 0;
- });
+// runInAction(() => {
+// this.quota = validated.length;
+// this.completed = 0;
+// });
- const sizes: number[] = [];
- const modifiedDates: number[] = [];
+// const sizes: number[] = [];
+// const modifiedDates: number[] = [];
- runInAction(() => (this.phase = `Internal: uploading ${this.quota - this.completed} files to Dash...`));
+// runInAction(() => (this.phase = `Internal: uploading ${this.quota - this.completed} files to Dash...`));
- const batched = BatchedArray.from(validated, { batchSize: 15 });
- const uploads = await batched.batchedMapAsync<Upload.FileResponse<Upload.ImageInformation>>(async (batch, collector) => {
- batch.forEach(file => {
- sizes.push(file.size);
- modifiedDates.push(file.lastModified);
- });
- collector.push(...(await Networking.UploadFilesToServer<Upload.ImageInformation>(batch.map(file => ({ file })))));
- runInAction(() => (this.completed += batch.length));
- });
+// const batched = BatchedArray.from(validated, { batchSize: 15 });
+// const uploads = await batched.batchedMapAsync<Upload.FileResponse<Upload.ImageInformation>>(async (batch, collector) => {
+// batch.forEach(file => {
+// sizes.push(file.size);
+// modifiedDates.push(file.lastModified);
+// });
+// collector.push(...(await Networking.UploadFilesToServer<Upload.ImageInformation>(batch.map(file => ({ file })))));
+// runInAction(() => (this.completed += batch.length));
+// });
- await Promise.all(
- uploads.map(async response => {
- const {
- source: { mimetype },
- result,
- } = response;
- if (result instanceof Error) {
- return;
- }
- const { accessPaths, exifData } = result;
- const path = Utils.prepend(accessPaths.agnostic.client);
- const document = mimetype && (await DocUtils.DocumentFromType(mimetype, path, { _width: 300 }));
- const { data, error } = exifData;
- if (document) {
- Doc.GetProto(document).exif = error || Doc.Get.FromJson({ data });
- docs.push(document);
- }
- })
- );
+// await Promise.all(
+// uploads.map(async response => {
+// const {
+// source: { mimetype },
+// result,
+// } = response;
+// if (result instanceof Error) {
+// return;
+// }
+// const { accessPaths, exifData } = result;
+// const path = Utils.prepend(accessPaths.agnostic.client);
+// const document = mimetype && (await DocUtils.DocumentFromType(mimetype, path, { _width: 300 }));
+// const { data, error } = exifData;
+// if (document) {
+// Doc.GetProto(document).exif = error || Doc.Get.FromJson({ data });
+// docs.push(document);
+// }
+// })
+// );
- for (let i = 0; i < docs.length; i++) {
- const doc = docs[i];
- doc.size = sizes[i];
- doc.modified = modifiedDates[i];
- this.entries.forEach(entry => {
- const target = entry.onDataDoc ? Doc.GetProto(doc) : doc;
- target[entry.key] = entry.value;
- });
- }
+// for (let i = 0; i < docs.length; i++) {
+// const doc = docs[i];
+// doc.size = sizes[i];
+// doc.modified = modifiedDates[i];
+// this.entries.forEach(entry => {
+// const target = entry.onDataDoc ? Doc.GetProto(doc) : doc;
+// target[entry.key] = entry.value;
+// });
+// }
- const doc = this.props.Document;
- const height: number = NumCast(doc.height) || 0;
- const offset: number = this.persistent ? (height === 0 ? 0 : height + 30) : 0;
- const options: DocumentOptions = {
- title: `Import of ${directory}`,
- _width: 1105,
- _height: 500,
- _chromeHidden: true,
- x: NumCast(doc.x),
- y: NumCast(doc.y) + offset,
- };
- const parent = this.props.DocumentView?.()._props.docViewPath().lastElement();
- if (parent?.Document.type === DocumentType.COL) {
- let importContainer: Doc;
- if (docs.length < 50) {
- importContainer = Docs.Create.MasonryDocument(docs, options);
- } else {
- const headers = [new SchemaHeaderField('title'), new SchemaHeaderField('size')];
- importContainer = Docs.Create.SchemaDocument(headers, docs, options);
- }
- runInAction(() => (this.phase = 'External: uploading files to Google Photos...'));
- await GooglePhotos.Export.CollectionToAlbum({ collection: importContainer });
- Doc.AddDocToList(Doc.GetProto(parent.props.Document), 'data', importContainer);
- !this.persistent && this.props.removeDocument && this.props.removeDocument(doc);
- DocumentManager.Instance.showDocument(importContainer, { willZoomCentered: true });
- }
+// const doc = this.props.Document;
+// const height: number = NumCast(doc.height) || 0;
+// const offset: number = this.persistent ? (height === 0 ? 0 : height + 30) : 0;
+// const options: DocumentOptions = {
+// title: `Import of ${directory}`,
+// _width: 1105,
+// _height: 500,
+// _chromeHidden: true,
+// x: NumCast(doc.x),
+// y: NumCast(doc.y) + offset,
+// };
+// const parent = this.props.DocumentView?.()._props.docViewPath().lastElement();
+// if (parent?.Document.type === DocumentType.COL) {
+// let importContainer: Doc;
+// if (docs.length < 50) {
+// importContainer = Docs.Create.MasonryDocument(docs, options);
+// } else {
+// const headers = [new SchemaHeaderField('title'), new SchemaHeaderField('size')];
+// importContainer = Docs.Create.SchemaDocument(headers, docs, options);
+// }
+// runInAction(() => (this.phase = 'External: uploading files to Google Photos...'));
+// await GooglePhotos.Export.CollectionToAlbum({ collection: importContainer });
+// Doc.AddDocToList(Doc.GetProto(parent.props.Document), 'data', importContainer);
+// !this.persistent && this.props.removeDocument && this.props.removeDocument(doc);
+// DocumentManager.Instance.showDocument(importContainer, { willZoomCentered: true });
+// }
- runInAction(() => {
- this.uploading = false;
- this.quota = 1;
- this.completed = 0;
- });
- };
+// runInAction(() => {
+// this.uploading = false;
+// this.quota = 1;
+// this.completed = 0;
+// });
+// };
- componentDidMount() {
- this.selector.current!.setAttribute('directory', '');
- this.selector.current!.setAttribute('webkitdirectory', '');
- this.disposer = reaction(
- () => this.completed,
- completed => runInAction(() => (this.phase = `Internal: uploading ${this.quota - completed} files to Dash...`))
- );
- }
+// componentDidMount() {
+// this.selector.current!.setAttribute('directory', '');
+// this.selector.current!.setAttribute('webkitdirectory', '');
+// this.disposer = reaction(
+// () => this.completed,
+// completed => runInAction(() => (this.phase = `Internal: uploading ${this.quota - completed} files to Dash...`))
+// );
+// }
- componentWillUnmount() {
- this.disposer && this.disposer();
- }
+// componentWillUnmount() {
+// this.disposer && this.disposer();
+// }
- @action
- preserveCentering = (rect: ContentRect) => {
- const bounds = rect.offset!;
- if (bounds.width === 0 || bounds.height === 0) {
- return;
- }
- const offset = this.dimensions / 2;
- this.left = bounds.width / 2 - offset;
- this.top = bounds.height / 2 - offset;
- };
+// @action
+// preserveCentering = (rect: ContentRect) => {
+// const bounds = rect.offset!;
+// if (bounds.width === 0 || bounds.height === 0) {
+// return;
+// }
+// const offset = this.dimensions / 2;
+// this.left = bounds.width / 2 - offset;
+// this.top = bounds.height / 2 - offset;
+// };
- @action
- addMetadataEntry = async () => {
- const entryDoc = new Doc();
- entryDoc.checked = false;
- entryDoc.key = keyPlaceholder;
- entryDoc.value = valuePlaceholder;
- Doc.AddDocToList(this.props.Document, 'data', entryDoc);
- };
+// @action
+// addMetadataEntry = async () => {
+// const entryDoc = new Doc();
+// entryDoc.checked = false;
+// entryDoc.key = keyPlaceholder;
+// entryDoc.value = valuePlaceholder;
+// Doc.AddDocToList(this.props.Document, 'data', entryDoc);
+// };
- @action
- remove = async (entry: ImportMetadataEntry) => {
- const metadata = await DocListCastAsync(this.props.Document.data);
- if (metadata) {
- let index = this.entries.indexOf(entry);
- if (index !== -1) {
- runInAction(() => this.entries.splice(index, 1));
- index = metadata.indexOf(entry.props.Document);
- if (index !== -1) {
- metadata.splice(index, 1);
- }
- }
- }
- };
+// @action
+// remove = async (entry: ImportMetadataEntry) => {
+// const metadata = await DocListCastAsync(this.props.Document.data);
+// if (metadata) {
+// let index = this.entries.indexOf(entry);
+// if (index !== -1) {
+// runInAction(() => this.entries.splice(index, 1));
+// index = metadata.indexOf(entry.props.Document);
+// if (index !== -1) {
+// metadata.splice(index, 1);
+// }
+// }
+// }
+// };
- render() {
- const dimensions = 50;
- const entries = DocListCast(this.props.Document.data);
- const isEditing = this.editingMetadata;
- const completed = this.completed;
- const quota = this.quota;
- const uploading = this.uploading;
- const showRemoveLabel = this.removeHover;
- const persistent = this.persistent;
- let percent = `${(completed / quota) * 100}`;
- percent = percent.split('.')[0];
- percent = percent.startsWith('100') ? '99' : percent;
- const marginOffset = (percent.length === 1 ? 5 : 0) - 1.6;
- const message = <span className={'phase'}>{this.phase}</span>;
- const centerPiece = this.phase.includes('Google Photos') ? (
- <img
- src={'/assets/google_photos.png'}
- style={{
- transition: '0.4s opacity ease',
- width: 30,
- height: 30,
- opacity: uploading ? 1 : 0,
- pointerEvents: 'none',
- position: 'absolute',
- left: 12,
- top: this.top + 10,
- fontSize: 18,
- color: 'white',
- marginLeft: this.left + marginOffset,
- }}
- />
- ) : (
- <div
- style={{
- transition: '0.4s opacity ease',
- opacity: uploading ? 1 : 0,
- pointerEvents: 'none',
- position: 'absolute',
- left: 10,
- top: this.top + 12.3,
- fontSize: 18,
- color: 'white',
- marginLeft: this.left + marginOffset,
- }}>
- {percent}%
- </div>
- );
- return (
- <Measure offset onResize={this.preserveCentering}>
- {({ measureRef }) => (
- <div ref={measureRef} style={{ width: '100%', height: '100%', pointerEvents: 'all' }}>
- {message}
- <input
- id={'selector'}
- ref={this.selector}
- onChange={this.handleSelection}
- type="file"
- style={{
- position: 'absolute',
- display: 'none',
- }}
- />
- <label
- htmlFor={'selector'}
- style={{
- opacity: isEditing ? 0 : 1,
- pointerEvents: isEditing ? 'none' : 'all',
- transition: '0.4s ease opacity',
- }}>
- <div
- style={{
- width: dimensions,
- height: dimensions,
- borderRadius: '50%',
- background: 'black',
- position: 'absolute',
- left: this.left,
- top: this.top,
- }}
- />
- <div
- style={{
- position: 'absolute',
- left: this.left + 8,
- top: this.top + 10,
- opacity: uploading ? 0 : 1,
- transition: '0.4s opacity ease',
- }}>
- <FontAwesomeIcon icon={'cloud-upload-alt'} color="#FFFFFF" size={'2x'} />
- </div>
- <img
- style={{
- width: 80,
- height: 80,
- transition: '0.4s opacity ease',
- opacity: uploading ? 0.7 : 0,
- position: 'absolute',
- top: this.top - 15,
- left: this.left - 15,
- }}
- src={'/assets/loading.gif'}></img>
- </label>
- <input
- type={'checkbox'}
- onChange={e => runInAction(() => (this.persistent = e.target.checked))}
- style={{
- margin: 0,
- position: 'absolute',
- left: 10,
- bottom: 10,
- opacity: isEditing || uploading ? 0 : 1,
- transition: '0.4s opacity ease',
- pointerEvents: isEditing || uploading ? 'none' : 'all',
- }}
- checked={this.persistent}
- onPointerEnter={action(() => (this.removeHover = true))}
- onPointerLeave={action(() => (this.removeHover = false))}
- />
- <p
- style={{
- position: 'absolute',
- left: 27,
- bottom: 8.4,
- fontSize: 12,
- opacity: showRemoveLabel ? 1 : 0,
- transition: '0.4s opacity ease',
- }}>
- Template will be <span style={{ textDecoration: 'underline', textDecorationColor: persistent ? 'green' : 'red', color: persistent ? 'green' : 'red' }}>{persistent ? 'kept' : 'removed'}</span> after upload
- </p>
- {centerPiece}
- <div
- style={{
- position: 'absolute',
- top: 10,
- right: 10,
- borderRadius: '50%',
- width: 25,
- height: 25,
- background: 'black',
- pointerEvents: uploading ? 'none' : 'all',
- opacity: uploading ? 0 : 1,
- transition: '0.4s opacity ease',
- }}
- title={isEditing ? 'Back to Upload' : 'Add Metadata'}
- onClick={action(() => (this.editingMetadata = !this.editingMetadata))}
- />
- <FontAwesomeIcon
- style={{
- pointerEvents: 'none',
- position: 'absolute',
- right: isEditing ? 14 : 15,
- top: isEditing ? 15.4 : 16,
- opacity: uploading ? 0 : 1,
- transition: '0.4s opacity ease',
- }}
- icon={isEditing ? 'cloud-upload-alt' : 'tag'}
- color="#FFFFFF"
- size={'1x'}
- />
- <div
- style={{
- transition: '0.4s ease opacity',
- width: '100%',
- height: '100%',
- pointerEvents: isEditing ? 'all' : 'none',
- opacity: isEditing ? 1 : 0,
- overflowY: 'scroll',
- }}>
- <div
- style={{
- borderRadius: '50%',
- width: 25,
- height: 25,
- marginLeft: 10,
- position: 'absolute',
- right: 41,
- top: 10,
- }}
- title={'Add Metadata Entry'}
- onClick={this.addMetadataEntry}>
- <FontAwesomeIcon
- style={{
- pointerEvents: 'none',
- marginLeft: 6.4,
- marginTop: 5.2,
- }}
- icon={'plus'}
- size={'1x'}
- />
- </div>
- <p style={{ paddingLeft: 10, paddingTop: 8, paddingBottom: 7 }}>Add metadata to your import...</p>
- <hr style={{ margin: '6px 10px 12px 10px' }} />
- {entries.map(doc => (
- <ImportMetadataEntry
- Document={doc}
- key={doc[Id]}
- remove={this.remove}
- ref={el => {
- if (el) this.entries.push(el);
- }}
- next={this.addMetadataEntry}
- />
- ))}
- </div>
- </div>
- )}
- </Measure>
- );
- }
-}
+// render() {
+// const dimensions = 50;
+// const entries = DocListCast(this.props.Document.data);
+// const isEditing = this.editingMetadata;
+// const completed = this.completed;
+// const quota = this.quota;
+// const uploading = this.uploading;
+// const showRemoveLabel = this.removeHover;
+// const persistent = this.persistent;
+// let percent = `${(completed / quota) * 100}`;
+// percent = percent.split('.')[0];
+// percent = percent.startsWith('100') ? '99' : percent;
+// const marginOffset = (percent.length === 1 ? 5 : 0) - 1.6;
+// const message = <span className={'phase'}>{this.phase}</span>;
+// const centerPiece = this.phase.includes('Google Photos') ? (
+// <img
+// src={'/assets/google_photos.png'}
+// style={{
+// transition: '0.4s opacity ease',
+// width: 30,
+// height: 30,
+// opacity: uploading ? 1 : 0,
+// pointerEvents: 'none',
+// position: 'absolute',
+// left: 12,
+// top: this.top + 10,
+// fontSize: 18,
+// color: 'white',
+// marginLeft: this.left + marginOffset,
+// }}
+// />
+// ) : (
+// <div
+// style={{
+// transition: '0.4s opacity ease',
+// opacity: uploading ? 1 : 0,
+// pointerEvents: 'none',
+// position: 'absolute',
+// left: 10,
+// top: this.top + 12.3,
+// fontSize: 18,
+// color: 'white',
+// marginLeft: this.left + marginOffset,
+// }}>
+// {percent}%
+// </div>
+// );
+// return (
+// <Measure offset onResize={this.preserveCentering}>
+// {({ measureRef }) => (
+// <div ref={measureRef} style={{ width: '100%', height: '100%', pointerEvents: 'all' }}>
+// {message}
+// <input
+// id={'selector'}
+// ref={this.selector}
+// onChange={this.handleSelection}
+// type="file"
+// style={{
+// position: 'absolute',
+// display: 'none',
+// }}
+// />
+// <label
+// htmlFor={'selector'}
+// style={{
+// opacity: isEditing ? 0 : 1,
+// pointerEvents: isEditing ? 'none' : 'all',
+// transition: '0.4s ease opacity',
+// }}>
+// <div
+// style={{
+// width: dimensions,
+// height: dimensions,
+// borderRadius: '50%',
+// background: 'black',
+// position: 'absolute',
+// left: this.left,
+// top: this.top,
+// }}
+// />
+// <div
+// style={{
+// position: 'absolute',
+// left: this.left + 8,
+// top: this.top + 10,
+// opacity: uploading ? 0 : 1,
+// transition: '0.4s opacity ease',
+// }}>
+// <FontAwesomeIcon icon={'cloud-upload-alt'} color="#FFFFFF" size={'2x'} />
+// </div>
+// <img
+// style={{
+// width: 80,
+// height: 80,
+// transition: '0.4s opacity ease',
+// opacity: uploading ? 0.7 : 0,
+// position: 'absolute',
+// top: this.top - 15,
+// left: this.left - 15,
+// }}
+// src={'/assets/loading.gif'}></img>
+// </label>
+// <input
+// type={'checkbox'}
+// onChange={e => runInAction(() => (this.persistent = e.target.checked))}
+// style={{
+// margin: 0,
+// position: 'absolute',
+// left: 10,
+// bottom: 10,
+// opacity: isEditing || uploading ? 0 : 1,
+// transition: '0.4s opacity ease',
+// pointerEvents: isEditing || uploading ? 'none' : 'all',
+// }}
+// checked={this.persistent}
+// onPointerEnter={action(() => (this.removeHover = true))}
+// onPointerLeave={action(() => (this.removeHover = false))}
+// />
+// <p
+// style={{
+// position: 'absolute',
+// left: 27,
+// bottom: 8.4,
+// fontSize: 12,
+// opacity: showRemoveLabel ? 1 : 0,
+// transition: '0.4s opacity ease',
+// }}>
+// Template will be <span style={{ textDecoration: 'underline', textDecorationColor: persistent ? 'green' : 'red', color: persistent ? 'green' : 'red' }}>{persistent ? 'kept' : 'removed'}</span> after upload
+// </p>
+// {centerPiece}
+// <div
+// style={{
+// position: 'absolute',
+// top: 10,
+// right: 10,
+// borderRadius: '50%',
+// width: 25,
+// height: 25,
+// background: 'black',
+// pointerEvents: uploading ? 'none' : 'all',
+// opacity: uploading ? 0 : 1,
+// transition: '0.4s opacity ease',
+// }}
+// title={isEditing ? 'Back to Upload' : 'Add Metadata'}
+// onClick={action(() => (this.editingMetadata = !this.editingMetadata))}
+// />
+// <FontAwesomeIcon
+// style={{
+// pointerEvents: 'none',
+// position: 'absolute',
+// right: isEditing ? 14 : 15,
+// top: isEditing ? 15.4 : 16,
+// opacity: uploading ? 0 : 1,
+// transition: '0.4s opacity ease',
+// }}
+// icon={isEditing ? 'cloud-upload-alt' : 'tag'}
+// color="#FFFFFF"
+// size={'1x'}
+// />
+// <div
+// style={{
+// transition: '0.4s ease opacity',
+// width: '100%',
+// height: '100%',
+// pointerEvents: isEditing ? 'all' : 'none',
+// opacity: isEditing ? 1 : 0,
+// overflowY: 'scroll',
+// }}>
+// <div
+// style={{
+// borderRadius: '50%',
+// width: 25,
+// height: 25,
+// marginLeft: 10,
+// position: 'absolute',
+// right: 41,
+// top: 10,
+// }}
+// title={'Add Metadata Entry'}
+// onClick={this.addMetadataEntry}>
+// <FontAwesomeIcon
+// style={{
+// pointerEvents: 'none',
+// marginLeft: 6.4,
+// marginTop: 5.2,
+// }}
+// icon={'plus'}
+// size={'1x'}
+// />
+// </div>
+// <p style={{ paddingLeft: 10, paddingTop: 8, paddingBottom: 7 }}>Add metadata to your import...</p>
+// <hr style={{ margin: '6px 10px 12px 10px' }} />
+// {entries.map(doc => (
+// <ImportMetadataEntry
+// Document={doc}
+// key={doc[Id]}
+// remove={this.remove}
+// ref={el => {
+// if (el) this.entries.push(el);
+// }}
+// next={this.addMetadataEntry}
+// />
+// ))}
+// </div>
+// </div>
+// )}
+// </Measure>
+// );
+// }
+// }
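
Note on the component disabled above: it combines two techniques worth keeping in mind if the importer is revived — a hidden file input promoted to a directory picker via the non-standard directory/webkitdirectory attributes (see componentDidMount), and sequential batched uploads (batch size 15) so the server is never hit with all files at once. A minimal TypeScript sketch of that pattern, independent of Dash's Doc and Networking layers; uploadBatch is a hypothetical stand-in for Networking.UploadFilesToServer:

function makeDirectoryPicker(onFiles: (files: File[]) => void): HTMLInputElement {
    const input = document.createElement('input');
    input.type = 'file';
    // Non-standard but widely supported attributes that turn the picker into a
    // directory picker, mirroring the setAttribute calls in componentDidMount above.
    input.setAttribute('directory', '');
    input.setAttribute('webkitdirectory', '');
    input.onchange = () => onFiles(Array.from(input.files ?? []));
    return input;
}

// Sequential batches, as BatchedArray.batchedMapAsync does above; each batch is
// awaited before the next is sent. uploadBatch is a hypothetical endpoint helper.
async function uploadInBatches(files: File[], uploadBatch: (batch: File[]) => Promise<void>, batchSize = 15): Promise<void> {
    for (let i = 0; i < files.length; i += batchSize) {
        await uploadBatch(files.slice(i, i + batchSize));
    }
}
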
diff --git a/src/client/views/nodes/DocumentContentsView.tsx b/src/client/views/nodes/DocumentContentsView.tsx
index 82d346206..c615e7ff0 100644
--- a/src/client/views/nodes/DocumentContentsView.tsx
+++ b/src/client/views/nodes/DocumentContentsView.tsx
@@ -6,7 +6,6 @@ import { ScriptField } from '../../../fields/ScriptField';
import { Cast, StrCast } from '../../../fields/Types';
import { GetEffectiveAcl, TraceMobx } from '../../../fields/util';
import { emptyPath, OmitKeys, Without } from '../../../Utils';
-import { DirectoryImportBox } from '../../util/Import & Export/DirectoryImportBox';
import { CollectionDockingView } from '../collections/CollectionDockingView';
import { CollectionFreeFormView } from '../collections/collectionFreeForm/CollectionFreeFormView';
import { CollectionSchemaView } from '../collections/collectionSchema/CollectionSchemaView';
@@ -247,7 +246,6 @@ export class DocumentContentsView extends React.Component<
components={{
FormattedTextBox,
ImageBox,
- DirectoryImportBox,
FontIconBox,
LabelBox,
EquationBox,
diff --git a/src/server/ApiManagers/GooglePhotosManager.ts b/src/server/ApiManagers/GooglePhotosManager.ts
index 4c2004681..5feb25fd4 100644
--- a/src/server/ApiManagers/GooglePhotosManager.ts
+++ b/src/server/ApiManagers/GooglePhotosManager.ts
@@ -1,324 +1,324 @@
-import ApiManager, { Registration } from './ApiManager';
-import { Method, _error, _success, _invalid } from '../RouteManager';
-import * as path from 'path';
-import { GoogleApiServerUtils } from '../apis/google/GoogleApiServerUtils';
-import { BatchedArray, TimeUnit } from 'array-batcher';
-import { Opt } from '../../fields/Doc';
-import { DashUploadUtils, InjectSize, SizeSuffix } from '../DashUploadUtils';
-import { Database } from '../database';
-import { red } from 'colors';
-import { Upload } from '../SharedMediaTypes';
-import * as request from 'request-promise';
-import { NewMediaItemResult } from '../apis/google/SharedTypes';
+// import ApiManager, { Registration } from './ApiManager';
+// import { Method, _error, _success, _invalid } from '../RouteManager';
+// import * as path from 'path';
+// import { GoogleApiServerUtils } from '../apis/google/GoogleApiServerUtils';
+// import { BatchedArray, TimeUnit } from 'array-batcher';
+// import { Opt } from '../../fields/Doc';
+// import { DashUploadUtils, InjectSize, SizeSuffix } from '../DashUploadUtils';
+// import { Database } from '../database';
+// import { red } from 'colors';
+// import { Upload } from '../SharedMediaTypes';
+// import * as request from 'request-promise';
+// import { NewMediaItemResult } from '../apis/google/SharedTypes';
-const prefix = 'google_photos_';
-const remoteUploadError = "None of the preliminary uploads to Google's servers was successful.";
-const authenticationError = 'Unable to authenticate Google credentials before uploading to Google Photos!';
-const mediaError = 'Unable to convert all uploaded bytes to media items!';
-const localUploadError = (count: number) => `Unable to upload ${count} images to Dash's server`;
-const requestError = "Unable to execute download: the body's media items were malformed.";
-const downloadError = 'Encountered an error while executing downloads.';
+// const prefix = 'google_photos_';
+// const remoteUploadError = "None of the preliminary uploads to Google's servers was successful.";
+// const authenticationError = 'Unable to authenticate Google credentials before uploading to Google Photos!';
+// const mediaError = 'Unable to convert all uploaded bytes to media items!';
+// const localUploadError = (count: number) => `Unable to upload ${count} images to Dash's server`;
+// const requestError = "Unable to execute download: the body's media items were malformed.";
+// const downloadError = 'Encountered an error while executing downloads.';
-interface GooglePhotosUploadFailure {
- batch: number;
- index: number;
- url: string;
- reason: string;
-}
+// interface GooglePhotosUploadFailure {
+// batch: number;
+// index: number;
+// url: string;
+// reason: string;
+// }
-interface MediaItem {
- baseUrl: string;
-}
+// interface MediaItem {
+// baseUrl: string;
+// }
-interface NewMediaItem {
- description: string;
- simpleMediaItem: {
- uploadToken: string;
- };
-}
+// interface NewMediaItem {
+// description: string;
+// simpleMediaItem: {
+// uploadToken: string;
+// };
+// }
-/**
- * This manager handles the creation of routes for Google Photos functionality.
- */
-export default class GooglePhotosManager extends ApiManager {
- protected initialize(register: Registration): void {
- /**
- * This route receives a list of urls that point to images stored
- * on Dash's file system, and, in a two step process, uploads them to Google's servers and
- * returns the information Google generates about the associated uploaded remote images.
- */
- register({
- method: Method.POST,
- subscription: '/googlePhotosMediaPost',
- secureHandler: async ({ user, req, res }) => {
- const { media } = req.body;
+// /**
+// * This manager handles the creation of routes for Google Photos functionality.
+// */
+// export default class GooglePhotosManager extends ApiManager {
+// protected initialize(register: Registration): void {
+// /**
+// * This route receives a list of urls that point to images stored
+// * on Dash's file system, and, in a two step process, uploads them to Google's servers and
+// * returns the information Google generates about the associated uploaded remote images.
+// */
+// register({
+// method: Method.POST,
+// subscription: '/googlePhotosMediaPost',
+// secureHandler: async ({ user, req, res }) => {
+// const { media } = req.body;
- // first we need to ensure that we know the google account to which these photos will be uploaded
- const token = (await GoogleApiServerUtils.retrieveCredentials(user.id))?.credentials?.access_token;
- if (!token) {
- return _error(res, authenticationError);
- }
+// // first we need to ensure that we know the google account to which these photos will be uploaded
+// const token = (await GoogleApiServerUtils.retrieveCredentials(user.id))?.credentials?.access_token;
+// if (!token) {
+// return _error(res, authenticationError);
+// }
- // next, having one large list or even synchronously looping over things trips a threshold
- // set on Google's servers, and would instantly return an error. So, we ease things out and send the photos to upload in
- // batches of 25, where the next batch is sent 100 milliseconds after we receive a response from Google's servers.
- const failed: GooglePhotosUploadFailure[] = [];
- const batched = BatchedArray.from<Uploader.UploadSource>(media, { batchSize: 25 });
- const interval = { magnitude: 100, unit: TimeUnit.Milliseconds };
- const newMediaItems = await batched.batchedMapPatientInterval<NewMediaItem>(interval, async (batch, collector, { completedBatches }) => {
- for (let index = 0; index < batch.length; index++) {
- const { url, description } = batch[index];
- // a local function used to record failure of an upload
- const fail = (reason: string) => failed.push({ reason, batch: completedBatches + 1, index, url });
- // see image resizing - we store the size-agnostic url in our logic, but write out size-suffixed images to the file system
- // so here, given a size agnostic url, we're just making that conversion so that the file system knows which bytes to actually upload
- const imageToUpload = InjectSize(url, SizeSuffix.Original);
- // STEP 1/2: send the raw bytes of the image from our server to Google's servers. We'll get back an upload token
- // which acts as a pointer to those bytes that we can use to locate them later on
- const uploadToken = await Uploader.SendBytes(token, imageToUpload).catch(fail);
- if (!uploadToken) {
- fail(`${path.extname(url)} is not an accepted extension`);
- } else {
- // gather the upload token returned from Google (a pointer they give us to the raw, currently useless bytes
- // we've uploaded to their servers) and put it in the JSON format that the API accepts for image creation (used soon, below)
- collector.push({
- description,
- simpleMediaItem: { uploadToken },
- });
- }
- }
- });
+// // next, having one large list or even synchronously looping over things trips a threshold
+// // set on Google's servers, and would instantly return an error. So, we ease things out and send the photos to upload in
+// // batches of 25, where the next batch is sent 100 milliseconds after we receive a response from Google's servers.
+// const failed: GooglePhotosUploadFailure[] = [];
+// const batched = BatchedArray.from<Uploader.UploadSource>(media, { batchSize: 25 });
+// const interval = { magnitude: 100, unit: TimeUnit.Milliseconds };
+// const newMediaItems = await batched.batchedMapPatientInterval<NewMediaItem>(interval, async (batch, collector, { completedBatches }) => {
+// for (let index = 0; index < batch.length; index++) {
+// const { url, description } = batch[index];
+// // a local function used to record failure of an upload
+// const fail = (reason: string) => failed.push({ reason, batch: completedBatches + 1, index, url });
+// // see image resizing - we store the size-agnostic url in our logic, but write out size-suffixed images to the file system
+// // so here, given a size agnostic url, we're just making that conversion so that the file system knows which bytes to actually upload
+// const imageToUpload = InjectSize(url, SizeSuffix.Original);
+// // STEP 1/2: send the raw bytes of the image from our server to Google's servers. We'll get back an upload token
+// // which acts as a pointer to those bytes that we can use to locate them later on
+// const uploadToken = await Uploader.SendBytes(token, imageToUpload).catch(fail);
+// if (!uploadToken) {
+// fail(`${path.extname(url)} is not an accepted extension`);
+// } else {
+// // gather the upload token returned from Google (a pointer they give us to the raw, currently useless bytes
+// // we've uploaded to their servers) and put it in the JSON format that the API accepts for image creation (used soon, below)
+// collector.push({
+// description,
+// simpleMediaItem: { uploadToken },
+// });
+// }
+// }
+// });
- // inform the developer / server console of any failed upload attempts
- // does not abort the operation, since some subset of the uploads may have been successful
- const { length } = failed;
- if (length) {
- console.error(`Unable to upload ${length} image${length === 1 ? '' : 's'} to Google's servers`);
- console.log(failed.map(({ reason, batch, index, url }) => `@${batch}.${index}: ${url} failed:\n${reason}`).join('\n\n'));
- }
+// // inform the developer / server console of any failed upload attempts
+// // does not abort the operation, since some subset of the uploads may have been successful
+// const { length } = failed;
+// if (length) {
+// console.error(`Unable to upload ${length} image${length === 1 ? '' : 's'} to Google's servers`);
+// console.log(failed.map(({ reason, batch, index, url }) => `@${batch}.${index}: ${url} failed:\n${reason}`).join('\n\n'));
+// }
- // if none of the preliminary uploads was successful, no need to try and create images
- // report the failure to the client and return
- if (!newMediaItems.length) {
- console.error(red(`${remoteUploadError} Thus, aborting image creation. Please try again.`));
- _error(res, remoteUploadError);
- return;
- }
+// // if none of the preliminary uploads was successful, no need to try and create images
+// // report the failure to the client and return
+// if (!newMediaItems.length) {
+// console.error(red(`${remoteUploadError} Thus, aborting image creation. Please try again.`));
+// _error(res, remoteUploadError);
+// return;
+// }
- // STEP 2/2: create the media items and return the API's response to the client, along with any failures
- return Uploader.CreateMediaItems(token, newMediaItems, req.body.album).then(
- results => _success(res, { results, failed }),
- error => _error(res, mediaError, error)
- );
- },
- });
+// // STEP 2/2: create the media items and return the API's response to the client, along with any failures
+// return Uploader.CreateMediaItems(token, newMediaItems, req.body.album).then(
+// results => _success(res, { results, failed }),
+// error => _error(res, mediaError, error)
+// );
+// },
+// });
- /**
- * This route receives a list of urls that point to images
- * stored on Google's servers and (following a *rough* heuristic)
- * uploads each image to Dash's server if it hasn't already been uploaded.
- * Unfortunately, since Google has so many of these images on its servers,
- * these user content urls expire every 6 hours. So we can't store the url of a locally uploaded
- * Google image and compare the candidate url to it to figure out if we already have it,
- * since the same bytes on their server might now be associated with a new, random url.
- * So, we do the next best thing and try to use an intrinsic attribute of those bytes as
- * an identifier: the precise content size. This works in small cases, but has the obvious flaw of failing to upload
- * an image locally if we already have uploaded another Google user content image with the exact same content size.
- */
- register({
- method: Method.POST,
- subscription: '/googlePhotosMediaGet',
- secureHandler: async ({ req, res }) => {
- const { mediaItems } = req.body as { mediaItems: MediaItem[] };
- if (!mediaItems) {
- // non-starter, since the input was in an invalid format
- _invalid(res, requestError);
- return;
- }
- let failed = 0;
- const completed: Opt<Upload.ImageInformation>[] = [];
- for (const { baseUrl } of mediaItems) {
- // start by getting the content size of the remote image
- const results = await DashUploadUtils.InspectImage(baseUrl);
- if (results instanceof Error) {
- // if something went wrong here, we can't hope to upload it, so just move on to the next
- failed++;
- continue;
- }
- const { contentSize, ...attributes } = results;
- // check to see if we have uploaded a Google user content image *specifically via this route* already
- // that has this exact content size
- const found: Opt<Upload.ImageInformation> = await Database.Auxiliary.QueryUploadHistory(contentSize);
- if (!found) {
- // if we haven't, then upload it locally to Dash's server
- const upload = await DashUploadUtils.UploadInspectedImage({ contentSize, ...attributes }, undefined, prefix, false).catch(error => _error(res, downloadError, error));
- if (upload) {
- completed.push(upload);
- // inform the heuristic that we've encountered an image with this content size,
- // to be later checked against in future uploads
- await Database.Auxiliary.LogUpload(upload);
- } else {
- // make note of a failure to upload locally
- failed++;
- }
- } else {
- // if we have, the variable 'found' is handily the upload information of the
- // existing image, so we add it to the list as if we had just uploaded it now without actually
- // making a duplicate write
- completed.push(found);
- }
- }
- // if there are any failures, report a general failure to the client
- if (failed) {
- return _error(res, localUploadError(failed));
- }
- // otherwise, return the image upload information list corresponding to the newly (or previously)
- // uploaded images
- _success(res, completed);
- },
- });
- }
-}
+// /**
+// * This route receives a list of urls that point to images
+// * stored on Google's servers and (following a *rough* heuristic)
+// * uploads each image to Dash's server if it hasn't already been uploaded.
+// * Unfortunately, since Google has so many of these images on its servers,
+// * these user content urls expire every 6 hours. So we can't store the url of a locally uploaded
+// * Google image and compare the candidate url to it to figure out if we already have it,
+// * since the same bytes on their server might now be associated with a new, random url.
+// * So, we do the next best thing and try to use an intrinsic attribute of those bytes as
+// * an identifier: the precise content size. This works in small cases, but has the obvious flaw of failing to upload
+// * an image locally if we already have uploaded another Google user content image with the exact same content size.
+// */
+// register({
+// method: Method.POST,
+// subscription: '/googlePhotosMediaGet',
+// secureHandler: async ({ req, res }) => {
+// const { mediaItems } = req.body as { mediaItems: MediaItem[] };
+// if (!mediaItems) {
+// // non-starter, since the input was in an invalid format
+// _invalid(res, requestError);
+// return;
+// }
+// let failed = 0;
+// const completed: Opt<Upload.ImageInformation>[] = [];
+// for (const { baseUrl } of mediaItems) {
+// // start by getting the content size of the remote image
+// const results = await DashUploadUtils.InspectImage(baseUrl);
+// if (results instanceof Error) {
+// // if something went wrong here, we can't hope to upload it, so just move on to the next
+// failed++;
+// continue;
+// }
+// const { contentSize, ...attributes } = results;
+// // check to see if we have uploaded a Google user content image *specifically via this route* already
+// // that has this exact content size
+// const found: Opt<Upload.ImageInformation> = await Database.Auxiliary.QueryUploadHistory(contentSize);
+// if (!found) {
+// // if we haven't, then upload it locally to Dash's server
+// const upload = await DashUploadUtils.UploadInspectedImage({ contentSize, ...attributes }, undefined, prefix, false).catch(error => _error(res, downloadError, error));
+// if (upload) {
+// completed.push(upload);
+// // inform the heuristic that we've encountered an image with this content size,
+// // to be later checked against in future uploads
+// await Database.Auxiliary.LogUpload(upload);
+// } else {
+// // make note of a failure to upload locally
+// failed++;
+// }
+// } else {
+// // if we have, the variable 'found' is handily the upload information of the
+// // existing image, so we add it to the list as if we had just uploaded it now without actually
+// // making a duplicate write
+// completed.push(found);
+// }
+// }
+// // if there are any failures, report a general failure to the client
+// if (failed) {
+// return _error(res, localUploadError(failed));
+// }
+// // otherwise, return the image upload information list corresponding to the newly (or previously)
+// // uploaded images
+// _success(res, completed);
+// },
+// });
+// }
+// }
-/**
- * This namespace encompasses the logic
- * necessary to upload images to Google's server,
- * and then initialize / create those images in the Photos
- * API given the upload tokens returned from the initial
- * uploading process.
- *
- * https://developers.google.com/photos/library/reference/rest/v1/mediaItems/batchCreate
- */
-export namespace Uploader {
- /**
- * Specifies the structure of the object
- * necessary to upload bytes to Google's servers.
- * The url is streamed to access the image's bytes,
- * and the description is what appears in Google Photos'
- * description field.
- */
- export interface UploadSource {
- url: string;
- description: string;
- }
+// /**
+// * This namespace encompasses the logic
+// * necessary to upload images to Google's server,
+// * and then initialize / create those images in the Photos
+// * API given the upload tokens returned from the initial
+// * uploading process.
+// *
+// * https://developers.google.com/photos/library/reference/rest/v1/mediaItems/batchCreate
+// */
+// export namespace Uploader {
+// /**
+// * Specifies the structure of the object
+// * necessary to upload bytes to Google's servers.
+// * The url is streamed to access the image's bytes,
+// * and the description is what appears in Google Photos'
+// * description field.
+// */
+// export interface UploadSource {
+// url: string;
+// description: string;
+// }
- /**
- * This is the format needed to pass
- * into the BatchCreate API request
- * to take a reference to raw uploaded bytes
- * and actually create an image in Google Photos.
- *
- * So, to instantiate this interface you must have already dispatched an upload
- * and received an upload token.
- */
- export interface NewMediaItem {
- description: string;
- simpleMediaItem: {
- uploadToken: string;
- };
- }
+// /**
+// * This is the format needed to pass
+// * into the BatchCreate API request
+// * to take a reference to raw uploaded bytes
+// * and actually create an image in Google Photos.
+// *
+// * So, to instantiate this interface you must have already dispatched an upload
+// * and received an upload token.
+// */
+// export interface NewMediaItem {
+// description: string;
+// simpleMediaItem: {
+// uploadToken: string;
+// };
+// }
- /**
- * A utility function to streamline making
- * calls to the API's url - accentuates
- * the relative path in the caller.
- * @param extension the desired
- * subset of the API
- */
- function prepend(extension: string): string {
- return `https://photoslibrary.googleapis.com/v1/${extension}`;
- }
+// /**
+// * A utility function to streamline making
+// * calls to the API's url - accentuates
+// * the relative path in the caller.
+// * @param extension the desired
+// * subset of the API
+// */
+// function prepend(extension: string): string {
+// return `https://photoslibrary.googleapis.com/v1/${extension}`;
+// }
- /**
- * Factors out the creation of the API request's
- * authentication elements stored in the header.
- * @param type the contents of the request
- * @param token the user-specific Google access token
- */
- function headers(type: string, token: string) {
- return {
- 'Content-Type': `application/${type}`,
- Authorization: `Bearer ${token}`,
- };
- }
+// /**
+// * Factors out the creation of the API request's
+// * authentication elements stored in the header.
+// * @param type the contents of the request
+// * @param token the user-specific Google access token
+// */
+// function headers(type: string, token: string) {
+// return {
+// 'Content-Type': `application/${type}`,
+// Authorization: `Bearer ${token}`,
+// };
+// }
- /**
- * This is the first step in the remote image creation process.
- * Here we upload the raw bytes of the image to Google's servers by
- * setting authentication and other required header properties and including
- * the raw bytes to the image, to be uploaded, in the body of the request.
- * @param bearerToken the user-specific Google access token, specifies the account associated
- * with the eventual image creation
- * @param url the url of the image to upload
- * @param filename an optional name associated with the uploaded image - if not specified
- * defaults to the filename (basename) in the url
- */
- export const SendBytes = async (bearerToken: string, url: string, filename?: string): Promise<any> => {
- // check if the url points to a non-image or an unsupported format
- if (!DashUploadUtils.validateExtension(url)) {
- return undefined;
- }
- const body = await request(url, { encoding: null }); // returns a readable stream with the unencoded binary image data
- const parameters = {
- method: 'POST',
- uri: prepend('uploads'),
- headers: {
- ...headers('octet-stream', bearerToken),
- 'X-Goog-Upload-File-Name': filename || path.basename(url),
- 'X-Goog-Upload-Protocol': 'raw',
- },
- body,
- };
- return new Promise((resolve, reject) =>
- request(parameters, (error, _response, body) => {
- if (error) {
- // on rejection, the server logs the error and the offending image
- return reject(error);
- }
- resolve(body);
- })
- );
- };
+// /**
+// * This is the first step in the remote image creation process.
+// * Here we upload the raw bytes of the image to Google's servers by
+// * setting authentication and other required header properties and including
+// * the raw bytes of the image, to be uploaded, in the body of the request.
+// * @param bearerToken the user-specific Google access token, specifies the account associated
+// * with the eventual image creation
+// * @param url the url of the image to upload
+// * @param filename an optional name associated with the uploaded image - if not specified
+// * defaults to the filename (basename) in the url
+// */
+// export const SendBytes = async (bearerToken: string, url: string, filename?: string): Promise<any> => {
+// // check if the url points to a non-image or an unsupported format
+// if (!DashUploadUtils.validateExtension(url)) {
+// return undefined;
+// }
+// const body = await request(url, { encoding: null }); // returns a readable stream with the unencoded binary image data
+// const parameters = {
+// method: 'POST',
+// uri: prepend('uploads'),
+// headers: {
+// ...headers('octet-stream', bearerToken),
+// 'X-Goog-Upload-File-Name': filename || path.basename(url),
+// 'X-Goog-Upload-Protocol': 'raw',
+// },
+// body,
+// };
+// return new Promise((resolve, reject) =>
+// request(parameters, (error, _response, body) => {
+// if (error) {
+// // on rejection, the server logs the error and the offending image
+// return reject(error);
+// }
+// resolve(body);
+// })
+// );
+// };
- /**
- * This is the second step in the remote image creation process: having uploaded
- * the raw bytes of the image and received / stored pointers (upload tokens) to those
- * bytes, we can now instruct the API to finalize the creation of those images by
- * submitting a batch create request with the list of upload tokens and the description
- * to be associated with each resulting new image.
- * @param bearerToken the user-specific Google access token, specifies the account associated
- * with the eventual image creation
- * @param newMediaItems a list of objects containing a description and, effectively, the
- * pointer to the uploaded bytes
- * @param album if included, will add all of the newly created remote images to the album
- * with the specified id
- */
- export const CreateMediaItems = async (bearerToken: string, newMediaItems: NewMediaItem[], album?: { id: string }): Promise<NewMediaItemResult[]> => {
- // it's important to note that the API can't handle more than 50 items in each request and
- // seems to need at least some latency between requests (spamming it synchronously has led to the server returning errors)...
- const batched = BatchedArray.from(newMediaItems, { batchSize: 50 });
- // ...so we execute them in delayed batches and await the entire execution
- return batched.batchedMapPatientInterval({ magnitude: 100, unit: TimeUnit.Milliseconds }, async (batch: NewMediaItem[], collector): Promise<void> => {
- const parameters = {
- method: 'POST',
- headers: headers('json', bearerToken),
- uri: prepend('mediaItems:batchCreate'),
- body: { newMediaItems: batch } as any,
- json: true,
- };
- // register the target album, if provided
- album && (parameters.body.albumId = album.id);
- collector.push(
- ...(await new Promise<NewMediaItemResult[]>((resolve, reject) => {
- request(parameters, (error, _response, body) => {
- if (error) {
- reject(error);
- } else {
- resolve(body.newMediaItemResults);
- }
- });
- }))
- );
- });
- };
-}
+// /**
+// * This is the second step in the remote image creation process: having uploaded
+// * the raw bytes of the image and received / stored pointers (upload tokens) to those
+// * bytes, we can now instruct the API to finalize the creation of those images by
+// * submitting a batch create request with the list of upload tokens and the description
+// * to be associated with each resulting new image.
+// * @param bearerToken the user-specific Google access token, specifies the account associated
+// * with the eventual image creation
+// * @param newMediaItems a list of objects containing a description and, effectively, the
+// * pointer to the uploaded bytes
+// * @param album if included, will add all of the newly created remote images to the album
+// * with the specified id
+// */
+// export const CreateMediaItems = async (bearerToken: string, newMediaItems: NewMediaItem[], album?: { id: string }): Promise<NewMediaItemResult[]> => {
+// // it's important to note that the API can't handle more than 50 items in each request and
+// // seems to need at least some latency between requests (spamming it synchronously has led to the server returning errors)...
+// const batched = BatchedArray.from(newMediaItems, { batchSize: 50 });
+// // ...so we execute them in delayed batches and await the entire execution
+// return batched.batchedMapPatientInterval({ magnitude: 100, unit: TimeUnit.Milliseconds }, async (batch: NewMediaItem[], collector): Promise<void> => {
+// const parameters = {
+// method: 'POST',
+// headers: headers('json', bearerToken),
+// uri: prepend('mediaItems:batchCreate'),
+// body: { newMediaItems: batch } as any,
+// json: true,
+// };
+// // register the target album, if provided
+// album && (parameters.body.albumId = album.id);
+// collector.push(
+// ...(await new Promise<NewMediaItemResult[]>((resolve, reject) => {
+// request(parameters, (error, _response, body) => {
+// if (error) {
+// reject(error);
+// } else {
+// resolve(body.newMediaItemResults);
+// }
+// });
+// }))
+// );
+// });
+// };
+// }
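
Note on the manager disabled above: it implements Google Photos' documented two-step media creation — POST the raw bytes to /v1/uploads to obtain an upload token, then reference that token in a mediaItems:batchCreate call, which the API caps at 50 items per request. A sketch of the same flow using Node's built-in fetch rather than the deprecated request-promise dependency; token acquisition, batching cadence, and error handling are assumed:

const PHOTOS_API = 'https://photoslibrary.googleapis.com/v1';

// Step 1/2: upload raw bytes. Google responds with an upload token (plain text)
// that points at the stored bytes but does not yet create a media item.
async function sendBytes(accessToken: string, bytes: Uint8Array, filename: string): Promise<string> {
    const res = await fetch(`${PHOTOS_API}/uploads`, {
        method: 'POST',
        headers: {
            'Content-Type': 'application/octet-stream',
            Authorization: `Bearer ${accessToken}`,
            'X-Goog-Upload-File-Name': filename,
            'X-Goog-Upload-Protocol': 'raw',
        },
        body: bytes,
    });
    return res.text();
}

// Step 2/2: turn upload tokens into real media items; batchCreate rejects batches larger than 50.
async function createMediaItems(accessToken: string, uploadTokens: string[], albumId?: string) {
    const newMediaItems = uploadTokens.slice(0, 50).map(uploadToken => ({
        description: '',
        simpleMediaItem: { uploadToken },
    }));
    const res = await fetch(`${PHOTOS_API}/mediaItems:batchCreate`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json', Authorization: `Bearer ${accessToken}` },
        body: JSON.stringify(albumId ? { albumId, newMediaItems } : { newMediaItems }),
    });
    const body = (await res.json()) as { newMediaItemResults: unknown[] };
    return body.newMediaItemResults;
}
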
diff --git a/src/server/index.ts b/src/server/index.ts
index e4aadf696..47c37c9f0 100644
--- a/src/server/index.ts
+++ b/src/server/index.ts
@@ -8,7 +8,7 @@ import DataVizManager from './ApiManagers/DataVizManager';
import DeleteManager from './ApiManagers/DeleteManager';
import DownloadManager from './ApiManagers/DownloadManager';
import GeneralGoogleManager from './ApiManagers/GeneralGoogleManager';
-import GooglePhotosManager from './ApiManagers/GooglePhotosManager';
+//import GooglePhotosManager from './ApiManagers/GooglePhotosManager';
import { SearchManager } from './ApiManagers/SearchManager';
import SessionManager from './ApiManagers/SessionManager';
import UploadManager from './ApiManagers/UploadManager';
@@ -63,7 +63,7 @@ async function preliminaryFunctions() {
* with the server
*/
function routeSetter({ isRelease, addSupervisedRoute, logRegistrationOutcome }: RouteManager) {
- const managers = [new SessionManager(), new UserManager(), new UploadManager(), new DownloadManager(), new SearchManager(), new DeleteManager(), new UtilManager(), new GeneralGoogleManager(), new GooglePhotosManager(), new DataVizManager()];
+ const managers = [new SessionManager(), new UserManager(), new UploadManager(), new DownloadManager(), new SearchManager(), new DeleteManager(), new UtilManager(), new GeneralGoogleManager(), /* new GooglePhotosManager(),*/ new DataVizManager()];
// initialize API Managers
console.log(yellow('\nregistering server routes...'));