Diffstat (limited to 'src/server')
-rw-r--r-- | src/server/DashUploadUtils.ts | 171
-rw-r--r-- | src/server/Message.ts | 1
-rw-r--r-- | src/server/RouteStore.ts | 6
-rw-r--r-- | src/server/apis/google/GoogleApiServerUtils.ts | 145
-rw-r--r-- | src/server/apis/google/GooglePhotosUploadUtils.ts | 165
-rw-r--r-- | src/server/apis/google/SharedTypes.ts | 21
-rw-r--r-- | src/server/authentication/config/passport.ts | 5
-rw-r--r-- | src/server/authentication/models/current_user_utils.ts | 6
-rw-r--r-- | src/server/credentials/google_docs_credentials.json | 12
-rw-r--r-- | src/server/credentials/google_docs_token.json | 1
-rw-r--r-- | src/server/database.ts | 430
-rw-r--r-- | src/server/index.ts | 268
-rw-r--r-- | src/server/updateSearch.ts | 123
13 files changed, 799 insertions, 555 deletions
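For orientation before the raw diff below: a minimal sketch (not part of the commit) of how the DashUploadUtils API introduced in this changeset might be exercised. mirrorRemoteImage and the "demo_" prefix are illustrative names only; the module, its exported functions, and the /public/files/ target directory come from the diff itself.

// Illustrative sketch only, assuming the DashUploadUtils module added in this diff.
import { DashUploadUtils } from "./DashUploadUtils";

async function mirrorRemoteImage(url: string) {
    // InspectImage classifies the source as local or remote and, for remote
    // urls, reads content-length and content-type from a HEAD request.
    const metadata = await DashUploadUtils.InspectImage(url);
    // UploadInspectedImage writes the original plus the _s, _m and _l resized
    // copies into /public/files/ and reports the generated file names.
    const info = await DashUploadUtils.UploadInspectedImage(metadata, undefined, "demo_");
    console.log(info.fileNames, info.contentSize, info.contentType);
    return info;
}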
diff --git a/src/server/DashUploadUtils.ts b/src/server/DashUploadUtils.ts new file mode 100644 index 000000000..4230e9b17 --- /dev/null +++ b/src/server/DashUploadUtils.ts @@ -0,0 +1,171 @@ +import * as fs from 'fs'; +import { Utils } from '../Utils'; +import * as path from 'path'; +import * as sharp from 'sharp'; +import request = require('request-promise'); + +const uploadDirectory = path.join(__dirname, './public/files/'); + +export namespace DashUploadUtils { + + export interface Size { + width: number; + suffix: string; + } + + export const Sizes: { [size: string]: Size } = { + SMALL: { width: 100, suffix: "_s" }, + MEDIUM: { width: 400, suffix: "_m" }, + LARGE: { width: 900, suffix: "_l" }, + }; + + const gifs = [".gif"]; + const pngs = [".png"]; + const jpgs = [".jpg", ".jpeg"]; + const imageFormats = [...pngs, ...jpgs, ...gifs]; + const videoFormats = [".mov", ".mp4"]; + + const size = "content-length"; + const type = "content-type"; + + export interface UploadInformation { + mediaPaths: string[]; + fileNames: { [key: string]: string }; + contentSize?: number; + contentType?: string; + } + + const generate = (prefix: string, url: string) => `${prefix}upload_${Utils.GenerateGuid()}${path.extname(url).toLowerCase()}`; + const sanitize = (filename: string) => filename.replace(/\s+/g, "_"); + + /** + * Uploads an image specified by the @param source to Dash's /public/files/ + * directory, and returns information generated during that upload + * + * @param {string} source is either the absolute path of an already uploaded image or + * the url of a remote image + * @param {string} filename dictates what to call the image. If not specified, + * the name {@param prefix}_upload_{GUID} + * @param {string} prefix is a string prepended to the generated image name in the + * event that @param filename is not specified + * + * @returns {UploadInformation} This method returns + * 1) the paths to the uploaded images (plural due to resizing) + * 2) the file name of each of the resized images + * 3) the size of the image, in bytes (4432130) + * 4) the content type of the image, i.e. image/(jpeg | png | ...) 
+ */ + export const UploadImage = async (source: string, filename?: string, prefix: string = ""): Promise<UploadInformation> => { + const metadata = await InspectImage(source); + return UploadInspectedImage(metadata, filename, prefix); + }; + + export interface InspectionResults { + isLocal: boolean; + stream: any; + normalizedUrl: string; + contentSize?: number; + contentType?: string; + } + + /** + * Based on the url's classification as local or remote, gleans + * as much information as possible about the specified image + * + * @param source is the path or url to the image in question + */ + export const InspectImage = async (source: string): Promise<InspectionResults> => { + const { isLocal, stream, normalized: normalizedUrl } = classify(source); + const results = { + isLocal, + stream, + normalizedUrl + }; + // stop here if local, since request.head() can't handle local paths, only urls on the web + if (isLocal) { + return results; + } + const metadata = (await new Promise<any>((resolve, reject) => { + request.head(source, async (error, res) => { + if (error) { + return reject(error); + } + resolve(res); + }); + })).headers; + return { + contentSize: parseInt(metadata[size]), + contentType: metadata[type], + ...results + }; + }; + + export const UploadInspectedImage = async (metadata: InspectionResults, filename?: string, prefix = ""): Promise<UploadInformation> => { + const { isLocal, stream, normalizedUrl, contentSize, contentType } = metadata; + const resolved = filename ? sanitize(filename) : generate(prefix, normalizedUrl); + let extension = path.extname(normalizedUrl) || path.extname(resolved); + extension && (extension = extension.toLowerCase()); + let information: UploadInformation = { + mediaPaths: [], + fileNames: { clean: resolved }, + contentSize, + contentType, + }; + return new Promise<UploadInformation>(async (resolve, reject) => { + const resizers = [ + { resizer: sharp().rotate(), suffix: "_o" }, + ...Object.values(Sizes).map(size => ({ + resizer: sharp().resize(size.width, undefined, { withoutEnlargement: true }).rotate(), + suffix: size.suffix + })) + ]; + let nonVisual = false; + if (pngs.includes(extension)) { + resizers.forEach(element => element.resizer = element.resizer.png()); + } else if (jpgs.includes(extension)) { + resizers.forEach(element => element.resizer = element.resizer.jpeg()); + } else if (![...imageFormats, ...videoFormats].includes(extension.toLowerCase())) { + nonVisual = true; + } + if (imageFormats.includes(extension)) { + for (let resizer of resizers) { + const suffix = resizer.suffix; + let mediaPath: string; + await new Promise<void>(resolve => { + const filename = resolved.substring(0, resolved.length - extension.length) + suffix + extension; + information.mediaPaths.push(mediaPath = uploadDirectory + filename); + information.fileNames[suffix] = filename; + stream(normalizedUrl).pipe(resizer.resizer).pipe(fs.createWriteStream(mediaPath)) + .on('close', resolve) + .on('error', reject); + }); + } + } + if (!isLocal || nonVisual) { + await new Promise<void>(resolve => { + stream(normalizedUrl).pipe(fs.createWriteStream(uploadDirectory + resolved)).on('close', resolve); + }); + } + resolve(information); + }); + }; + + const classify = (url: string) => { + const isLocal = /Dash-Web(\\|\/)src(\\|\/)server(\\|\/)public(\\|\/)files/g.test(url); + return { + isLocal, + stream: isLocal ? fs.createReadStream : request, + normalized: isLocal ? 
path.normalize(url) : url + }; + }; + + export const createIfNotExists = async (path: string) => { + if (await new Promise<boolean>(resolve => fs.exists(path, resolve))) { + return true; + } + return new Promise<boolean>(resolve => fs.mkdir(path, error => resolve(error === null))); + }; + + export const Destroy = (mediaPath: string) => new Promise<boolean>(resolve => fs.unlink(mediaPath, error => resolve(error === null))); + +}
\ No newline at end of file
diff --git a/src/server/Message.ts b/src/server/Message.ts
index 4ec390ade..aaee143e8 100644
--- a/src/server/Message.ts
+++ b/src/server/Message.ts
@@ -1,5 +1,4 @@
 import { Utils } from "../Utils";
-import { google, docs_v1 } from "googleapis";
 
 export class Message<T> {
     private _name: string;
diff --git a/src/server/RouteStore.ts b/src/server/RouteStore.ts
index 014906054..ee9cd8a0e 100644
--- a/src/server/RouteStore.ts
+++ b/src/server/RouteStore.ts
@@ -31,6 +31,10 @@ export enum RouteStore {
 
     // APIS
    cognitiveServices = "/cognitiveservices",
-    googleDocs = "/googleDocs"
+    googleDocs = "/googleDocs",
+    googlePhotosAccessToken = "/googlePhotosAccessToken",
+    googlePhotosMediaUpload = "/googlePhotosMediaUpload",
+    googlePhotosMediaDownload = "/googlePhotosMediaDownload",
+    googleDocsGet = "/googleDocsGet"
 }
\ No newline at end of file diff --git a/src/server/apis/google/GoogleApiServerUtils.ts b/src/server/apis/google/GoogleApiServerUtils.ts index 8785cd974..c899c2ef2 100644 --- a/src/server/apis/google/GoogleApiServerUtils.ts +++ b/src/server/apis/google/GoogleApiServerUtils.ts @@ -1,11 +1,14 @@ -import { google, docs_v1, slides_v1 } from "googleapis"; +import { google } from "googleapis"; import { createInterface } from "readline"; import { readFile, writeFile } from "fs"; -import { OAuth2Client } from "google-auth-library"; +import { OAuth2Client, Credentials } from "google-auth-library"; import { Opt } from "../../../new_fields/Doc"; import { GlobalOptions } from "googleapis-common"; import { GaxiosResponse } from "gaxios"; - +import request = require('request-promise'); +import * as qs from 'query-string'; +import Photos = require('googlephotos'); +import { Database } from "../../database"; /** * Server side authentication for Google Api queries. */ @@ -20,6 +23,9 @@ export namespace GoogleApiServerUtils { 'presentations.readonly', 'drive', 'drive.file', + 'photoslibrary', + 'photoslibrary.appendonly', + 'photoslibrary.sharing' ]; export const parseBuffer = (data: Buffer) => JSON.parse(data.toString()); @@ -29,74 +35,124 @@ export namespace GoogleApiServerUtils { Slides = "Slides" } - - export interface CredentialPaths { - credentials: string; - token: string; + export interface CredentialInformation { + credentialsPath: string; + userId: string; } export type ApiResponse = Promise<GaxiosResponse>; - export type ApiRouter = (endpoint: Endpoint, paramters: any) => ApiResponse; - export type ApiHandler = (parameters: any) => ApiResponse; + export type ApiRouter = (endpoint: Endpoint, parameters: any) => ApiResponse; + export type ApiHandler = (parameters: any, methodOptions?: any) => ApiResponse; export type Action = "create" | "retrieve" | "update"; export type Endpoint = { get: ApiHandler, create: ApiHandler, batchUpdate: ApiHandler }; export type EndpointParameters = GlobalOptions & { version: "v1" }; - export const GetEndpoint = async (sector: string, paths: CredentialPaths) => { - return new Promise<Opt<Endpoint>>((resolve, reject) => { - readFile(paths.credentials, (err, credentials) => { + export const GetEndpoint = (sector: string, paths: CredentialInformation) => { + return new Promise<Opt<Endpoint>>(resolve => { + RetrieveCredentials(paths).then(authentication => { + let routed: Opt<Endpoint>; + let parameters: EndpointParameters = { auth: authentication.client, version: "v1" }; + switch (sector) { + case Service.Documents: + routed = google.docs(parameters).documents; + break; + case Service.Slides: + routed = google.slides(parameters).presentations; + break; + } + resolve(routed); + }); + }); + }; + + export const RetrieveAccessToken = (information: CredentialInformation) => { + return new Promise<string>((resolve, reject) => { + RetrieveCredentials(information).then( + credentials => resolve(credentials.token.access_token!), + error => reject(`Error: unable to authenticate Google Photos API request.\n${error}`) + ); + }); + }; + + export const RetrieveCredentials = (information: CredentialInformation) => { + return new Promise<TokenResult>((resolve, reject) => { + readFile(information.credentialsPath, async (err, credentials) => { if (err) { reject(err); return console.log('Error loading client secret file:', err); } - return authorize(parseBuffer(credentials), paths.token).then(auth => { - let routed: Opt<Endpoint>; - let parameters: EndpointParameters = { auth, version: "v1" 
}; - switch (sector) { - case Service.Documents: - routed = google.docs(parameters).documents; - break; - case Service.Slides: - routed = google.slides(parameters).presentations; - break; - } - resolve(routed); - }); + authorize(parseBuffer(credentials), information.userId).then(resolve, reject); }); }); }; + export const RetrievePhotosEndpoint = (paths: CredentialInformation) => { + return new Promise<any>((resolve, reject) => { + RetrieveAccessToken(paths).then( + token => resolve(new Photos(token)), + reject + ); + }); + }; + type TokenResult = { token: Credentials, client: OAuth2Client }; /** * Create an OAuth2 client with the given credentials, and returns the promise resolving to the authenticated client * @param {Object} credentials The authorization client credentials. */ - export function authorize(credentials: any, token_path: string): Promise<OAuth2Client> { + export function authorize(credentials: any, userId: string): Promise<TokenResult> { const { client_secret, client_id, redirect_uris } = credentials.installed; - const oAuth2Client = new google.auth.OAuth2( - client_id, client_secret, redirect_uris[0]); - - return new Promise<OAuth2Client>((resolve, reject) => { - readFile(token_path, (err, token) => { - // Check if we have previously stored a token. - if (err) { - return getNewToken(oAuth2Client, token_path).then(resolve, reject); + const oAuth2Client = new google.auth.OAuth2(client_id, client_secret, redirect_uris[0]); + return new Promise<TokenResult>((resolve, reject) => { + // Attempting to authorize user (${userId}) + Database.Auxiliary.GoogleAuthenticationToken.Fetch(userId).then(token => { + if (!token) { + // No token registered, so awaiting input from user + return getNewToken(oAuth2Client, userId).then(resolve, reject); } - oAuth2Client.setCredentials(parseBuffer(token)); - resolve(oAuth2Client); + if (token.expiry_date! < new Date().getTime()) { + // Token has expired, so submitting a request for a refreshed access token + return refreshToken(token, client_id, client_secret, oAuth2Client, userId).then(resolve, reject); + } + // Authentication successful! + oAuth2Client.setCredentials(token); + resolve({ token, client: oAuth2Client }); }); }); } + const refreshEndpoint = "https://oauth2.googleapis.com/token"; + const refreshToken = (credentials: Credentials, client_id: string, client_secret: string, oAuth2Client: OAuth2Client, userId: string) => { + return new Promise<TokenResult>(resolve => { + let headerParameters = { headers: { 'Content-Type': 'application/x-www-form-urlencoded' } }; + let queryParameters = { + refreshToken: credentials.refresh_token, + client_id, + client_secret, + grant_type: "refresh_token" + }; + let url = `${refreshEndpoint}?${qs.stringify(queryParameters)}`; + request.post(url, headerParameters).then(async response => { + let { access_token, expires_in } = JSON.parse(response); + const expiry_date = new Date().getTime() + (expires_in * 1000); + await Database.Auxiliary.GoogleAuthenticationToken.Update(userId, access_token, expiry_date); + credentials.access_token = access_token; + credentials.expiry_date = expiry_date; + oAuth2Client.setCredentials(credentials); + resolve({ token: credentials, client: oAuth2Client }); + }); + }); + }; + /** * Get and store new token after prompting for user authorization, and then * execute the given callback with the authorized OAuth2 client. * @param {google.auth.OAuth2} oAuth2Client The OAuth2 client to get token for. * @param {getEventsCallback} callback The callback for the authorized client. 
*/ - function getNewToken(oAuth2Client: OAuth2Client, token_path: string) { - return new Promise<OAuth2Client>((resolve, reject) => { + function getNewToken(oAuth2Client: OAuth2Client, userId: string) { + return new Promise<TokenResult>((resolve, reject) => { const authUrl = oAuth2Client.generateAuthUrl({ access_type: 'offline', scope: SCOPES.map(relative => prefix + relative), @@ -108,21 +164,14 @@ export namespace GoogleApiServerUtils { }); rl.question('Enter the code from that page here: ', (code) => { rl.close(); - oAuth2Client.getToken(code, (err, token) => { + oAuth2Client.getToken(code, async (err, token) => { if (err || !token) { reject(err); return console.error('Error retrieving access token', err); } oAuth2Client.setCredentials(token); - // Store the token to disk for later program executions - writeFile(token_path, JSON.stringify(token), (err) => { - if (err) { - console.error(err); - reject(err); - } - console.log('Token stored to', token_path); - }); - resolve(oAuth2Client); + await Database.Auxiliary.GoogleAuthenticationToken.Write(userId, token); + resolve({ token, client: oAuth2Client }); }); }); }); diff --git a/src/server/apis/google/GooglePhotosUploadUtils.ts b/src/server/apis/google/GooglePhotosUploadUtils.ts index 35f986250..16c4f6c3a 100644 --- a/src/server/apis/google/GooglePhotosUploadUtils.ts +++ b/src/server/apis/google/GooglePhotosUploadUtils.ts @@ -1,12 +1,9 @@ import request = require('request-promise'); import { GoogleApiServerUtils } from './GoogleApiServerUtils'; -import * as fs from 'fs'; -import { Utils } from '../../../Utils'; import * as path from 'path'; -import { Opt } from '../../../new_fields/Doc'; -import * as sharp from 'sharp'; - -const uploadDirectory = path.join(__dirname, "../../public/files/"); +import { MediaItemCreationResult } from './SharedTypes'; +import { NewMediaItem } from "../../index"; +import { BatchedArray, TimeUnit } from 'array-batcher'; export namespace GooglePhotosUploadUtils { @@ -28,12 +25,9 @@ export namespace GooglePhotosUploadUtils { }); let Bearer: string; - let Paths: Paths; - export const initialize = async (paths: Paths) => { - Paths = paths; - const { tokenPath, credentialsPath } = paths; - const token = await GoogleApiServerUtils.RetrieveAccessToken({ tokenPath, credentialsPath }); + export const initialize = async (information: GoogleApiServerUtils.CredentialInformation) => { + const token = await GoogleApiServerUtils.RetrieveAccessToken(information); Bearer = `Bearer ${token}`; }; @@ -49,128 +43,39 @@ export namespace GooglePhotosUploadUtils { uri: prepend('uploads'), body }; - return new Promise<any>(resolve => request(parameters, (error, _response, body) => resolve(error ? 
undefined : body))); - }; - - export const CreateMediaItems = (newMediaItems: any[], album?: { id: string }) => { - return new Promise<any>((resolve, reject) => { - const parameters = { - method: 'POST', - headers: headers('json'), - uri: prepend('mediaItems:batchCreate'), - body: { newMediaItems } as any, - json: true - }; - album && (parameters.body.albumId = album.id); - request(parameters, (error, _response, body) => { - if (error) { - reject(error); - } else { - resolve(body); - } - }); - }); - }; - -} - -export namespace DownloadUtils { - - export interface Size { - width: number; - suffix: string; - } - - export const Sizes: { [size: string]: Size } = { - SMALL: { width: 100, suffix: "_s" }, - MEDIUM: { width: 400, suffix: "_m" }, - LARGE: { width: 900, suffix: "_l" }, - }; - - const png = ".png"; - const pngs = [".png", ".PNG"]; - const jpgs = [".jpg", ".JPG", ".jpeg", ".JPEG"]; - const formats = [".jpg", ".png", ".gif"]; - const size = "content-length"; - const type = "content-type"; - - export interface UploadInformation { - mediaPaths: string[]; - fileNames: { [key: string]: string }; - contentSize?: number; - contentType?: string; - } - - const generate = (prefix: string, url: string) => `${prefix}upload_${Utils.GenerateGuid()}${path.extname(url).toLowerCase()}`; - const sanitize = (filename: string) => filename.replace(/\s+/g, "_"); - - export const UploadImage = async (url: string, filename?: string, prefix = ""): Promise<Opt<UploadInformation>> => { - const resolved = filename ? sanitize(filename) : generate(prefix, url); - const extension = path.extname(url) || path.extname(resolved) || png; - let information: UploadInformation = { - mediaPaths: [], - fileNames: { clean: resolved } - }; - const { isLocal, stream, normalized } = classify(url); - url = normalized; - if (!isLocal) { - const metadata = (await new Promise<any>((resolve, reject) => { - request.head(url, async (error, res) => { - if (error) { - return reject(error); - } - resolve(res); - }); - })).headers; - information.contentSize = parseInt(metadata[size]); - information.contentType = metadata[type]; - } - return new Promise<UploadInformation>(async (resolve, reject) => { - const resizers = [ - { resizer: sharp().rotate(), suffix: "_o" }, - ...Object.values(Sizes).map(size => ({ - resizer: sharp().resize(size.width, undefined, { withoutEnlargement: true }).rotate(), - suffix: size.suffix - })) - ]; - if (pngs.includes(extension)) { - resizers.forEach(element => element.resizer = element.resizer.png()); - } else if (jpgs.includes(extension)) { - resizers.forEach(element => element.resizer = element.resizer.jpeg()); - } else if (!formats.includes(extension.toLowerCase())) { - return reject(); + return new Promise<any>((resolve, reject) => request(parameters, (error, _response, body) => { + if (error) { + console.log(error); + return reject(error); } - for (let resizer of resizers) { - const suffix = resizer.suffix; - let mediaPath: string; - await new Promise<void>(resolve => { - const filename = resolved.substring(0, resolved.length - extension.length) + suffix + extension; - information.mediaPaths.push(mediaPath = uploadDirectory + filename); - information.fileNames[suffix] = filename; - stream(url).pipe(resizer.resizer).pipe(fs.createWriteStream(mediaPath)) - .on('close', resolve) - .on('error', reject); - }); - } - resolve(information); - }); + resolve(body); + })); }; - const classify = (url: string) => { - const isLocal = /Dash-Web(\\|\/)src(\\|\/)server(\\|\/)public(\\|\/)files/g.test(url); - return { - 
isLocal, - stream: isLocal ? fs.createReadStream : request, - normalized: isLocal ? path.normalize(url) : url - }; - }; - - export const createIfNotExists = async (path: string) => { - if (await new Promise<boolean>(resolve => fs.exists(path, resolve))) { - return true; - } - return new Promise<boolean>(resolve => fs.mkdir(path, error => resolve(error === null))); + export const CreateMediaItems = async (newMediaItems: NewMediaItem[], album?: { id: string }): Promise<MediaItemCreationResult> => { + const newMediaItemResults = await BatchedArray.from(newMediaItems, { batchSize: 50 }).batchedMapPatientInterval( + { magnitude: 100, unit: TimeUnit.Milliseconds }, + async (batch: NewMediaItem[]) => { + const parameters = { + method: 'POST', + headers: headers('json'), + uri: prepend('mediaItems:batchCreate'), + body: { newMediaItems: batch } as any, + json: true + }; + album && (parameters.body.albumId = album.id); + return (await new Promise<MediaItemCreationResult>((resolve, reject) => { + request(parameters, (error, _response, body) => { + if (error) { + reject(error); + } else { + resolve(body); + } + }); + })).newMediaItemResults; + } + ); + return { newMediaItemResults }; }; - export const Destroy = (mediaPath: string) => new Promise<boolean>(resolve => fs.unlink(mediaPath, error => resolve(error === null))); }
\ No newline at end of file
diff --git a/src/server/apis/google/SharedTypes.ts b/src/server/apis/google/SharedTypes.ts
new file mode 100644
index 000000000..9ad6130b6
--- /dev/null
+++ b/src/server/apis/google/SharedTypes.ts
@@ -0,0 +1,21 @@
+export interface NewMediaItemResult {
+    uploadToken: string;
+    status: { code: number, message: string };
+    mediaItem: MediaItem;
+}
+
+export interface MediaItem {
+    id: string;
+    description: string;
+    productUrl: string;
+    baseUrl: string;
+    mimeType: string;
+    mediaMetadata: {
+        creationTime: string;
+        width: string;
+        height: string;
+    };
+    filename: string;
+}
+
+export type MediaItemCreationResult = { newMediaItemResults: NewMediaItemResult[] };
\ No newline at end of file diff --git a/src/server/authentication/config/passport.ts b/src/server/authentication/config/passport.ts index 10b17de71..f69b274d3 100644 --- a/src/server/authentication/config/passport.ts +++ b/src/server/authentication/config/passport.ts @@ -1,7 +1,6 @@ import * as passport from 'passport'; import * as passportLocal from 'passport-local'; -import * as mongodb from 'mongodb'; -import * as _ from "lodash"; +import _ from "lodash"; import { default as User } from '../models/user_model'; import { Request, Response, NextFunction } from "express"; import { RouteStore } from '../../RouteStore'; @@ -43,7 +42,7 @@ export let isAuthorized = (req: Request, res: Response, next: NextFunction) => { const provider = req.path.split("/").slice(-1)[0]; if (req.user) { - if (_.find((req.user as any).tokens, { kind: provider })) { + if (_.find(req.user.tokens, { kind: provider })) { next(); } else { res.redirect(`/auth/${provider}`); diff --git a/src/server/authentication/models/current_user_utils.ts b/src/server/authentication/models/current_user_utils.ts index 1af36fccd..24bad694d 100644 --- a/src/server/authentication/models/current_user_utils.ts +++ b/src/server/authentication/models/current_user_utils.ts @@ -2,7 +2,6 @@ import { action, computed, observable, runInAction } from "mobx"; import * as rp from 'request-promise'; import { DocServer } from "../../../client/DocServer"; import { Docs } from "../../../client/documents/Documents"; -import { Gateway, NorthstarSettings } from "../../../client/northstar/manager/Gateway"; import { Attribute, AttributeGroup, Catalog, Schema } from "../../../client/northstar/model/idea/idea"; import { ArrayUtil } from "../../../client/northstar/utils/ArrayUtil"; import { CollectionViewType } from "../../../client/views/collections/CollectionBaseView"; @@ -24,6 +23,9 @@ export class CurrentUserUtils { public static get MainDocId() { return this.mainDocId; } public static set MainDocId(id: string | undefined) { this.mainDocId = id; } + @observable public static GuestTarget: Doc | undefined; + @observable public static GuestWorkspace: Doc | undefined; + private static createUserDocument(id: string): Doc { let doc = new Doc(id, true); doc.viewType = CollectionViewType.Tree; @@ -132,7 +134,7 @@ export class CurrentUserUtils { this.curr_id = id; Doc.CurrentUserEmail = email; await rp.get(Utils.prepend(RouteStore.getUserDocumentId)).then(id => { - if (id) { + if (id && id !== "guest") { return DocServer.GetRefField(id).then(async field => { if (field instanceof Doc) { await this.updateUserDocument(field); diff --git a/src/server/credentials/google_docs_credentials.json b/src/server/credentials/google_docs_credentials.json index 8d097d363..955c5a3c1 100644 --- a/src/server/credentials/google_docs_credentials.json +++ b/src/server/credentials/google_docs_credentials.json @@ -1 +1,11 @@ -{"installed":{"client_id":"343179513178-ud6tvmh275r2fq93u9eesrnc66t6akh9.apps.googleusercontent.com","project_id":"quickstart-1565056383187","auth_uri":"https://accounts.google.com/o/oauth2/auth","token_uri":"https://oauth2.googleapis.com/token","auth_provider_x509_cert_url":"https://www.googleapis.com/oauth2/v1/certs","client_secret":"w8KIFSc0MQpmUYHed4qEzn8b","redirect_uris":["urn:ietf:wg:oauth:2.0:oob","http://localhost"]}}
\ No newline at end of file
+{
+    "installed": {
+        "client_id": "343179513178-ud6tvmh275r2fq93u9eesrnc66t6akh9.apps.googleusercontent.com",
+        "project_id": "quickstart-1565056383187",
+        "auth_uri": "https://accounts.google.com/o/oauth2/auth",
+        "token_uri": "https://oauth2.googleapis.com/token",
+        "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
+        "client_secret": "w8KIFSc0MQpmUYHed4qEzn8b",
+        "redirect_uris": ["urn:ietf:wg:oauth:2.0:oob", "http://localhost"]
+    }
+}
\ No newline at end of file
diff --git a/src/server/credentials/google_docs_token.json b/src/server/credentials/google_docs_token.json
deleted file mode 100644
index 07c02d56c..000000000
--- a/src/server/credentials/google_docs_token.json
+++ /dev/null
@@ -1 +0,0 @@
-{"access_token":"ya29.GltjB4-x03xFpd2NY2555cxg1xlT_ajqRi78M9osOfdOF2jTIjlPkn_UZL8cUwVP0DPC8rH3vhhg8RpspFe8Vewx92shAO3RPos_uMH0CUqEiCiZlaaB5I3Jq3Mv","refresh_token":"1/teUKUqGKMLjVqs-eed0L8omI02pzSxMUYaxGc2QxBw0","scope":"https://www.googleapis.com/auth/documents https://www.googleapis.com/auth/drive https://www.googleapis.com/auth/drive.file https://www.googleapis.com/auth/documents.readonly","token_type":"Bearer","expiry_date":1565654175862}
\ No newline at end of file diff --git a/src/server/database.ts b/src/server/database.ts index a7254fb0c..d2375ebd9 100644 --- a/src/server/database.ts +++ b/src/server/database.ts @@ -1,209 +1,295 @@ import * as mongodb from 'mongodb'; import { Transferable } from './Message'; +import { Opt } from '../new_fields/Doc'; +import { Utils, emptyFunction } from '../Utils'; +import { DashUploadUtils } from './DashUploadUtils'; +import { Credentials } from 'google-auth-library'; -export class Database { - public static DocumentsCollection = 'documents'; - public static Instance = new Database(); - private MongoClient = mongodb.MongoClient; - private url = 'mongodb://localhost:27017/Dash'; - private currentWrites: { [id: string]: Promise<void> } = {}; - private db?: mongodb.Db; - private onConnect: (() => void)[] = []; - - constructor() { - this.MongoClient.connect(this.url, (err, client) => { - this.db = client.db(); - this.onConnect.forEach(fn => fn()); - }); - } +export namespace Database { - public update(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateWriteOpResult) => void, upsert = true, collectionName = Database.DocumentsCollection) { - if (this.db) { - let collection = this.db.collection(collectionName); - const prom = this.currentWrites[id]; - let newProm: Promise<void>; - const run = (): Promise<void> => { - return new Promise<void>(resolve => { - collection.updateOne({ _id: id }, value, { upsert } - , (err, res) => { - if (this.currentWrites[id] === newProm) { - delete this.currentWrites[id]; - } - resolve(); - callback(err, res); - }); - }); - }; - newProm = prom ? prom.then(run) : run(); - this.currentWrites[id] = newProm; - } else { - this.onConnect.push(() => this.update(id, value, callback, upsert, collectionName)); - } - } + class Database { + public static DocumentsCollection = 'documents'; + private MongoClient = mongodb.MongoClient; + private url = 'mongodb://localhost:27017/Dash'; + private currentWrites: { [id: string]: Promise<void> } = {}; + private db?: mongodb.Db; + private onConnect: (() => void)[] = []; - public replace(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateWriteOpResult) => void, upsert = true, collectionName = Database.DocumentsCollection) { - if (this.db) { - let collection = this.db.collection(collectionName); - const prom = this.currentWrites[id]; - let newProm: Promise<void>; - const run = (): Promise<void> => { - return new Promise<void>(resolve => { - collection.replaceOne({ _id: id }, value, { upsert } - , (err, res) => { - if (this.currentWrites[id] === newProm) { - delete this.currentWrites[id]; - } - resolve(); - callback(err, res); - }); - }); - }; - newProm = prom ? 
prom.then(run) : run(); - this.currentWrites[id] = newProm; - } else { - this.onConnect.push(() => this.replace(id, value, callback, upsert, collectionName)); + constructor() { + this.MongoClient.connect(this.url, (err, client) => { + this.db = client.db(); + this.onConnect.forEach(fn => fn()); + }); } - } - public delete(query: any, collectionName?: string): Promise<mongodb.DeleteWriteOpResultObject>; - public delete(id: string, collectionName?: string): Promise<mongodb.DeleteWriteOpResultObject>; - public delete(id: any, collectionName = Database.DocumentsCollection) { - if (typeof id === "string") { - id = { _id: id }; - } - if (this.db) { - const db = this.db; - return new Promise(res => db.collection(collectionName).deleteMany(id, (err, result) => res(result))); - } else { - return new Promise(res => this.onConnect.push(() => res(this.delete(id, collectionName)))); + public async update(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateWriteOpResult) => void, upsert = true, collectionName = Database.DocumentsCollection) { + if (this.db) { + let collection = this.db.collection(collectionName); + const prom = this.currentWrites[id]; + let newProm: Promise<void>; + const run = (): Promise<void> => { + return new Promise<void>(resolve => { + collection.updateOne({ _id: id }, value, { upsert } + , (err, res) => { + if (this.currentWrites[id] === newProm) { + delete this.currentWrites[id]; + } + resolve(); + callback(err, res); + }); + }); + }; + newProm = prom ? prom.then(run) : run(); + this.currentWrites[id] = newProm; + return newProm; + } else { + this.onConnect.push(() => this.update(id, value, callback, upsert, collectionName)); + } } - } - public deleteAll(collectionName = Database.DocumentsCollection): Promise<any> { - return new Promise(res => { + public replace(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateWriteOpResult) => void, upsert = true, collectionName = Database.DocumentsCollection) { if (this.db) { - this.db.collection(collectionName).deleteMany({}, res); + let collection = this.db.collection(collectionName); + const prom = this.currentWrites[id]; + let newProm: Promise<void>; + const run = (): Promise<void> => { + return new Promise<void>(resolve => { + collection.replaceOne({ _id: id }, value, { upsert } + , (err, res) => { + if (this.currentWrites[id] === newProm) { + delete this.currentWrites[id]; + } + resolve(); + callback(err, res); + }); + }); + }; + newProm = prom ? 
prom.then(run) : run(); + this.currentWrites[id] = newProm; } else { - this.onConnect.push(() => this.db && this.db.collection(collectionName).deleteMany({}, res)); + this.onConnect.push(() => this.replace(id, value, callback, upsert, collectionName)); } - }); - } + } - public insert(value: any, collectionName = Database.DocumentsCollection) { - if (this.db) { - if ("id" in value) { - value._id = value.id; - delete value.id; + public delete(query: any, collectionName?: string): Promise<mongodb.DeleteWriteOpResultObject>; + public delete(id: string, collectionName?: string): Promise<mongodb.DeleteWriteOpResultObject>; + public delete(id: any, collectionName = Database.DocumentsCollection) { + if (typeof id === "string") { + id = { _id: id }; + } + if (this.db) { + const db = this.db; + return new Promise(res => db.collection(collectionName).deleteMany(id, (err, result) => res(result))); + } else { + return new Promise(res => this.onConnect.push(() => res(this.delete(id, collectionName)))); } - const id = value._id; - const collection = this.db.collection(collectionName); - const prom = this.currentWrites[id]; - let newProm: Promise<void>; - const run = (): Promise<void> => { - return new Promise<void>(resolve => { - collection.insertOne(value, (err, res) => { - if (this.currentWrites[id] === newProm) { - delete this.currentWrites[id]; - } - resolve(); - }); - }); - }; - newProm = prom ? prom.then(run) : run(); - this.currentWrites[id] = newProm; - } else { - this.onConnect.push(() => this.insert(value, collectionName)); } - } - public getDocument(id: string, fn: (result?: Transferable) => void, collectionName = Database.DocumentsCollection) { - if (this.db) { - this.db.collection(collectionName).findOne({ _id: id }, (err, result) => { - if (result) { - result.id = result._id; - delete result._id; - fn(result); + public async deleteAll(collectionName = Database.DocumentsCollection, persist = true): Promise<any> { + return new Promise(resolve => { + const executor = async (database: mongodb.Db) => { + if (persist) { + await database.collection(collectionName).deleteMany({}); + } else { + await database.dropCollection(collectionName); + } + resolve(); + }; + if (this.db) { + executor(this.db); } else { - fn(undefined); + this.onConnect.push(() => this.db && executor(this.db)); } }); - } else { - this.onConnect.push(() => this.getDocument(id, fn, collectionName)); } - } - public getDocuments(ids: string[], fn: (result: Transferable[]) => void, collectionName = Database.DocumentsCollection) { - if (this.db) { - this.db.collection(collectionName).find({ _id: { "$in": ids } }).toArray((err, docs) => { - if (err) { - console.log(err.message); - console.log(err.errmsg); + public async insert(value: any, collectionName = Database.DocumentsCollection) { + if (this.db) { + if ("id" in value) { + value._id = value.id; + delete value.id; } - fn(docs.map(doc => { - doc.id = doc._id; - delete doc._id; - return doc; - })); - }); - } else { - this.onConnect.push(() => this.getDocuments(ids, fn, collectionName)); + const id = value._id; + const collection = this.db.collection(collectionName); + const prom = this.currentWrites[id]; + let newProm: Promise<void>; + const run = (): Promise<void> => { + return new Promise<void>(resolve => { + collection.insertOne(value, (err, res) => { + if (this.currentWrites[id] === newProm) { + delete this.currentWrites[id]; + } + resolve(); + }); + }); + }; + newProm = prom ? 
prom.then(run) : run(); + this.currentWrites[id] = newProm; + return newProm; + } else { + this.onConnect.push(() => this.insert(value, collectionName)); + } } - } - public async visit(ids: string[], fn: (result: any) => string[], collectionName = "newDocuments"): Promise<void> { - if (this.db) { - const visited = new Set<string>(); - while (ids.length) { - const count = Math.min(ids.length, 1000); - const index = ids.length - count; - const fetchIds = ids.splice(index, count).filter(id => !visited.has(id)); - if (!fetchIds.length) { - continue; - } - const docs = await new Promise<{ [key: string]: any }[]>(res => Database.Instance.getDocuments(fetchIds, res, "newDocuments")); - for (const doc of docs) { - const id = doc.id; - visited.add(id); - ids.push(...fn(doc)); + public getDocument(id: string, fn: (result?: Transferable) => void, collectionName = Database.DocumentsCollection) { + if (this.db) { + this.db.collection(collectionName).findOne({ _id: id }, (err, result) => { + if (result) { + result.id = result._id; + delete result._id; + fn(result); + } else { + fn(undefined); + } + }); + } else { + this.onConnect.push(() => this.getDocument(id, fn, collectionName)); + } + } + + public getDocuments(ids: string[], fn: (result: Transferable[]) => void, collectionName = Database.DocumentsCollection) { + if (this.db) { + this.db.collection(collectionName).find({ _id: { "$in": ids } }).toArray((err, docs) => { + if (err) { + console.log(err.message); + console.log(err.errmsg); + } + fn(docs.map(doc => { + doc.id = doc._id; + delete doc._id; + return doc; + })); + }); + } else { + this.onConnect.push(() => this.getDocuments(ids, fn, collectionName)); + } + } + + public async visit(ids: string[], fn: (result: any) => string[], collectionName = "newDocuments"): Promise<void> { + if (this.db) { + const visited = new Set<string>(); + while (ids.length) { + const count = Math.min(ids.length, 1000); + const index = ids.length - count; + const fetchIds = ids.splice(index, count).filter(id => !visited.has(id)); + if (!fetchIds.length) { + continue; + } + const docs = await new Promise<{ [key: string]: any }[]>(res => Instance.getDocuments(fetchIds, res, "newDocuments")); + for (const doc of docs) { + const id = doc.id; + visited.add(id); + ids.push(...fn(doc)); + } } + + } else { + return new Promise(res => { + this.onConnect.push(() => { + this.visit(ids, fn, collectionName); + res(); + }); + }); } + } - } else { - return new Promise(res => { - this.onConnect.push(() => { - this.visit(ids, fn, collectionName); - res(); + public query(query: { [key: string]: any }, projection?: { [key: string]: 0 | 1 }, collectionName = "newDocuments"): Promise<mongodb.Cursor> { + if (this.db) { + let cursor = this.db.collection(collectionName).find(query); + if (projection) { + cursor = cursor.project(projection); + } + return Promise.resolve<mongodb.Cursor>(cursor); + } else { + return new Promise<mongodb.Cursor>(res => { + this.onConnect.push(() => res(this.query(query, projection, collectionName))); }); - }); + } } - } - public query(query: { [key: string]: any }, projection?: { [key: string]: 0 | 1 }, collectionName = "newDocuments"): Promise<mongodb.Cursor> { - if (this.db) { - let cursor = this.db.collection(collectionName).find(query); - if (projection) { - cursor = cursor.project(projection); + public updateMany(query: any, update: any, collectionName = "newDocuments") { + if (this.db) { + const db = this.db; + return new Promise<mongodb.WriteOpResult>(res => db.collection(collectionName).update(query, 
update, (_, result) => res(result))); + } else { + return new Promise<mongodb.WriteOpResult>(res => { + this.onConnect.push(() => this.updateMany(query, update, collectionName).then(res)); + }); } - return Promise.resolve<mongodb.Cursor>(cursor); - } else { - return new Promise<mongodb.Cursor>(res => { - this.onConnect.push(() => res(this.query(query, projection, collectionName))); - }); } - } - public updateMany(query: any, update: any, collectionName = "newDocuments") { - if (this.db) { - const db = this.db; - return new Promise<mongodb.WriteOpResult>(res => db.collection(collectionName).update(query, update, (_, result) => res(result))); - } else { - return new Promise<mongodb.WriteOpResult>(res => { - this.onConnect.push(() => this.updateMany(query, update, collectionName).then(res)); - }); + public print() { + console.log("db says hi!"); } } - public print() { - console.log("db says hi!"); + export const Instance = new Database(); + + export namespace Auxiliary { + + export enum AuxiliaryCollections { + GooglePhotosUploadHistory = "uploadedFromGooglePhotos" + } + + const SanitizedCappedQuery = async (query: { [key: string]: any }, collection: string, cap: number, removeId = true) => { + const cursor = await Instance.query(query, undefined, collection); + const results = await cursor.toArray(); + const slice = results.slice(0, Math.min(cap, results.length)); + return removeId ? slice.map(result => { + delete result._id; + return result; + }) : slice; + }; + + const SanitizedSingletonQuery = async <T>(query: { [key: string]: any }, collection: string, removeId = true): Promise<Opt<T>> => { + const results = await SanitizedCappedQuery(query, collection, 1, removeId); + return results.length ? results[0] : undefined; + }; + + export const QueryUploadHistory = async (contentSize: number) => { + return SanitizedSingletonQuery<DashUploadUtils.UploadInformation>({ contentSize }, AuxiliaryCollections.GooglePhotosUploadHistory); + }; + + export namespace GoogleAuthenticationToken { + + const GoogleAuthentication = "googleAuthentication"; + + export type StoredCredentials = Credentials & { _id: string }; + + export const Fetch = async (userId: string, removeId = true) => { + return SanitizedSingletonQuery<StoredCredentials>({ userId }, GoogleAuthentication, removeId); + }; + + export const Write = async (userId: string, token: any) => { + return Instance.insert({ userId, canAccess: [], ...token }, GoogleAuthentication); + }; + + export const Update = async (userId: string, access_token: string, expiry_date: number) => { + const entry = await Fetch(userId, false); + if (entry) { + const parameters = { $set: { access_token, expiry_date } }; + return Instance.update(entry._id, parameters, emptyFunction, true, GoogleAuthentication); + } + }; + + export const DeleteAll = () => Instance.deleteAll(GoogleAuthentication, false); + + } + + export const LogUpload = async (information: DashUploadUtils.UploadInformation) => { + const bundle = { + _id: Utils.GenerateDeterministicGuid(String(information.contentSize!)), + ...information + }; + return Instance.insert(bundle, AuxiliaryCollections.GooglePhotosUploadHistory); + }; + + export const DeleteAll = async (persist = false) => { + const collectionNames = Object.values(AuxiliaryCollections); + const pendingDeletions = collectionNames.map(name => Instance.deleteAll(name, persist)); + return Promise.all(pendingDeletions); + }; + } -} + +}
\ No newline at end of file diff --git a/src/server/index.ts b/src/server/index.ts index 50ce2b14e..7ebb374e9 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -29,26 +29,26 @@ import { RouteStore } from './RouteStore'; import v4 = require('uuid/v4'); const app = express(); const config = require('../../webpack.config'); -import { createCanvas, loadImage, Canvas } from "canvas"; +import { createCanvas } from "canvas"; const compiler = webpack(config); const port = 1050; // default port to listen const serverPort = 4321; import expressFlash = require('express-flash'); import flash = require('connect-flash'); import { Search } from './Search'; -import _ = require('lodash'); import * as Archiver from 'archiver'; var AdmZip = require('adm-zip'); import * as YoutubeApi from "./apis/youtube/youtubeApiSample"; import { Response } from 'express-serve-static-core'; import { GoogleApiServerUtils } from "./apis/google/GoogleApiServerUtils"; -import { GaxiosResponse } from 'gaxios'; -import { Opt } from '../new_fields/Doc'; -import { docs_v1 } from 'googleapis'; -import { Endpoint } from 'googleapis-common'; +import { GooglePhotosUploadUtils } from './apis/google/GooglePhotosUploadUtils'; const MongoStore = require('connect-mongo')(session); const mongoose = require('mongoose'); const probe = require("probe-image-size"); +import * as qs from 'query-string'; +import { Opt } from '../new_fields/Doc'; +import { DashUploadUtils } from './DashUploadUtils'; +import { BatchedArray, TimeUnit } from 'array-batcher'; const download = (url: string, dest: fs.PathLike) => request.get(url).pipe(fs.createWriteStream(dest)); let youtubeApiKey: string; @@ -115,8 +115,10 @@ function addSecureRoute(method: Method, ...subscribers: string[] ) { let abstracted = (req: express.Request, res: express.Response) => { - if (req.user) { - handler(req.user as any, res, req); + let sharing = qs.parse(qs.extract(req.originalUrl), { sort: false }).sharing === "true"; + sharing = sharing && req.originalUrl.startsWith("/doc/"); + if (req.user || sharing) { + handler(req.user, res, req); } else { req.session!.target = req.originalUrl; onRejection(res, req); @@ -157,6 +159,13 @@ app.get("/buxton", (req, res) => { command_line('python scraper.py', cwd).then(onResolved, tryPython3); }); +const STATUS = { + OK: 200, + BAD_REQUEST: 400, + EXECUTION_ERROR: 500, + PERMISSION_DENIED: 403 +}; + const command_line = (command: string, fromDirectory?: string) => { return new Promise<string>((resolve, reject) => { let options: ExecOptions = {}; @@ -420,10 +429,10 @@ app.get("/thumbnail/:filename", (req, res) => { let filename = req.params.filename; let noExt = filename.substring(0, filename.length - ".png".length); let pagenumber = parseInt(noExt.split('-')[1]); - fs.exists(uploadDir + filename, (exists: boolean) => { - console.log(`${uploadDir + filename} ${exists ? "exists" : "does not exist"}`); + fs.exists(uploadDirectory + filename, (exists: boolean) => { + console.log(`${uploadDirectory + filename} ${exists ? 
"exists" : "does not exist"}`); if (exists) { - let input = fs.createReadStream(uploadDir + filename); + let input = fs.createReadStream(uploadDirectory + filename); probe(input, (err: any, result: any) => { if (err) { console.log(err); @@ -434,7 +443,7 @@ app.get("/thumbnail/:filename", (req, res) => { }); } else { - LoadPage(uploadDir + filename.substring(0, filename.length - noExt.split('-')[1].length - ".PNG".length - 1) + ".pdf", pagenumber, res); + LoadPage(uploadDirectory + filename.substring(0, filename.length - noExt.split('-')[1].length - ".PNG".length - 1) + ".pdf", pagenumber, res); } }); }); @@ -503,21 +512,20 @@ addSecureRoute( res.sendFile(path.join(__dirname, '../../deploy/' + filename)); }, undefined, - RouteStore.home, - RouteStore.openDocumentWithId + RouteStore.home, RouteStore.openDocumentWithId ); addSecureRoute( Method.GET, - (user, res) => res.send(user.userDocumentId || ""), - undefined, + (user, res) => res.send(user.userDocumentId), + (res) => res.send(undefined), RouteStore.getUserDocumentId, ); addSecureRoute( Method.GET, - (user, res) => res.send(JSON.stringify({ id: user.id, email: user.email })), - undefined, + (user, res) => { res.send(JSON.stringify({ id: user.id, email: user.email })); }, + (res) => res.send(JSON.stringify({ id: "__guest__", email: "" })), RouteStore.getCurrUser ); @@ -556,50 +564,29 @@ class NodeCanvasFactory { } const pngTypes = [".png", ".PNG"]; -const pdfTypes = [".pdf", ".PDF"]; const jpgTypes = [".jpg", ".JPG", ".jpeg", ".JPEG"]; -const uploadDir = __dirname + "/public/files/"; +const uploadDirectory = __dirname + "/public/files/"; +interface FileResponse { + name: string; + path: string; + type: string; +} // SETTERS app.post( RouteStore.upload, (req, res) => { let form = new formidable.IncomingForm(); - form.uploadDir = uploadDir; + form.uploadDir = uploadDirectory; form.keepExtensions = true; - // let path = req.body.path; - console.log("upload"); - form.parse(req, (err, fields, files) => { - console.log("parsing"); - let names: string[] = []; - for (const name in files) { - const file = path.basename(files[name].path); - const ext = path.extname(file); - let resizers = [ - { resizer: sharp().rotate(), suffix: "_o" }, - { resizer: sharp().resize(100, undefined, { withoutEnlargement: true }).rotate(), suffix: "_s" }, - { resizer: sharp().resize(400, undefined, { withoutEnlargement: true }).rotate(), suffix: "_m" }, - { resizer: sharp().resize(900, undefined, { withoutEnlargement: true }).rotate(), suffix: "_l" }, - ]; - let isImage = false; - if (pngTypes.includes(ext)) { - resizers.forEach(element => { - element.resizer = element.resizer.png(); - }); - isImage = true; - } else if (jpgTypes.includes(ext)) { - resizers.forEach(element => { - element.resizer = element.resizer.jpeg(); - }); - isImage = true; - } - if (isImage) { - resizers.forEach(resizer => { - fs.createReadStream(uploadDir + file).pipe(resizer.resizer).pipe(fs.createWriteStream(uploadDir + file.substring(0, file.length - ext.length) + resizer.suffix + ext)); - }); - } - names.push(`/files/` + file); + form.parse(req, async (_err, _fields, files) => { + let results: FileResponse[] = []; + for (const key in files) { + const { type, path: location, name } = files[key]; + const filename = path.basename(location); + await DashUploadUtils.UploadImage(uploadDirectory + filename, filename).catch(() => console.log(`Unable to process ${filename}`)); + results.push({ name, type, path: `/files/${filename}` }); } - res.send(names); + _success(res, results); }); } ); @@ -613,7 
+600,7 @@ addSecureRoute( res.status(401).send("incorrect parameters specified"); return; } - imageDataUri.outputFile(uri, uploadDir + filename).then((savedName: string) => { + imageDataUri.outputFile(uri, uploadDirectory + filename).then((savedName: string) => { const ext = path.extname(savedName); let resizers = [ { resizer: sharp().resize(100, undefined, { withoutEnlargement: true }), suffix: "_s" }, @@ -634,7 +621,7 @@ addSecureRoute( } if (isImage) { resizers.forEach(resizer => { - fs.createReadStream(savedName).pipe(resizer.resizer).pipe(fs.createWriteStream(uploadDir + filename + resizer.suffix + ext)); + fs.createReadStream(savedName).pipe(resizer.resizer).pipe(fs.createWriteStream(uploadDirectory + filename + resizer.suffix + ext)); }); } res.send("/files/" + filename + ext); @@ -681,21 +668,29 @@ app.use(RouteStore.corsProxy, (req, res) => { }).pipe(res); }); -app.get(RouteStore.delete, (req, res) => { - if (release) { - res.send("no"); - return; - } - deleteFields().then(() => res.redirect(RouteStore.home)); -}); +addSecureRoute( + Method.GET, + (user, res, req) => { + if (release) { + return _permission_denied(res, deletionPermissionError); + } + deleteFields().then(() => res.redirect(RouteStore.home)); + }, + undefined, + RouteStore.delete +); -app.get(RouteStore.deleteAll, (req, res) => { - if (release) { - res.send("no"); - return; - } - deleteAll().then(() => res.redirect(RouteStore.home)); -}); +addSecureRoute( + Method.GET, + (_user, res, _req) => { + if (release) { + return _permission_denied(res, deletionPermissionError); + } + deleteAll().then(() => res.redirect(RouteStore.home)); + }, + undefined, + RouteStore.deleteAll +); app.use(wdm(compiler, { publicPath: config.output.publicPath })); @@ -801,8 +796,7 @@ function HandleYoutubeQuery([query, callback]: [YoutubeQueryInput, (result?: any } } -const credentials = path.join(__dirname, "./credentials/google_docs_credentials.json"); -const token = path.join(__dirname, "./credentials/google_docs_token.json"); +const credentialsPath = path.join(__dirname, "./credentials/google_docs_credentials.json"); const EndpointHandlerMap = new Map<GoogleApiServerUtils.Action, GoogleApiServerUtils.ApiRouter>([ ["create", (api, params) => api.create(params)], @@ -813,7 +807,7 @@ const EndpointHandlerMap = new Map<GoogleApiServerUtils.Action, GoogleApiServerU app.post(RouteStore.googleDocs + "/:sector/:action", (req, res) => { let sector: GoogleApiServerUtils.Service = req.params.sector as GoogleApiServerUtils.Service; let action: GoogleApiServerUtils.Action = req.params.action as GoogleApiServerUtils.Action; - GoogleApiServerUtils.GetEndpoint(GoogleApiServerUtils.Service[sector], { credentials, token }).then(endpoint => { + GoogleApiServerUtils.GetEndpoint(GoogleApiServerUtils.Service[sector], { credentialsPath, userId: req.headers.userId as string }).then(endpoint => { let handler = EndpointHandlerMap.get(action); if (endpoint && handler) { let execute = handler(endpoint, req.body).then( @@ -827,6 +821,134 @@ app.post(RouteStore.googleDocs + "/:sector/:action", (req, res) => { }); }); +app.get(RouteStore.googlePhotosAccessToken, (req, res) => GoogleApiServerUtils.RetrieveAccessToken({ credentialsPath, userId: req.header("userId")! 
}).then(token => res.send(token))); + +const tokenError = "Unable to successfully upload bytes for all images!"; +const mediaError = "Unable to convert all uploaded bytes to media items!"; +const userIdError = "Unable to parse the identification of the user!"; + +export interface NewMediaItem { + description: string; + simpleMediaItem: { + uploadToken: string; + }; +} + +app.post(RouteStore.googlePhotosMediaUpload, async (req, res) => { + const { media } = req.body; + const userId = req.header("userId"); + + if (!userId) { + return _error(res, userIdError); + } + + await GooglePhotosUploadUtils.initialize({ credentialsPath, userId }); + + let failed = 0; + + const newMediaItems = await BatchedArray.from<GooglePhotosUploadUtils.MediaInput>(media, { batchSize: 25 }).batchedMapPatientInterval( + { magnitude: 100, unit: TimeUnit.Milliseconds }, + async (batch: GooglePhotosUploadUtils.MediaInput[]) => { + const newMediaItems: NewMediaItem[] = []; + for (let element of batch) { + const uploadToken = await GooglePhotosUploadUtils.DispatchGooglePhotosUpload(element.url); + if (!uploadToken) { + failed++; + } else { + newMediaItems.push({ + description: element.description, + simpleMediaItem: { uploadToken } + }); + } + } + return newMediaItems; + } + ); + + if (failed) { + return _error(res, tokenError); + } + + GooglePhotosUploadUtils.CreateMediaItems(newMediaItems, req.body.album).then( + result => _success(res, result.newMediaItemResults), + error => _error(res, mediaError, error) + ); +}); + +interface MediaItem { + baseUrl: string; + filename: string; +} +const prefix = "google_photos_"; + +const downloadError = "Encountered an error while executing downloads."; +const requestError = "Unable to execute download: the body's media items were malformed."; +const deletionPermissionError = "Cannot perform specialized delete outside of the development environment!"; + +app.get("/deleteWithAux", async (_req, res) => { + if (release) { + return _permission_denied(res, deletionPermissionError); + } + await Database.Auxiliary.DeleteAll(); + res.redirect(RouteStore.delete); +}); + +app.get("/deleteWithGoogleCredentials", async (req, res) => { + if (release) { + return _permission_denied(res, deletionPermissionError); + } + await Database.Auxiliary.GoogleAuthenticationToken.DeleteAll(); + res.redirect(RouteStore.delete); +}); + +const UploadError = (count: number) => `Unable to upload ${count} images to Dash's server`; +app.post(RouteStore.googlePhotosMediaDownload, async (req, res) => { + const contents: { mediaItems: MediaItem[] } = req.body; + let failed = 0; + if (contents) { + const completed: Opt<DashUploadUtils.UploadInformation>[] = []; + for (let item of contents.mediaItems) { + const { contentSize, ...attributes } = await DashUploadUtils.InspectImage(item.baseUrl); + const found: Opt<DashUploadUtils.UploadInformation> = await Database.Auxiliary.QueryUploadHistory(contentSize!); + if (!found) { + const upload = await DashUploadUtils.UploadInspectedImage({ contentSize, ...attributes }, item.filename, prefix).catch(error => _error(res, downloadError, error)); + if (upload) { + completed.push(upload); + await Database.Auxiliary.LogUpload(upload); + } else { + failed++; + } + } else { + completed.push(found); + } + } + if (failed) { + return _error(res, UploadError(failed)); + } + return _success(res, completed); + } + _invalid(res, requestError); +}); + +const _error = (res: Response, message: string, error?: any) => { + res.statusMessage = message; + res.status(STATUS.EXECUTION_ERROR).send(error); 
+}; + +const _success = (res: Response, body: any) => { + res.status(STATUS.OK).send(body); +}; + +const _invalid = (res: Response, message: string) => { + res.statusMessage = message; + res.status(STATUS.BAD_REQUEST).send(); +}; + +const _permission_denied = (res: Response, message: string) => { + res.statusMessage = message; + res.status(STATUS.BAD_REQUEST).send("Permission Denied!"); +}; + const suffixMap: { [type: string]: (string | [string, string | ((json: any) => any)]) } = { "number": "_n", "string": "_t", diff --git a/src/server/updateSearch.ts b/src/server/updateSearch.ts deleted file mode 100644 index 906b795f1..000000000 --- a/src/server/updateSearch.ts +++ /dev/null @@ -1,123 +0,0 @@ -import { Database } from "./database"; -import { Cursor } from "mongodb"; -import { Search } from "./Search"; -import pLimit from 'p-limit'; - -const suffixMap: { [type: string]: (string | [string, string | ((json: any) => any)]) } = { - "number": "_n", - "string": "_t", - "boolean": "_b", - // "image": ["_t", "url"], - "video": ["_t", "url"], - "pdf": ["_t", "url"], - "audio": ["_t", "url"], - "web": ["_t", "url"], - "date": ["_d", value => new Date(value.date).toISOString()], - "proxy": ["_i", "fieldId"], - "list": ["_l", list => { - const results = []; - for (const value of list.fields) { - const term = ToSearchTerm(value); - if (term) { - results.push(term.value); - } - } - return results.length ? results : null; - }] -}; - -function ToSearchTerm(val: any): { suffix: string, value: any } | undefined { - if (val === null || val === undefined) { - return; - } - const type = val.__type || typeof val; - let suffix = suffixMap[type]; - if (!suffix) { - return; - } - - if (Array.isArray(suffix)) { - const accessor = suffix[1]; - if (typeof accessor === "function") { - val = accessor(val); - } else { - val = val[accessor]; - } - suffix = suffix[0]; - } - - return { suffix, value: val }; -} - -function getSuffix(value: string | [string, any]): string { - return typeof value === "string" ? 
value : value[0]; -} - -const limit = pLimit(5); -async function update() { - // await new Promise(res => setTimeout(res, 5)); - console.log("update"); - await Search.Instance.clear(); - const cursor = await Database.Instance.query({}); - console.log("Cleared"); - const updates: any[] = []; - let numDocs = 0; - function updateDoc(doc: any) { - numDocs++; - if ((numDocs % 50) === 0) { - console.log("updateDoc " + numDocs); - } - // console.log("doc " + numDocs); - if (doc.__type !== "Doc") { - return; - } - const fields = doc.fields; - if (!fields) { - return; - } - const update: any = { id: doc._id }; - let dynfield = false; - for (const key in fields) { - const value = fields[key]; - const term = ToSearchTerm(value); - if (term !== undefined) { - let { suffix, value } = term; - update[key + suffix] = value; - dynfield = true; - } - } - if (dynfield) { - updates.push(update); - // console.log(updates.length); - } - } - await cursor.forEach(updateDoc); - console.log(`Updating ${updates.length} documents`); - const result = await Search.Instance.updateDocuments(updates); - try { - console.log(JSON.parse(result).responseHeader.status); - } catch { - console.log("Error:"); - // console.log(updates[i]); - console.log(result); - console.log("\n"); - } - // for (let i = 0; i < updates.length; i++) { - // console.log(i); - // const result = await Search.Instance.updateDocument(updates[i]); - // try { - // console.log(JSON.parse(result).responseHeader.status); - // } catch { - // console.log("Error:"); - // console.log(updates[i]); - // console.log(result); - // console.log("\n"); - // } - // } - // await Promise.all(updates.map(update => { - // return limit(() => Search.Instance.updateDocument(update)); - // })); - cursor.close(); -} - -update();
\ No newline at end of file
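End to end, the Google Photos pieces added above compose roughly as sketched here. This is illustrative only: pushToGooglePhotos is a hypothetical caller, while initialize, DispatchGooglePhotosUpload, CreateMediaItems, NewMediaItem, and the batching behavior (50 items per request, 100 ms apart) all come from the diff; the real entry point is the index.ts handler for RouteStore.googlePhotosMediaUpload.

// Illustrative sketch only: composes the server-side helpers added in this diff.
import { GooglePhotosUploadUtils } from "./apis/google/GooglePhotosUploadUtils";
import { NewMediaItem } from "./index";

async function pushToGooglePhotos(credentialsPath: string, userId: string, urls: string[]) {
    // Resolves (or refreshes) the stored OAuth token and caches a Bearer header.
    await GooglePhotosUploadUtils.initialize({ credentialsPath, userId });
    const newMediaItems: NewMediaItem[] = [];
    for (const url of urls) {
        // Uploads the raw bytes and yields an upload token for batchCreate.
        const uploadToken = await GooglePhotosUploadUtils.DispatchGooglePhotosUpload(url);
        uploadToken && newMediaItems.push({ description: url, simpleMediaItem: { uploadToken } });
    }
    // Issues mediaItems:batchCreate in batches of 50, spaced 100 ms apart.
    return GooglePhotosUploadUtils.CreateMediaItems(newMediaItems);
}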