Diffstat (limited to 'src/server')
-rw-r--r-- | src/server/DashUploadUtils.ts | 200
-rw-r--r-- | src/server/Message.ts | 1
-rw-r--r-- | src/server/PdfTypes.ts | 21
-rw-r--r-- | src/server/RouteStore.ts | 9
-rw-r--r-- | src/server/apis/google/GoogleApiServerUtils.ts | 194
-rw-r--r-- | src/server/apis/google/GooglePhotosUploadUtils.ts | 85
-rw-r--r-- | src/server/apis/google/SharedTypes.ts | 21
-rw-r--r-- | src/server/authentication/config/passport.ts | 3
-rw-r--r-- | src/server/authentication/models/current_user_utils.ts | 105
-rw-r--r-- | src/server/credentials/google_docs_credentials.json | 12
-rw-r--r-- | src/server/credentials/google_docs_token.json | 1
-rw-r--r-- | src/server/database.ts | 429
-rw-r--r-- | src/server/index.ts | 410
-rw-r--r-- | src/server/updateSearch.ts | 123
14 files changed, 1143 insertions, 471 deletions
diff --git a/src/server/DashUploadUtils.ts b/src/server/DashUploadUtils.ts new file mode 100644 index 000000000..46d897339 --- /dev/null +++ b/src/server/DashUploadUtils.ts @@ -0,0 +1,200 @@ +import * as fs from 'fs'; +import { Utils } from '../Utils'; +import * as path from 'path'; +import * as sharp from 'sharp'; +import request = require('request-promise'); +import { ExifData, ExifImage } from 'exif'; +import { Opt } from '../new_fields/Doc'; + +const uploadDirectory = path.join(__dirname, './public/files/'); + +export namespace DashUploadUtils { + + export interface Size { + width: number; + suffix: string; + } + + export const Sizes: { [size: string]: Size } = { + SMALL: { width: 100, suffix: "_s" }, + MEDIUM: { width: 400, suffix: "_m" }, + LARGE: { width: 900, suffix: "_l" }, + }; + + const gifs = [".gif"]; + const pngs = [".png"]; + const jpgs = [".jpg", ".jpeg"]; + export const imageFormats = [...pngs, ...jpgs, ...gifs]; + const videoFormats = [".mov", ".mp4"]; + + const size = "content-length"; + const type = "content-type"; + + export interface UploadInformation { + mediaPaths: string[]; + fileNames: { [key: string]: string }; + exifData: EnrichedExifData; + contentSize?: number; + contentType?: string; + } + + const generate = (prefix: string, url: string) => `${prefix}upload_${Utils.GenerateGuid()}${sanitizeExtension(url)}`; + const sanitize = (filename: string) => filename.replace(/\s+/g, "_"); + const sanitizeExtension = (source: string) => { + let extension = path.extname(source); + extension = extension.toLowerCase(); + extension = extension.split("?")[0]; + return extension; + }; + + /** + * Uploads an image specified by the @param source to Dash's /public/files/ + * directory, and returns information generated during that upload + * + * @param {string} source is either the absolute path of an already uploaded image or + * the url of a remote image + * @param {string} filename dictates what to call the image. If not specified, + * the name {@param prefix}_upload_{GUID} + * @param {string} prefix is a string prepended to the generated image name in the + * event that @param filename is not specified + * + * @returns {UploadInformation} This method returns + * 1) the paths to the uploaded images (plural due to resizing) + * 2) the file name of each of the resized images + * 3) the size of the image, in bytes (4432130) + * 4) the content type of the image, i.e. image/(jpeg | png | ...) 
+ */ + export const UploadImage = async (source: string, filename?: string, prefix: string = ""): Promise<UploadInformation> => { + const metadata = await InspectImage(source); + return UploadInspectedImage(metadata, filename, prefix); + }; + + export interface InspectionResults { + isLocal: boolean; + stream: any; + normalizedUrl: string; + exifData: EnrichedExifData; + contentSize?: number; + contentType?: string; + } + + export interface EnrichedExifData { + data: ExifData; + error?: string; + } + + /** + * Based on the url's classification as local or remote, gleans + * as much information as possible about the specified image + * + * @param source is the path or url to the image in question + */ + export const InspectImage = async (source: string): Promise<InspectionResults> => { + const { isLocal, stream, normalized: normalizedUrl } = classify(source); + const exifData = await parseExifData(source); + const results = { + exifData, + isLocal, + stream, + normalizedUrl + }; + // stop here if local, since request.head() can't handle local paths, only urls on the web + if (isLocal) { + return results; + } + const metadata = (await new Promise<any>((resolve, reject) => { + request.head(source, async (error, res) => { + if (error) { + return reject(error); + } + resolve(res); + }); + })).headers; + return { + contentSize: parseInt(metadata[size]), + contentType: metadata[type], + ...results + }; + }; + + export const UploadInspectedImage = async (metadata: InspectionResults, filename?: string, prefix = ""): Promise<UploadInformation> => { + const { isLocal, stream, normalizedUrl, contentSize, contentType, exifData } = metadata; + const resolved = filename ? sanitize(filename) : generate(prefix, normalizedUrl); + const extension = sanitizeExtension(normalizedUrl || resolved); + let information: UploadInformation = { + mediaPaths: [], + fileNames: { clean: resolved }, + exifData, + contentSize, + contentType, + }; + return new Promise<UploadInformation>(async (resolve, reject) => { + const resizers = [ + { resizer: sharp().rotate(), suffix: "_o" }, + ...Object.values(Sizes).map(size => ({ + resizer: sharp().resize(size.width, undefined, { withoutEnlargement: true }).rotate(), + suffix: size.suffix + })) + ]; + let nonVisual = false; + if (pngs.includes(extension)) { + resizers.forEach(element => element.resizer = element.resizer.png()); + } else if (jpgs.includes(extension)) { + resizers.forEach(element => element.resizer = element.resizer.jpeg()); + } else if (![...imageFormats, ...videoFormats].includes(extension.toLowerCase())) { + nonVisual = true; + } + if (imageFormats.includes(extension)) { + for (let resizer of resizers) { + const suffix = resizer.suffix; + let mediaPath: string; + await new Promise<void>(resolve => { + const filename = resolved.substring(0, resolved.length - extension.length) + suffix + extension; + information.mediaPaths.push(mediaPath = uploadDirectory + filename); + information.fileNames[suffix] = filename; + stream(normalizedUrl).pipe(resizer.resizer).pipe(fs.createWriteStream(mediaPath)) + .on('close', resolve) + .on('error', reject); + }); + } + } + if (!isLocal || nonVisual) { + await new Promise<void>(resolve => { + stream(normalizedUrl).pipe(fs.createWriteStream(uploadDirectory + resolved)).on('close', resolve); + }); + } + resolve(information); + }); + }; + + const classify = (url: string) => { + const isLocal = /Dash-Web(\\|\/)src(\\|\/)server(\\|\/)public(\\|\/)files/g.test(url); + return { + isLocal, + stream: isLocal ? 
fs.createReadStream : request, + normalized: isLocal ? path.normalize(url) : url + }; + }; + + const parseExifData = async (source: string): Promise<EnrichedExifData> => { + return new Promise<EnrichedExifData>(resolve => { + new ExifImage(source, (error, data) => { + let reason: Opt<string> = undefined; + if (error) { + reason = (error as any).code; + } + resolve({ data, error: reason }); + }); + }); + }; + + export const createIfNotExists = async (path: string) => { + if (await new Promise<boolean>(resolve => fs.exists(path, resolve))) { + return true; + } + return new Promise<boolean>(resolve => fs.mkdir(path, error => resolve(error === null))); + }; + + export const Destroy = (mediaPath: string) => new Promise<boolean>(resolve => fs.unlink(mediaPath, error => resolve(error === null))); + +}
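Usage sketch for the upload helpers above (illustrative, not part of the diff; the sample URL is a placeholder):

import { DashUploadUtils } from "./DashUploadUtils";

(async () => {
    // Inspect first (EXIF, content-type, byte size), then upload and generate the _o/_s/_m/_l variants.
    const metadata = await DashUploadUtils.InspectImage("https://example.com/photo.jpg"); // placeholder URL
    const upload = await DashUploadUtils.UploadInspectedImage(metadata);
    console.log(upload.mediaPaths, upload.fileNames, upload.contentType, upload.contentSize);
    // Or perform both steps in one call:
    // const same = await DashUploadUtils.UploadImage("https://example.com/photo.jpg");
})();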
\ No newline at end of file diff --git a/src/server/Message.ts b/src/server/Message.ts index 4ec390ade..aaee143e8 100644 --- a/src/server/Message.ts +++ b/src/server/Message.ts @@ -1,5 +1,4 @@ import { Utils } from "../Utils"; -import { google, docs_v1 } from "googleapis"; export class Message<T> { private _name: string; diff --git a/src/server/PdfTypes.ts b/src/server/PdfTypes.ts new file mode 100644 index 000000000..e87f08e1d --- /dev/null +++ b/src/server/PdfTypes.ts @@ -0,0 +1,21 @@ +export interface ParsedPDF { + numpages: number; + numrender: number; + info: PDFInfo; + metadata: PDFMetadata; + version: string; //https://mozilla.github.io/pdf.js/getting_started/ + text: string; +} + +export interface PDFInfo { + PDFFormatVersion: string; + IsAcroFormPresent: boolean; + IsXFAPresent: boolean; + [key: string]: any; +} + +export interface PDFMetadata { + parse(): void; + get(name: string): string; + has(name: string): boolean; +}
\ No newline at end of file diff --git a/src/server/RouteStore.ts b/src/server/RouteStore.ts index 014906054..7426ffb39 100644 --- a/src/server/RouteStore.ts +++ b/src/server/RouteStore.ts @@ -13,6 +13,8 @@ export enum RouteStore { upload = "/upload", dataUriToImage = "/uploadURI", images = "/images", + inspectImage = "/inspectImage", + imageHierarchyExport = "/imageHierarchyExport", // USER AND WORKSPACES getCurrUser = "/getCurrentUser", @@ -31,6 +33,11 @@ export enum RouteStore { // APIS cognitiveServices = "/cognitiveservices", - googleDocs = "/googleDocs" + googleDocs = "/googleDocs", + readGoogleAccessToken = "/readGoogleAccessToken", + writeGoogleAccessToken = "/writeGoogleAccessToken", + googlePhotosMediaUpload = "/googlePhotosMediaUpload", + googlePhotosMediaDownload = "/googlePhotosMediaDownload", + googleDocsGet = "/googleDocsGet" }
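A hedged client-side sketch of hitting one of the new routes; the userId header value is a placeholder, and the header name mirrors what index.ts reads later in this diff:

import { RouteStore } from "./RouteStore";

// Resolves to a cached/refreshed access token, or an authentication URL if no token is stored for this user.
fetch(RouteStore.readGoogleAccessToken, { headers: { userId: "<current user id>" } })
    .then(response => response.text())
    .then(tokenOrAuthUrl => console.log(tokenOrAuthUrl));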
\ No newline at end of file diff --git a/src/server/apis/google/GoogleApiServerUtils.ts b/src/server/apis/google/GoogleApiServerUtils.ts index 8785cd974..963c7736a 100644 --- a/src/server/apis/google/GoogleApiServerUtils.ts +++ b/src/server/apis/google/GoogleApiServerUtils.ts @@ -1,11 +1,14 @@ -import { google, docs_v1, slides_v1 } from "googleapis"; +import { google } from "googleapis"; import { createInterface } from "readline"; import { readFile, writeFile } from "fs"; -import { OAuth2Client } from "google-auth-library"; +import { OAuth2Client, Credentials } from "google-auth-library"; import { Opt } from "../../../new_fields/Doc"; import { GlobalOptions } from "googleapis-common"; import { GaxiosResponse } from "gaxios"; - +import request = require('request-promise'); +import * as qs from 'query-string'; +import Photos = require('googlephotos'); +import { Database } from "../../database"; /** * Server side authentication for Google Api queries. */ @@ -20,6 +23,9 @@ export namespace GoogleApiServerUtils { 'presentations.readonly', 'drive', 'drive.file', + 'photoslibrary', + 'photoslibrary.appendonly', + 'photoslibrary.sharing' ]; export const parseBuffer = (data: Buffer) => JSON.parse(data.toString()); @@ -29,102 +35,146 @@ export namespace GoogleApiServerUtils { Slides = "Slides" } - - export interface CredentialPaths { - credentials: string; - token: string; + export interface CredentialInformation { + credentialsPath: string; + userId: string; } export type ApiResponse = Promise<GaxiosResponse>; - export type ApiRouter = (endpoint: Endpoint, paramters: any) => ApiResponse; - export type ApiHandler = (parameters: any) => ApiResponse; + export type ApiRouter = (endpoint: Endpoint, parameters: any) => ApiResponse; + export type ApiHandler = (parameters: any, methodOptions?: any) => ApiResponse; export type Action = "create" | "retrieve" | "update"; export type Endpoint = { get: ApiHandler, create: ApiHandler, batchUpdate: ApiHandler }; export type EndpointParameters = GlobalOptions & { version: "v1" }; - export const GetEndpoint = async (sector: string, paths: CredentialPaths) => { - return new Promise<Opt<Endpoint>>((resolve, reject) => { - readFile(paths.credentials, (err, credentials) => { + export const GetEndpoint = (sector: string, paths: CredentialInformation) => { + return new Promise<Opt<Endpoint>>(resolve => { + RetrieveCredentials(paths).then(authentication => { + let routed: Opt<Endpoint>; + let parameters: EndpointParameters = { auth: authentication.client, version: "v1" }; + switch (sector) { + case Service.Documents: + routed = google.docs(parameters).documents; + break; + case Service.Slides: + routed = google.slides(parameters).presentations; + break; + } + resolve(routed); + }); + }); + }; + + export const RetrieveAccessToken = (information: CredentialInformation) => { + return new Promise<string>((resolve, reject) => { + RetrieveCredentials(information).then( + credentials => resolve(credentials.token.access_token!), + error => reject(`Error: unable to authenticate Google Photos API request.\n${error}`) + ); + }); + }; + + const RetrieveOAuthClient = async (information: CredentialInformation) => { + return new Promise<OAuth2Client>((resolve, reject) => { + readFile(information.credentialsPath, async (err, credentials) => { if (err) { reject(err); return console.log('Error loading client secret file:', err); } - return authorize(parseBuffer(credentials), paths.token).then(auth => { - let routed: Opt<Endpoint>; - let parameters: EndpointParameters = { auth, version: 
"v1" }; - switch (sector) { - case Service.Documents: - routed = google.docs(parameters).documents; - break; - case Service.Slides: - routed = google.slides(parameters).presentations; - break; - } - resolve(routed); - }); + const { client_secret, client_id, redirect_uris } = parseBuffer(credentials).installed; + resolve(new google.auth.OAuth2(client_id, client_secret, redirect_uris[0])); }); }); }; + export const GenerateAuthenticationUrl = async (information: CredentialInformation) => { + const client = await RetrieveOAuthClient(information); + return client.generateAuthUrl({ + access_type: 'offline', + scope: SCOPES.map(relative => prefix + relative), + }); + }; + + export const ProcessClientSideCode = async (information: CredentialInformation, authenticationCode: string): Promise<TokenResult> => { + const oAuth2Client = await RetrieveOAuthClient(information); + return new Promise<TokenResult>((resolve, reject) => { + oAuth2Client.getToken(authenticationCode, async (err, token) => { + if (err || !token) { + reject(err); + return console.error('Error retrieving access token', err); + } + oAuth2Client.setCredentials(token); + await Database.Auxiliary.GoogleAuthenticationToken.Write(information.userId, token); + resolve({ token, client: oAuth2Client }); + }); + }); + }; + export const RetrieveCredentials = (information: CredentialInformation) => { + return new Promise<TokenResult>((resolve, reject) => { + readFile(information.credentialsPath, async (err, credentials) => { + if (err) { + reject(err); + return console.log('Error loading client secret file:', err); + } + authorize(parseBuffer(credentials), information.userId).then(resolve, reject); + }); + }); + }; + + export const RetrievePhotosEndpoint = (paths: CredentialInformation) => { + return new Promise<any>((resolve, reject) => { + RetrieveAccessToken(paths).then( + token => resolve(new Photos(token)), + reject + ); + }); + }; + + type TokenResult = { token: Credentials, client: OAuth2Client }; /** * Create an OAuth2 client with the given credentials, and returns the promise resolving to the authenticated client * @param {Object} credentials The authorization client credentials. */ - export function authorize(credentials: any, token_path: string): Promise<OAuth2Client> { + export function authorize(credentials: any, userId: string): Promise<TokenResult> { const { client_secret, client_id, redirect_uris } = credentials.installed; - const oAuth2Client = new google.auth.OAuth2( - client_id, client_secret, redirect_uris[0]); - - return new Promise<OAuth2Client>((resolve, reject) => { - readFile(token_path, (err, token) => { - // Check if we have previously stored a token. - if (err) { - return getNewToken(oAuth2Client, token_path).then(resolve, reject); + const oAuth2Client = new google.auth.OAuth2(client_id, client_secret, redirect_uris[0]); + return new Promise<TokenResult>((resolve, reject) => { + // Attempting to authorize user (${userId}) + Database.Auxiliary.GoogleAuthenticationToken.Fetch(userId).then(token => { + if (token!.expiry_date! < new Date().getTime()) { + // Token has expired, so submitting a request for a refreshed access token + return refreshToken(token!, client_id, client_secret, oAuth2Client, userId).then(resolve, reject); } - oAuth2Client.setCredentials(parseBuffer(token)); - resolve(oAuth2Client); + // Authentication successful! 
+ oAuth2Client.setCredentials(token!); + resolve({ token: token!, client: oAuth2Client }); }); }); } - /** - * Get and store new token after prompting for user authorization, and then - * execute the given callback with the authorized OAuth2 client. - * @param {google.auth.OAuth2} oAuth2Client The OAuth2 client to get token for. - * @param {getEventsCallback} callback The callback for the authorized client. - */ - function getNewToken(oAuth2Client: OAuth2Client, token_path: string) { - return new Promise<OAuth2Client>((resolve, reject) => { - const authUrl = oAuth2Client.generateAuthUrl({ - access_type: 'offline', - scope: SCOPES.map(relative => prefix + relative), - }); - console.log('Authorize this app by visiting this url:', authUrl); - const rl = createInterface({ - input: process.stdin, - output: process.stdout, - }); - rl.question('Enter the code from that page here: ', (code) => { - rl.close(); - oAuth2Client.getToken(code, (err, token) => { - if (err || !token) { - reject(err); - return console.error('Error retrieving access token', err); - } - oAuth2Client.setCredentials(token); - // Store the token to disk for later program executions - writeFile(token_path, JSON.stringify(token), (err) => { - if (err) { - console.error(err); - reject(err); - } - console.log('Token stored to', token_path); - }); - resolve(oAuth2Client); - }); + const refreshEndpoint = "https://oauth2.googleapis.com/token"; + const refreshToken = (credentials: Credentials, client_id: string, client_secret: string, oAuth2Client: OAuth2Client, userId: string) => { + return new Promise<TokenResult>(resolve => { + let headerParameters = { headers: { 'Content-Type': 'application/x-www-form-urlencoded' } }; + let queryParameters = { + refreshToken: credentials.refresh_token, + client_id, + client_secret, + grant_type: "refresh_token" + }; + let url = `${refreshEndpoint}?${qs.stringify(queryParameters)}`; + request.post(url, headerParameters).then(async response => { + let { access_token, expires_in } = JSON.parse(response); + const expiry_date = new Date().getTime() + (expires_in * 1000); + await Database.Auxiliary.GoogleAuthenticationToken.Update(userId, access_token, expiry_date); + credentials.access_token = access_token; + credentials.expiry_date = expiry_date; + oAuth2Client.setCredentials(credentials); + resolve({ token: credentials, client: oAuth2Client }); }); }); - } + }; + }
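A sketch of the new per-user credential flow above (illustrative; the relative credentials path, userId, and documentId are placeholders):

import { GoogleApiServerUtils } from "./GoogleApiServerUtils";

const information: GoogleApiServerUtils.CredentialInformation = {
    credentialsPath: "./credentials/google_docs_credentials.json", // placeholder path
    userId: "<mongo user id>" // placeholder
};

// GetEndpoint calls RetrieveCredentials internally, which loads the stored token and refreshes it if expired.
GoogleApiServerUtils.GetEndpoint(GoogleApiServerUtils.Service.Documents, information)
    .then(endpoint => endpoint && endpoint.get({ documentId: "<doc id>" })); // placeholder id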
\ No newline at end of file diff --git a/src/server/apis/google/GooglePhotosUploadUtils.ts b/src/server/apis/google/GooglePhotosUploadUtils.ts new file mode 100644 index 000000000..4a67e57cc --- /dev/null +++ b/src/server/apis/google/GooglePhotosUploadUtils.ts @@ -0,0 +1,85 @@ +import request = require('request-promise'); +import { GoogleApiServerUtils } from './GoogleApiServerUtils'; +import * as path from 'path'; +import { MediaItemCreationResult } from './SharedTypes'; +import { NewMediaItem } from "../../index"; +import { BatchedArray, TimeUnit } from 'array-batcher'; +import { DashUploadUtils } from '../../DashUploadUtils'; + +export namespace GooglePhotosUploadUtils { + + export interface Paths { + uploadDirectory: string; + credentialsPath: string; + tokenPath: string; + } + + export interface MediaInput { + url: string; + description: string; + } + + const prepend = (extension: string) => `https://photoslibrary.googleapis.com/v1/${extension}`; + const headers = (type: string) => ({ + 'Content-Type': `application/${type}`, + 'Authorization': Bearer, + }); + + let Bearer: string; + + export const initialize = async (information: GoogleApiServerUtils.CredentialInformation) => { + const token = await GoogleApiServerUtils.RetrieveAccessToken(information); + Bearer = `Bearer ${token}`; + }; + + export const DispatchGooglePhotosUpload = async (url: string) => { + if (!DashUploadUtils.imageFormats.includes(path.extname(url))) { + return undefined; + } + const body = await request(url, { encoding: null }); + const parameters = { + method: 'POST', + headers: { + ...headers('octet-stream'), + 'X-Goog-Upload-File-Name': path.basename(url), + 'X-Goog-Upload-Protocol': 'raw' + }, + uri: prepend('uploads'), + body + }; + return new Promise<any>((resolve, reject) => request(parameters, (error, _response, body) => { + if (error) { + console.log(error); + return reject(error); + } + resolve(body); + })); + }; + + export const CreateMediaItems = async (newMediaItems: NewMediaItem[], album?: { id: string }): Promise<MediaItemCreationResult> => { + const newMediaItemResults = await BatchedArray.from(newMediaItems, { batchSize: 50 }).batchedMapPatientInterval( + { magnitude: 100, unit: TimeUnit.Milliseconds }, + async (batch: NewMediaItem[]) => { + const parameters = { + method: 'POST', + headers: headers('json'), + uri: prepend('mediaItems:batchCreate'), + body: { newMediaItems: batch } as any, + json: true + }; + album && (parameters.body.albumId = album.id); + return (await new Promise<MediaItemCreationResult>((resolve, reject) => { + request(parameters, (error, _response, body) => { + if (error) { + reject(error); + } else { + resolve(body); + } + }); + })).newMediaItemResults; + } + ); + return { newMediaItemResults }; + }; + +}
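Illustrative calling sequence for the Photos upload helpers above; the credential information, description, and image URL are placeholders:

import { GoogleApiServerUtils } from "./GoogleApiServerUtils";
import { GooglePhotosUploadUtils } from "./GooglePhotosUploadUtils";

async function uploadSingleImage(information: GoogleApiServerUtils.CredentialInformation, url: string) {
    await GooglePhotosUploadUtils.initialize(information); // caches the Bearer token for later requests
    const uploadToken = await GooglePhotosUploadUtils.DispatchGooglePhotosUpload(url);
    if (!uploadToken) {
        return undefined; // unsupported extension or failed byte upload
    }
    return GooglePhotosUploadUtils.CreateMediaItems([
        { description: "Uploaded from Dash", simpleMediaItem: { uploadToken } }
    ]);
}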
\ No newline at end of file diff --git a/src/server/apis/google/SharedTypes.ts b/src/server/apis/google/SharedTypes.ts new file mode 100644 index 000000000..9ad6130b6 --- /dev/null +++ b/src/server/apis/google/SharedTypes.ts @@ -0,0 +1,21 @@ +export interface NewMediaItemResult { + uploadToken: string; + status: { code: number, message: string }; + mediaItem: MediaItem; +} + +export interface MediaItem { + id: string; + description: string; + productUrl: string; + baseUrl: string; + mimeType: string; + mediaMetadata: { + creationTime: string; + width: string; + height: string; + }; + filename: string; +} + +export type MediaItemCreationResult = { newMediaItemResults: NewMediaItemResult[] };
\ No newline at end of file diff --git a/src/server/authentication/config/passport.ts b/src/server/authentication/config/passport.ts index e87b93ddf..8915a4abf 100644 --- a/src/server/authentication/config/passport.ts +++ b/src/server/authentication/config/passport.ts @@ -1,7 +1,6 @@ import * as passport from 'passport'; import * as passportLocal from 'passport-local'; -import * as mongodb from 'mongodb'; -import * as _ from "lodash"; +import _ from "lodash"; import { default as User } from '../models/user_model'; import { Request, Response, NextFunction } from "express"; import { RouteStore } from '../../RouteStore'; diff --git a/src/server/authentication/models/current_user_utils.ts b/src/server/authentication/models/current_user_utils.ts index f7ce24967..0fbfbf2f3 100644 --- a/src/server/authentication/models/current_user_utils.ts +++ b/src/server/authentication/models/current_user_utils.ts @@ -2,12 +2,11 @@ import { action, computed, observable, runInAction } from "mobx"; import * as rp from 'request-promise'; import { DocServer } from "../../../client/DocServer"; import { Docs } from "../../../client/documents/Documents"; -import { Gateway, NorthstarSettings } from "../../../client/northstar/manager/Gateway"; import { Attribute, AttributeGroup, Catalog, Schema } from "../../../client/northstar/model/idea/idea"; import { ArrayUtil } from "../../../client/northstar/utils/ArrayUtil"; import { CollectionViewType } from "../../../client/views/collections/CollectionBaseView"; import { CollectionView } from "../../../client/views/collections/CollectionView"; -import { Doc } from "../../../new_fields/Doc"; +import { Doc, DocListCast } from "../../../new_fields/Doc"; import { List } from "../../../new_fields/List"; import { listSpec } from "../../../new_fields/Schema"; import { Cast, StrCast, PromiseValue } from "../../../new_fields/Types"; @@ -24,6 +23,9 @@ export class CurrentUserUtils { public static get MainDocId() { return this.mainDocId; } public static set MainDocId(id: string | undefined) { this.mainDocId = id; } + @observable public static GuestTarget: Doc | undefined; + @observable public static GuestWorkspace: Doc | undefined; + private static createUserDocument(id: string): Doc { let doc = new Doc(id, true); doc.viewType = CollectionViewType.Tree; @@ -36,46 +38,99 @@ export class CurrentUserUtils { doc.xMargin = 5; doc.yMargin = 5; doc.boxShadow = "0 0"; - doc.excludeFromLibrary = true; doc.optionalRightCollection = Docs.Create.StackingDocument([], { title: "New mobile uploads" }); return doc; } static updateUserDocument(doc: Doc) { + + // setup workspaces library item if (doc.workspaces === undefined) { - const workspaces = Docs.Create.TreeDocument([], { title: "Workspaces", height: 100 }); - workspaces.excludeFromLibrary = true; - workspaces.workspaceLibrary = true; + const workspaces = Docs.Create.TreeDocument([], { title: "Workspaces".toUpperCase(), height: 100 }); workspaces.boxShadow = "0 0"; doc.workspaces = workspaces; } - PromiseValue(Cast(doc.workspaces, Doc)).then(workspaces => workspaces && (workspaces.preventTreeViewOpen = true)); + PromiseValue(Cast(doc.workspaces, Doc)).then(workspaces => { + if (workspaces) { + workspaces.backgroundColor = "#eeeeee"; + workspaces.preventTreeViewOpen = true; + workspaces.forceActive = true; + workspaces.lockedPosition = true; + if (StrCast(workspaces.title) === "Workspaces") { + workspaces.title = "WORKSPACES"; + } + } + }); + + // setup notes list + if (doc.noteTypes === undefined) { + let notes = [Docs.Create.TextDocument({ title: 
"Note", backgroundColor: "yellow", isTemplate: true }), + Docs.Create.TextDocument({ title: "Idea", backgroundColor: "pink", isTemplate: true }), + Docs.Create.TextDocument({ title: "Topic", backgroundColor: "lightBlue", isTemplate: true }), + Docs.Create.TextDocument({ title: "Person", backgroundColor: "lightGreen", isTemplate: true })]; + const noteTypes = Docs.Create.TreeDocument(notes, { title: "Note Types", height: 75 }); + doc.noteTypes = noteTypes; + } + PromiseValue(Cast(doc.noteTypes, Doc)).then(noteTypes => noteTypes && PromiseValue(noteTypes.data).then(DocListCast)); + + // setup Recently Closed library item if (doc.recentlyClosed === undefined) { - const recentlyClosed = Docs.Create.TreeDocument([], { title: "Recently Closed", height: 75 }); - recentlyClosed.excludeFromLibrary = true; + const recentlyClosed = Docs.Create.TreeDocument([], { title: "Recently Closed".toUpperCase(), height: 75 }); recentlyClosed.boxShadow = "0 0"; doc.recentlyClosed = recentlyClosed; } - PromiseValue(Cast(doc.recentlyClosed, Doc)).then(recent => recent && (recent.preventTreeViewOpen = true)); + PromiseValue(Cast(doc.recentlyClosed, Doc)).then(recent => { + if (recent) { + recent.backgroundColor = "#eeeeee"; + recent.preventTreeViewOpen = true; + recent.forceActive = true; + recent.lockedPosition = true; + if (StrCast(recent.title) === "Recently Closed") { + recent.title = "RECENTLY CLOSED"; + } + } + }); + + if (doc.curPresentation === undefined) { const curPresentation = Docs.Create.PresDocument(new List<Doc>(), { title: "Presentation" }); - curPresentation.excludeFromLibrary = true; curPresentation.boxShadow = "0 0"; doc.curPresentation = curPresentation; } + if (doc.sidebar === undefined) { const sidebar = Docs.Create.StackingDocument([doc.workspaces as Doc, doc, doc.recentlyClosed as Doc], { title: "Sidebar" }); - sidebar.excludeFromLibrary = true; + sidebar.forceActive = true; + sidebar.lockedPosition = true; sidebar.gridGap = 5; sidebar.xMargin = 5; sidebar.yMargin = 5; - Doc.GetProto(sidebar).backgroundColor = "#aca3a6"; sidebar.boxShadow = "1 1 3"; doc.sidebar = sidebar; } - StrCast(doc.title).indexOf("@") !== -1 && (doc.title = StrCast(doc.title).split("@")[0] + "'s Library"); + PromiseValue(Cast(doc.sidebar, Doc)).then(sidebar => { + if (sidebar) { + sidebar.backgroundColor = "lightgrey"; + } + }); + + if (doc.overlays === undefined) { + const overlays = Docs.Create.FreeformDocument([], { title: "Overlays" }); + Doc.GetProto(overlays).backgroundColor = "#aca3a6"; + doc.overlays = overlays; + } + + if (doc.linkFollowBox === undefined) { + PromiseValue(Cast(doc.overlays, Doc)).then(overlays => overlays && Doc.AddDocToList(overlays, "data", doc.linkFollowBox = Docs.Create.LinkFollowBoxDocument({ x: 250, y: 20, width: 500, height: 370, title: "Link Follower" }))); + } + + StrCast(doc.title).indexOf("@") !== -1 && (doc.title = (StrCast(doc.title).split("@")[0] + "'s Library").toUpperCase()); + StrCast(doc.title).indexOf("'s Library") !== -1 && (doc.title = StrCast(doc.title).toUpperCase()); + doc.backgroundColor = "#eeeeee"; doc.width = 100; doc.preventTreeViewOpen = true; + doc.forceActive = true; + doc.lockedPosition = true; } public static loadCurrentUser() { @@ -93,7 +148,7 @@ export class CurrentUserUtils { this.curr_id = id; Doc.CurrentUserEmail = email; await rp.get(Utils.prepend(RouteStore.getUserDocumentId)).then(id => { - if (id) { + if (id && id !== "guest") { return DocServer.GetRefField(id).then(async field => { if (field instanceof Doc) { await this.updateUserDocument(field); @@ 
-106,17 +161,17 @@ export class CurrentUserUtils { throw new Error("There should be a user id! Why does Dash think there isn't one?"); } }); - try { - const getEnvironment = await fetch("/assets/env.json", { redirect: "follow", method: "GET", credentials: "include" }); - NorthstarSettings.Instance.UpdateEnvironment(await getEnvironment.json()); - await Gateway.Instance.ClearCatalog(); - const extraSchemas = Cast(CurrentUserUtils.UserDocument.DBSchemas, listSpec("string"), []); - let extras = await Promise.all(extraSchemas.map(sc => Gateway.Instance.GetSchema("", sc))); - let catprom = CurrentUserUtils.SetNorthstarCatalog(await Gateway.Instance.GetCatalog(), extras); - // if (catprom) await Promise.all(catprom); - } catch (e) { + // try { + // const getEnvironment = await fetch("/assets/env.json", { redirect: "follow", method: "GET", credentials: "include" }); + // NorthstarSettings.Instance.UpdateEnvironment(await getEnvironment.json()); + // await Gateway.Instance.ClearCatalog(); + // const extraSchemas = Cast(CurrentUserUtils.UserDocument.DBSchemas, listSpec("string"), []); + // let extras = await Promise.all(extraSchemas.map(sc => Gateway.Instance.GetSchema("", sc))); + // let catprom = CurrentUserUtils.SetNorthstarCatalog(await Gateway.Instance.GetCatalog(), extras); + // // if (catprom) await Promise.all(catprom); + // } catch (e) { - } + // } } /* Northstar catalog ... really just for testing so this should eventually go away */ diff --git a/src/server/credentials/google_docs_credentials.json b/src/server/credentials/google_docs_credentials.json index 8d097d363..955c5a3c1 100644 --- a/src/server/credentials/google_docs_credentials.json +++ b/src/server/credentials/google_docs_credentials.json @@ -1 +1,11 @@ -{"installed":{"client_id":"343179513178-ud6tvmh275r2fq93u9eesrnc66t6akh9.apps.googleusercontent.com","project_id":"quickstart-1565056383187","auth_uri":"https://accounts.google.com/o/oauth2/auth","token_uri":"https://oauth2.googleapis.com/token","auth_provider_x509_cert_url":"https://www.googleapis.com/oauth2/v1/certs","client_secret":"w8KIFSc0MQpmUYHed4qEzn8b","redirect_uris":["urn:ietf:wg:oauth:2.0:oob","http://localhost"]}}
\ No newline at end of file +{ + "installed": { + "client_id": "343179513178-ud6tvmh275r2fq93u9eesrnc66t6akh9.apps.googleusercontent.com", + "project_id": "quickstart-1565056383187", + "auth_uri": "https://accounts.google.com/o/oauth2/auth", + "token_uri": "https://oauth2.googleapis.com/token", + "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", + "client_secret": "w8KIFSc0MQpmUYHed4qEzn8b", + "redirect_uris": ["urn:ietf:wg:oauth:2.0:oob", "http://localhost"] + } +}
\ No newline at end of file diff --git a/src/server/credentials/google_docs_token.json b/src/server/credentials/google_docs_token.json deleted file mode 100644 index 07c02d56c..000000000 --- a/src/server/credentials/google_docs_token.json +++ /dev/null @@ -1 +0,0 @@ -{"access_token":"ya29.GltjB4-x03xFpd2NY2555cxg1xlT_ajqRi78M9osOfdOF2jTIjlPkn_UZL8cUwVP0DPC8rH3vhhg8RpspFe8Vewx92shAO3RPos_uMH0CUqEiCiZlaaB5I3Jq3Mv","refresh_token":"1/teUKUqGKMLjVqs-eed0L8omI02pzSxMUYaxGc2QxBw0","scope":"https://www.googleapis.com/auth/documents https://www.googleapis.com/auth/drive https://www.googleapis.com/auth/drive.file https://www.googleapis.com/auth/documents.readonly","token_type":"Bearer","expiry_date":1565654175862}
\ No newline at end of file diff --git a/src/server/database.ts b/src/server/database.ts index a7254fb0c..990441d5a 100644 --- a/src/server/database.ts +++ b/src/server/database.ts @@ -1,209 +1,294 @@ import * as mongodb from 'mongodb'; import { Transferable } from './Message'; +import { Opt } from '../new_fields/Doc'; +import { Utils, emptyFunction } from '../Utils'; +import { DashUploadUtils } from './DashUploadUtils'; +import { Credentials } from 'google-auth-library'; -export class Database { - public static DocumentsCollection = 'documents'; - public static Instance = new Database(); - private MongoClient = mongodb.MongoClient; - private url = 'mongodb://localhost:27017/Dash'; - private currentWrites: { [id: string]: Promise<void> } = {}; - private db?: mongodb.Db; - private onConnect: (() => void)[] = []; - - constructor() { - this.MongoClient.connect(this.url, (err, client) => { - this.db = client.db(); - this.onConnect.forEach(fn => fn()); - }); - } +export namespace Database { - public update(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateWriteOpResult) => void, upsert = true, collectionName = Database.DocumentsCollection) { - if (this.db) { - let collection = this.db.collection(collectionName); - const prom = this.currentWrites[id]; - let newProm: Promise<void>; - const run = (): Promise<void> => { - return new Promise<void>(resolve => { - collection.updateOne({ _id: id }, value, { upsert } - , (err, res) => { - if (this.currentWrites[id] === newProm) { - delete this.currentWrites[id]; - } - resolve(); - callback(err, res); - }); - }); - }; - newProm = prom ? prom.then(run) : run(); - this.currentWrites[id] = newProm; - } else { - this.onConnect.push(() => this.update(id, value, callback, upsert, collectionName)); - } - } + class Database { + public static DocumentsCollection = 'documents'; + private MongoClient = mongodb.MongoClient; + private url = 'mongodb://localhost:27017/Dash'; + private currentWrites: { [id: string]: Promise<void> } = {}; + private db?: mongodb.Db; + private onConnect: (() => void)[] = []; - public replace(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateWriteOpResult) => void, upsert = true, collectionName = Database.DocumentsCollection) { - if (this.db) { - let collection = this.db.collection(collectionName); - const prom = this.currentWrites[id]; - let newProm: Promise<void>; - const run = (): Promise<void> => { - return new Promise<void>(resolve => { - collection.replaceOne({ _id: id }, value, { upsert } - , (err, res) => { - if (this.currentWrites[id] === newProm) { - delete this.currentWrites[id]; - } - resolve(); - callback(err, res); - }); - }); - }; - newProm = prom ? 
prom.then(run) : run(); - this.currentWrites[id] = newProm; - } else { - this.onConnect.push(() => this.replace(id, value, callback, upsert, collectionName)); + constructor() { + this.MongoClient.connect(this.url, (err, client) => { + this.db = client.db(); + this.onConnect.forEach(fn => fn()); + }); } - } - public delete(query: any, collectionName?: string): Promise<mongodb.DeleteWriteOpResultObject>; - public delete(id: string, collectionName?: string): Promise<mongodb.DeleteWriteOpResultObject>; - public delete(id: any, collectionName = Database.DocumentsCollection) { - if (typeof id === "string") { - id = { _id: id }; - } - if (this.db) { - const db = this.db; - return new Promise(res => db.collection(collectionName).deleteMany(id, (err, result) => res(result))); - } else { - return new Promise(res => this.onConnect.push(() => res(this.delete(id, collectionName)))); + public async update(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateWriteOpResult) => void, upsert = true, collectionName = Database.DocumentsCollection) { + if (this.db) { + let collection = this.db.collection(collectionName); + const prom = this.currentWrites[id]; + let newProm: Promise<void>; + const run = (): Promise<void> => { + return new Promise<void>(resolve => { + collection.updateOne({ _id: id }, value, { upsert } + , (err, res) => { + if (this.currentWrites[id] === newProm) { + delete this.currentWrites[id]; + } + resolve(); + callback(err, res); + }); + }); + }; + newProm = prom ? prom.then(run) : run(); + this.currentWrites[id] = newProm; + return newProm; + } else { + this.onConnect.push(() => this.update(id, value, callback, upsert, collectionName)); + } } - } - public deleteAll(collectionName = Database.DocumentsCollection): Promise<any> { - return new Promise(res => { + public replace(id: string, value: any, callback: (err: mongodb.MongoError, res: mongodb.UpdateWriteOpResult) => void, upsert = true, collectionName = Database.DocumentsCollection) { if (this.db) { - this.db.collection(collectionName).deleteMany({}, res); + let collection = this.db.collection(collectionName); + const prom = this.currentWrites[id]; + let newProm: Promise<void>; + const run = (): Promise<void> => { + return new Promise<void>(resolve => { + collection.replaceOne({ _id: id }, value, { upsert } + , (err, res) => { + if (this.currentWrites[id] === newProm) { + delete this.currentWrites[id]; + } + resolve(); + callback(err, res); + }); + }); + }; + newProm = prom ? 
prom.then(run) : run(); + this.currentWrites[id] = newProm; } else { - this.onConnect.push(() => this.db && this.db.collection(collectionName).deleteMany({}, res)); + this.onConnect.push(() => this.replace(id, value, callback, upsert, collectionName)); } - }); - } + } - public insert(value: any, collectionName = Database.DocumentsCollection) { - if (this.db) { - if ("id" in value) { - value._id = value.id; - delete value.id; + public delete(query: any, collectionName?: string): Promise<mongodb.DeleteWriteOpResultObject>; + public delete(id: string, collectionName?: string): Promise<mongodb.DeleteWriteOpResultObject>; + public delete(id: any, collectionName = Database.DocumentsCollection) { + if (typeof id === "string") { + id = { _id: id }; + } + if (this.db) { + const db = this.db; + return new Promise(res => db.collection(collectionName).deleteMany(id, (err, result) => res(result))); + } else { + return new Promise(res => this.onConnect.push(() => res(this.delete(id, collectionName)))); } - const id = value._id; - const collection = this.db.collection(collectionName); - const prom = this.currentWrites[id]; - let newProm: Promise<void>; - const run = (): Promise<void> => { - return new Promise<void>(resolve => { - collection.insertOne(value, (err, res) => { - if (this.currentWrites[id] === newProm) { - delete this.currentWrites[id]; - } - resolve(); - }); - }); - }; - newProm = prom ? prom.then(run) : run(); - this.currentWrites[id] = newProm; - } else { - this.onConnect.push(() => this.insert(value, collectionName)); } - } - public getDocument(id: string, fn: (result?: Transferable) => void, collectionName = Database.DocumentsCollection) { - if (this.db) { - this.db.collection(collectionName).findOne({ _id: id }, (err, result) => { - if (result) { - result.id = result._id; - delete result._id; - fn(result); + public async deleteAll(collectionName = Database.DocumentsCollection, persist = true): Promise<any> { + return new Promise(resolve => { + const executor = async (database: mongodb.Db) => { + if (persist) { + await database.collection(collectionName).deleteMany({}); + } else { + await database.dropCollection(collectionName); + } + resolve(); + }; + if (this.db) { + executor(this.db); } else { - fn(undefined); + this.onConnect.push(() => this.db && executor(this.db)); } }); - } else { - this.onConnect.push(() => this.getDocument(id, fn, collectionName)); } - } - public getDocuments(ids: string[], fn: (result: Transferable[]) => void, collectionName = Database.DocumentsCollection) { - if (this.db) { - this.db.collection(collectionName).find({ _id: { "$in": ids } }).toArray((err, docs) => { - if (err) { - console.log(err.message); - console.log(err.errmsg); + public async insert(value: any, collectionName = Database.DocumentsCollection) { + if (this.db) { + if ("id" in value) { + value._id = value.id; + delete value.id; } - fn(docs.map(doc => { - doc.id = doc._id; - delete doc._id; - return doc; - })); - }); - } else { - this.onConnect.push(() => this.getDocuments(ids, fn, collectionName)); + const id = value._id; + const collection = this.db.collection(collectionName); + const prom = this.currentWrites[id]; + let newProm: Promise<void>; + const run = (): Promise<void> => { + return new Promise<void>(resolve => { + collection.insertOne(value, (err, res) => { + if (this.currentWrites[id] === newProm) { + delete this.currentWrites[id]; + } + resolve(); + }); + }); + }; + newProm = prom ? 
prom.then(run) : run(); + this.currentWrites[id] = newProm; + return newProm; + } else { + this.onConnect.push(() => this.insert(value, collectionName)); + } } - } - public async visit(ids: string[], fn: (result: any) => string[], collectionName = "newDocuments"): Promise<void> { - if (this.db) { - const visited = new Set<string>(); - while (ids.length) { - const count = Math.min(ids.length, 1000); - const index = ids.length - count; - const fetchIds = ids.splice(index, count).filter(id => !visited.has(id)); - if (!fetchIds.length) { - continue; - } - const docs = await new Promise<{ [key: string]: any }[]>(res => Database.Instance.getDocuments(fetchIds, res, "newDocuments")); - for (const doc of docs) { - const id = doc.id; - visited.add(id); - ids.push(...fn(doc)); + public getDocument(id: string, fn: (result?: Transferable) => void, collectionName = "newDocuments") { + if (this.db) { + this.db.collection(collectionName).findOne({ _id: id }, (err, result) => { + if (result) { + result.id = result._id; + delete result._id; + fn(result); + } else { + fn(undefined); + } + }); + } else { + this.onConnect.push(() => this.getDocument(id, fn, collectionName)); + } + } + + public getDocuments(ids: string[], fn: (result: Transferable[]) => void, collectionName = Database.DocumentsCollection) { + if (this.db) { + this.db.collection(collectionName).find({ _id: { "$in": ids } }).toArray((err, docs) => { + if (err) { + console.log(err.message); + console.log(err.errmsg); + } + fn(docs.map(doc => { + doc.id = doc._id; + delete doc._id; + return doc; + })); + }); + } else { + this.onConnect.push(() => this.getDocuments(ids, fn, collectionName)); + } + } + + public async visit(ids: string[], fn: (result: any) => string[] | Promise<string[]>, collectionName = "newDocuments"): Promise<void> { + if (this.db) { + const visited = new Set<string>(); + while (ids.length) { + const count = Math.min(ids.length, 1000); + const index = ids.length - count; + const fetchIds = ids.splice(index, count).filter(id => !visited.has(id)); + if (!fetchIds.length) { + continue; + } + const docs = await new Promise<{ [key: string]: any }[]>(res => Instance.getDocuments(fetchIds, res, "newDocuments")); + for (const doc of docs) { + const id = doc.id; + visited.add(id); + ids.push(...(await fn(doc))); + } } + } else { + return new Promise(res => { + this.onConnect.push(() => { + this.visit(ids, fn, collectionName); + res(); + }); + }); } + } - } else { - return new Promise(res => { - this.onConnect.push(() => { - this.visit(ids, fn, collectionName); - res(); + public query(query: { [key: string]: any }, projection?: { [key: string]: 0 | 1 }, collectionName = "newDocuments"): Promise<mongodb.Cursor> { + if (this.db) { + let cursor = this.db.collection(collectionName).find(query); + if (projection) { + cursor = cursor.project(projection); + } + return Promise.resolve<mongodb.Cursor>(cursor); + } else { + return new Promise<mongodb.Cursor>(res => { + this.onConnect.push(() => res(this.query(query, projection, collectionName))); }); - }); + } } - } - public query(query: { [key: string]: any }, projection?: { [key: string]: 0 | 1 }, collectionName = "newDocuments"): Promise<mongodb.Cursor> { - if (this.db) { - let cursor = this.db.collection(collectionName).find(query); - if (projection) { - cursor = cursor.project(projection); + public updateMany(query: any, update: any, collectionName = "newDocuments") { + if (this.db) { + const db = this.db; + return new Promise<mongodb.WriteOpResult>(res => 
db.collection(collectionName).update(query, update, (_, result) => res(result))); + } else { + return new Promise<mongodb.WriteOpResult>(res => { + this.onConnect.push(() => this.updateMany(query, update, collectionName).then(res)); + }); } - return Promise.resolve<mongodb.Cursor>(cursor); - } else { - return new Promise<mongodb.Cursor>(res => { - this.onConnect.push(() => res(this.query(query, projection, collectionName))); - }); } - } - public updateMany(query: any, update: any, collectionName = "newDocuments") { - if (this.db) { - const db = this.db; - return new Promise<mongodb.WriteOpResult>(res => db.collection(collectionName).update(query, update, (_, result) => res(result))); - } else { - return new Promise<mongodb.WriteOpResult>(res => { - this.onConnect.push(() => this.updateMany(query, update, collectionName).then(res)); - }); + public print() { + console.log("db says hi!"); } } - public print() { - console.log("db says hi!"); + export const Instance = new Database(); + + export namespace Auxiliary { + + export enum AuxiliaryCollections { + GooglePhotosUploadHistory = "uploadedFromGooglePhotos" + } + + const SanitizedCappedQuery = async (query: { [key: string]: any }, collection: string, cap: number, removeId = true) => { + const cursor = await Instance.query(query, undefined, collection); + const results = await cursor.toArray(); + const slice = results.slice(0, Math.min(cap, results.length)); + return removeId ? slice.map(result => { + delete result._id; + return result; + }) : slice; + }; + + const SanitizedSingletonQuery = async <T>(query: { [key: string]: any }, collection: string, removeId = true): Promise<Opt<T>> => { + const results = await SanitizedCappedQuery(query, collection, 1, removeId); + return results.length ? results[0] : undefined; + }; + + export const QueryUploadHistory = async (contentSize: number) => { + return SanitizedSingletonQuery<DashUploadUtils.UploadInformation>({ contentSize }, AuxiliaryCollections.GooglePhotosUploadHistory); + }; + + export namespace GoogleAuthenticationToken { + + const GoogleAuthentication = "googleAuthentication"; + + export type StoredCredentials = Credentials & { _id: string }; + + export const Fetch = async (userId: string, removeId = true) => { + return SanitizedSingletonQuery<StoredCredentials>({ userId }, GoogleAuthentication, removeId); + }; + + export const Write = async (userId: string, token: any) => { + return Instance.insert({ userId, canAccess: [], ...token }, GoogleAuthentication); + }; + + export const Update = async (userId: string, access_token: string, expiry_date: number) => { + const entry = await Fetch(userId, false); + if (entry) { + const parameters = { $set: { access_token, expiry_date } }; + return Instance.update(entry._id, parameters, emptyFunction, true, GoogleAuthentication); + } + }; + + export const DeleteAll = () => Instance.deleteAll(GoogleAuthentication, false); + + } + + export const LogUpload = async (information: DashUploadUtils.UploadInformation) => { + const bundle = { + _id: Utils.GenerateDeterministicGuid(String(information.contentSize!)), + ...information + }; + return Instance.insert(bundle, AuxiliaryCollections.GooglePhotosUploadHistory); + }; + + export const DeleteAll = async (persist = false) => { + const collectionNames = Object.values(AuxiliaryCollections); + const pendingDeletions = collectionNames.map(name => Instance.deleteAll(name, persist)); + return Promise.all(pendingDeletions); + }; + } -} + +}
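A brief sketch of how the new Database.Auxiliary helpers above are meant to be combined; the userId and token values are placeholders:

import { Database } from "./database";

async function storeOrReuseToken(userId: string, freshToken: any) {
    const stored = await Database.Auxiliary.GoogleAuthenticationToken.Fetch(userId);
    if (!stored) {
        await Database.Auxiliary.GoogleAuthenticationToken.Write(userId, freshToken);
    }
    return stored || freshToken;
}

// Skip re-uploading an image whose content size has already been logged to the upload history collection.
async function alreadyUploaded(contentSize: number) {
    return (await Database.Auxiliary.QueryUploadHistory(contentSize)) !== undefined;
}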
\ No newline at end of file diff --git a/src/server/index.ts b/src/server/index.ts index de46ebf71..62938b9c7 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -29,26 +29,32 @@ import { RouteStore } from './RouteStore'; import v4 = require('uuid/v4'); const app = express(); const config = require('../../webpack.config'); -import { createCanvas, loadImage, Canvas } from "canvas"; +import { createCanvas } from "canvas"; const compiler = webpack(config); const port = 1050; // default port to listen const serverPort = 4321; import expressFlash = require('express-flash'); import flash = require('connect-flash'); import { Search } from './Search'; -import _ = require('lodash'); import * as Archiver from 'archiver'; var AdmZip = require('adm-zip'); import * as YoutubeApi from "./apis/youtube/youtubeApiSample"; import { Response } from 'express-serve-static-core'; import { GoogleApiServerUtils } from "./apis/google/GoogleApiServerUtils"; -import { GaxiosResponse } from 'gaxios'; -import { Opt } from '../new_fields/Doc'; -import { docs_v1 } from 'googleapis'; -import { Endpoint } from 'googleapis-common'; const MongoStore = require('connect-mongo')(session); const mongoose = require('mongoose'); const probe = require("probe-image-size"); +const pdf = require('pdf-parse'); +var findInFiles = require('find-in-files'); +import { GooglePhotosUploadUtils } from './apis/google/GooglePhotosUploadUtils'; +import * as qs from 'query-string'; +import { Opt } from '../new_fields/Doc'; +import { DashUploadUtils } from './DashUploadUtils'; +import { BatchedArray, TimeUnit } from 'array-batcher'; +import { ParsedPDF } from "./PdfTypes"; +import { reject } from 'bluebird'; +import { ExifData } from 'exif'; +import { Result } from '../client/northstar/model/idea/idea'; const download = (url: string, dest: fs.PathLike) => request.get(url).pipe(fs.createWriteStream(dest)); let youtubeApiKey: string; @@ -115,7 +121,9 @@ function addSecureRoute(method: Method, ...subscribers: string[] ) { let abstracted = (req: express.Request, res: express.Response) => { - if (req.user) { + let sharing = qs.parse(qs.extract(req.originalUrl), { sort: false }).sharing === "true"; + sharing = sharing && req.originalUrl.startsWith("/doc/"); + if (req.user || sharing) { handler(req.user as any, res, req); } else { req.session!.target = req.originalUrl; @@ -157,6 +165,13 @@ app.get("/buxton", (req, res) => { command_line('python scraper.py', cwd).then(onResolved, tryPython3); }); +const STATUS = { + OK: 200, + BAD_REQUEST: 400, + EXECUTION_ERROR: 500, + PERMISSION_DENIED: 403 +}; + const command_line = (command: string, fromDirectory?: string) => { return new Promise<string>((resolve, reject) => { let options: ExecOptions = {}; @@ -196,6 +211,23 @@ const solrURL = "http://localhost:8983/solr/#/dash"; // GETTERS +app.get("/textsearch", async (req, res) => { + let q = req.query.q; + console.log("TEXTSEARCH " + q); + if (q === undefined) { + res.send([]); + return; + } + let results = await findInFiles.find({ 'term': q, 'flags': 'ig' }, uploadDirectory + "text", ".txt$"); + let resObj: { ids: string[], numFound: number, lines: string[] } = { ids: [], numFound: 0, lines: [] }; + for (var result in results) { + resObj.ids.push(path.basename(result, ".txt").replace(/upload_/, "")); + resObj.lines.push(results[result].line); + resObj.numFound++; + } + res.send(resObj); +}); + app.get("/search", async (req, res) => { const solrQuery: any = {}; ["q", "fq", "start", "rows", "hl", "hl.fl"].forEach(key => solrQuery[key] = req.query[key]); 
@@ -289,6 +321,76 @@ app.get("/serializeDoc/:docId", async (req, res) => { res.send({ docs, files: Array.from(files) }); }); +export type Hierarchy = { [id: string]: string | Hierarchy }; +export type ZipMutator = (file: Archiver.Archiver) => void | Promise<void>; + +app.get(`${RouteStore.imageHierarchyExport}/:docId`, async (req, res) => { + const id = req.params.docId; + const hierarchy: Hierarchy = {}; + await targetedVisitorRecursive(id, hierarchy); + BuildAndDispatchZip(res, async zip => { + await hierarchyTraverserRecursive(zip, hierarchy); + }); +}); + +const BuildAndDispatchZip = async (res: Response, mutator: ZipMutator): Promise<void> => { + const zip = Archiver('zip'); + zip.pipe(res); + await mutator(zip); + return zip.finalize(); +}; + +const targetedVisitorRecursive = async (seedId: string, hierarchy: Hierarchy): Promise<void> => { + const local: Hierarchy = {}; + const { title, data } = await getData(seedId); + const label = `${title} (${seedId})`; + if (Array.isArray(data)) { + hierarchy[label] = local; + await Promise.all(data.map(proxy => targetedVisitorRecursive(proxy.fieldId, local))); + } else { + hierarchy[label + path.extname(data)] = data; + } +}; + +const getData = async (seedId: string): Promise<{ data: string | any[], title: string }> => { + return new Promise<{ data: string | any[], title: string }>((resolve, reject) => { + Database.Instance.getDocument(seedId, async (result: any) => { + const { data, proto, title } = result.fields; + if (data) { + if (data.url) { + resolve({ data: data.url, title }); + } else if (data.fields) { + resolve({ data: data.fields, title }); + } else { + reject(); + } + } + if (proto) { + getData(proto.fieldId).then(resolve, reject); + } + }); + }); +}; + +const hierarchyTraverserRecursive = async (file: Archiver.Archiver, hierarchy: Hierarchy, prefix = "Dash Export"): Promise<void> => { + for (const key of Object.keys(hierarchy)) { + const result = hierarchy[key]; + if (typeof result === "string") { + let path: string; + let matches: RegExpExecArray | null; + if ((matches = /\:1050\/files\/(upload\_[\da-z]{32}.*)/g.exec(result)) !== null) { + path = `${__dirname}/public/files/${matches[1]}`; + } else { + const information = await DashUploadUtils.UploadImage(result); + path = information.mediaPaths[0]; + } + file.file(path, { name: key, prefix }); + } else { + await hierarchyTraverserRecursive(file, result, `${prefix}/${key}`); + } + } +}; + app.get("/downloadId/:docId", async (req, res) => { res.set('Content-disposition', `attachment;`); res.set('Content-Type', "application/zip"); @@ -420,10 +522,10 @@ app.get("/thumbnail/:filename", (req, res) => { let filename = req.params.filename; let noExt = filename.substring(0, filename.length - ".png".length); let pagenumber = parseInt(noExt.split('-')[1]); - fs.exists(uploadDir + filename, (exists: boolean) => { - console.log(`${uploadDir + filename} ${exists ? "exists" : "does not exist"}`); + fs.exists(uploadDirectory + filename, (exists: boolean) => { + console.log(`${uploadDirectory + filename} ${exists ? 
"exists" : "does not exist"}`); if (exists) { - let input = fs.createReadStream(uploadDir + filename); + let input = fs.createReadStream(uploadDirectory + filename); probe(input, (err: any, result: any) => { if (err) { console.log(err); @@ -434,7 +536,7 @@ app.get("/thumbnail/:filename", (req, res) => { }); } else { - LoadPage(uploadDir + filename.substring(0, filename.length - noExt.split('-')[1].length - ".PNG".length - 1) + ".pdf", pagenumber, res); + LoadPage(uploadDirectory + filename.substring(0, filename.length - noExt.split('-')[1].length - ".PNG".length - 1) + ".pdf", pagenumber, res); } }); }); @@ -503,21 +605,20 @@ addSecureRoute( res.sendFile(path.join(__dirname, '../../deploy/' + filename)); }, undefined, - RouteStore.home, - RouteStore.openDocumentWithId + RouteStore.home, RouteStore.openDocumentWithId ); addSecureRoute( Method.GET, - (user, res) => res.send(user.userDocumentId || ""), - undefined, + (user, res) => res.send(user.userDocumentId), + (res) => res.send(undefined), RouteStore.getUserDocumentId, ); addSecureRoute( Method.GET, - (user, res) => res.send(JSON.stringify({ id: user.id, email: user.email })), - undefined, + (user, res) => { res.send(JSON.stringify({ id: user.id, email: user.email })); }, + (res) => res.send(JSON.stringify({ id: "__guest__", email: "" })), RouteStore.getCurrUser ); @@ -556,54 +657,65 @@ class NodeCanvasFactory { } const pngTypes = [".png", ".PNG"]; -const pdfTypes = [".pdf", ".PDF"]; const jpgTypes = [".jpg", ".JPG", ".jpeg", ".JPEG"]; -const uploadDir = __dirname + "/public/files/"; +const uploadDirectory = __dirname + "/public/files/"; +const pdfDirectory = uploadDirectory + "text"; +DashUploadUtils.createIfNotExists(pdfDirectory); + +interface ImageFileResponse { + name: string; + path: string; + type: string; + exif: Opt<DashUploadUtils.EnrichedExifData>; +} + // SETTERS app.post( RouteStore.upload, (req, res) => { let form = new formidable.IncomingForm(); - form.uploadDir = uploadDir; + form.uploadDir = uploadDirectory; form.keepExtensions = true; - // let path = req.body.path; - console.log("upload"); - form.parse(req, (err, fields, files) => { - console.log("parsing"); - let names: string[] = []; - for (const name in files) { - const file = path.basename(files[name].path); - const ext = path.extname(file); - let resizers = [ - { resizer: sharp().rotate(), suffix: "_o" }, - { resizer: sharp().resize(100, undefined, { withoutEnlargement: true }).rotate(), suffix: "_s" }, - { resizer: sharp().resize(400, undefined, { withoutEnlargement: true }).rotate(), suffix: "_m" }, - { resizer: sharp().resize(900, undefined, { withoutEnlargement: true }).rotate(), suffix: "_l" }, - ]; - let isImage = false; - if (pngTypes.includes(ext)) { - resizers.forEach(element => { - element.resizer = element.resizer.png(); - }); - isImage = true; - } else if (jpgTypes.includes(ext)) { - resizers.forEach(element => { - element.resizer = element.resizer.jpeg(); - }); - isImage = true; - } - if (isImage) { - resizers.forEach(resizer => { - fs.createReadStream(uploadDir + file).pipe(resizer.resizer).pipe(fs.createWriteStream(uploadDir + file.substring(0, file.length - ext.length) + resizer.suffix + ext)); + form.parse(req, async (_err, _fields, files) => { + let results: ImageFileResponse[] = []; + for (const key in files) { + const { type, path: location, name } = files[key]; + const filename = path.basename(location); + let uploadInformation: Opt<DashUploadUtils.UploadInformation>; + if (filename.endsWith(".pdf")) { + let dataBuffer = 
fs.readFileSync(uploadDirectory + filename); + const result: ParsedPDF = await pdf(dataBuffer); + await new Promise<void>(resolve => { + const path = pdfDirectory + "/" + filename.substring(0, filename.length - ".pdf".length) + ".txt"; + fs.createWriteStream(path).write(result.text, error => { + if (!error) { + resolve(); + } else { + reject(error); + } + }); }); + } else { + uploadInformation = await DashUploadUtils.UploadImage(uploadDirectory + filename, filename); } - names.push(`/files/` + file); + const exif = uploadInformation ? uploadInformation.exifData : undefined; + results.push({ name, type, path: `/files/${filename}`, exif }); + } - res.send(names); + _success(res, results); }); } ); +app.post(RouteStore.inspectImage, async (req, res) => { + const { source } = req.body; + if (typeof source === "string") { + const uploadInformation = await DashUploadUtils.UploadImage(source); + return res.send(await DashUploadUtils.InspectImage(uploadInformation.mediaPaths[0])); + } + res.send({}); +}); + addSecureRoute( Method.POST, (user, res, req) => { @@ -613,7 +725,7 @@ addSecureRoute( res.status(401).send("incorrect parameters specified"); return; } - imageDataUri.outputFile(uri, uploadDir + filename).then((savedName: string) => { + imageDataUri.outputFile(uri, uploadDirectory + filename).then((savedName: string) => { const ext = path.extname(savedName); let resizers = [ { resizer: sharp().resize(100, undefined, { withoutEnlargement: true }), suffix: "_s" }, @@ -634,7 +746,7 @@ addSecureRoute( } if (isImage) { resizers.forEach(resizer => { - fs.createReadStream(savedName).pipe(resizer.resizer).pipe(fs.createWriteStream(uploadDir + filename + resizer.suffix + ext)); + fs.createReadStream(savedName).pipe(resizer.resizer).pipe(fs.createWriteStream(uploadDirectory + filename + resizer.suffix + ext)); }); } res.send("/files/" + filename + ext); @@ -681,21 +793,29 @@ app.use(RouteStore.corsProxy, (req, res) => { }).pipe(res); }); -app.get(RouteStore.delete, (req, res) => { - if (release) { - res.send("no"); - return; - } - deleteFields().then(() => res.redirect(RouteStore.home)); -}); +addSecureRoute( + Method.GET, + (user, res, req) => { + if (release) { + return _permission_denied(res, deletionPermissionError); + } + deleteFields().then(() => res.redirect(RouteStore.home)); + }, + undefined, + RouteStore.delete +); -app.get(RouteStore.deleteAll, (req, res) => { - if (release) { - res.send("no"); - return; - } - deleteAll().then(() => res.redirect(RouteStore.home)); -}); +addSecureRoute( + Method.GET, + (_user, res, _req) => { + if (release) { + return _permission_denied(res, deletionPermissionError); + } + deleteAll().then(() => res.redirect(RouteStore.home)); + }, + undefined, + RouteStore.deleteAll +); app.use(wdm(compiler, { publicPath: config.output.publicPath })); @@ -801,8 +921,7 @@ function HandleYoutubeQuery([query, callback]: [YoutubeQueryInput, (result?: any } } -const credentials = path.join(__dirname, "./credentials/google_docs_credentials.json"); -const token = path.join(__dirname, "./credentials/google_docs_token.json"); +const credentialsPath = path.join(__dirname, "./credentials/google_docs_credentials.json"); const EndpointHandlerMap = new Map<GoogleApiServerUtils.Action, GoogleApiServerUtils.ApiRouter>([ ["create", (api, params) => api.create(params)], @@ -811,10 +930,10 @@ const EndpointHandlerMap = new Map<GoogleApiServerUtils.Action, GoogleApiServerU ]); app.post(RouteStore.googleDocs + "/:sector/:action", (req, res) => { - let sector = req.params.sector; - let action = 
req.params.action; - GoogleApiServerUtils.GetEndpoint(GoogleApiServerUtils.Service[sector as any], { credentials, token }).then(endpoint => { - let handler = EndpointHandlerMap.get(action as any); + let sector: GoogleApiServerUtils.Service = req.params.sector as GoogleApiServerUtils.Service; + let action: GoogleApiServerUtils.Action = req.params.action as GoogleApiServerUtils.Action; + GoogleApiServerUtils.GetEndpoint(GoogleApiServerUtils.Service[sector], { credentialsPath, userId: req.headers.userId as string }).then(endpoint => { + let handler = EndpointHandlerMap.get(action); if (endpoint && handler) { let execute = handler(endpoint, req.body).then( response => res.send(response.data), @@ -827,6 +946,151 @@ app.post(RouteStore.googleDocs + "/:sector/:action", (req, res) => { }); }); +app.get(RouteStore.readGoogleAccessToken, async (req, res) => { + const userId = req.header("userId")!; + const token = await Database.Auxiliary.GoogleAuthenticationToken.Fetch(userId); + const information = { credentialsPath, userId }; + if (!token) { + return res.send(await GoogleApiServerUtils.GenerateAuthenticationUrl(information)); + } + GoogleApiServerUtils.RetrieveAccessToken(information).then(token => res.send(token)); +}); + +app.post(RouteStore.writeGoogleAccessToken, async (req, res) => { + const userId = req.header("userId")!; + const information = { credentialsPath, userId }; + const { token } = await GoogleApiServerUtils.ProcessClientSideCode(information, req.body.authenticationCode); + res.send(token.access_token); +}); + +const tokenError = "Unable to successfully upload bytes for all images!"; +const mediaError = "Unable to convert all uploaded bytes to media items!"; +const userIdError = "Unable to parse the identification of the user!"; + +export interface NewMediaItem { + description: string; + simpleMediaItem: { + uploadToken: string; + }; +} + +app.post(RouteStore.googlePhotosMediaUpload, async (req, res) => { + const { media } = req.body; + const userId = req.header("userId"); + + if (!userId) { + return _error(res, userIdError); + } + + await GooglePhotosUploadUtils.initialize({ credentialsPath, userId }); + + let failed: number[] = []; + + const newMediaItems = await BatchedArray.from<GooglePhotosUploadUtils.MediaInput>(media, { batchSize: 25 }).batchedMapPatientInterval( + { magnitude: 100, unit: TimeUnit.Milliseconds }, + async (batch: GooglePhotosUploadUtils.MediaInput[]) => { + const newMediaItems: NewMediaItem[] = []; + for (let index = 0; index < batch.length; index++) { + const element = batch[index]; + const uploadToken = await GooglePhotosUploadUtils.DispatchGooglePhotosUpload(element.url); + if (!uploadToken) { + failed.push(index); + } else { + newMediaItems.push({ + description: element.description, + simpleMediaItem: { uploadToken } + }); + } + } + return newMediaItems; + } + ); + + const failedCount = failed.length; + if (failedCount) { + console.log(`Unable to upload ${failedCount} image${failedCount === 1 ? 
"" : "s"} to Google's servers`); + } + + GooglePhotosUploadUtils.CreateMediaItems(newMediaItems, req.body.album).then( + result => _success(res, { results: result.newMediaItemResults, failed }), + error => _error(res, mediaError, error) + ); +}); + +interface MediaItem { + baseUrl: string; + filename: string; +} +const prefix = "google_photos_"; + +const downloadError = "Encountered an error while executing downloads."; +const requestError = "Unable to execute download: the body's media items were malformed."; +const deletionPermissionError = "Cannot perform specialized delete outside of the development environment!"; + +app.get("/deleteWithAux", async (_req, res) => { + if (release) { + return _permission_denied(res, deletionPermissionError); + } + await Database.Auxiliary.DeleteAll(); + res.redirect(RouteStore.delete); +}); + +app.get("/deleteWithGoogleCredentials", async (req, res) => { + if (release) { + return _permission_denied(res, deletionPermissionError); + } + await Database.Auxiliary.GoogleAuthenticationToken.DeleteAll(); + res.redirect(RouteStore.delete); +}); + +const UploadError = (count: number) => `Unable to upload ${count} images to Dash's server`; +app.post(RouteStore.googlePhotosMediaDownload, async (req, res) => { + const contents: { mediaItems: MediaItem[] } = req.body; + let failed = 0; + if (contents) { + const completed: Opt<DashUploadUtils.UploadInformation>[] = []; + for (let item of contents.mediaItems) { + const { contentSize, ...attributes } = await DashUploadUtils.InspectImage(item.baseUrl); + const found: Opt<DashUploadUtils.UploadInformation> = await Database.Auxiliary.QueryUploadHistory(contentSize!); + if (!found) { + const upload = await DashUploadUtils.UploadInspectedImage({ contentSize, ...attributes }, item.filename, prefix).catch(error => _error(res, downloadError, error)); + if (upload) { + completed.push(upload); + await Database.Auxiliary.LogUpload(upload); + } else { + failed++; + } + } else { + completed.push(found); + } + } + if (failed) { + return _error(res, UploadError(failed)); + } + return _success(res, completed); + } + _invalid(res, requestError); +}); + +const _error = (res: Response, message: string, error?: any) => { + res.statusMessage = message; + res.status(STATUS.EXECUTION_ERROR).send(error); +}; + +const _success = (res: Response, body: any) => { + res.status(STATUS.OK).send(body); +}; + +const _invalid = (res: Response, message: string) => { + res.statusMessage = message; + res.status(STATUS.BAD_REQUEST).send(); +}; + +const _permission_denied = (res: Response, message: string) => { + res.statusMessage = message; + res.status(STATUS.BAD_REQUEST).send("Permission Denied!"); +}; + const suffixMap: { [type: string]: (string | [string, string | ((json: any) => any)]) } = { "number": "_n", "string": "_t", diff --git a/src/server/updateSearch.ts b/src/server/updateSearch.ts deleted file mode 100644 index 906b795f1..000000000 --- a/src/server/updateSearch.ts +++ /dev/null @@ -1,123 +0,0 @@ -import { Database } from "./database"; -import { Cursor } from "mongodb"; -import { Search } from "./Search"; -import pLimit from 'p-limit'; - -const suffixMap: { [type: string]: (string | [string, string | ((json: any) => any)]) } = { - "number": "_n", - "string": "_t", - "boolean": "_b", - // "image": ["_t", "url"], - "video": ["_t", "url"], - "pdf": ["_t", "url"], - "audio": ["_t", "url"], - "web": ["_t", "url"], - "date": ["_d", value => new Date(value.date).toISOString()], - "proxy": ["_i", "fieldId"], - "list": ["_l", list => { - const results 
= []; - for (const value of list.fields) { - const term = ToSearchTerm(value); - if (term) { - results.push(term.value); - } - } - return results.length ? results : null; - }] -}; - -function ToSearchTerm(val: any): { suffix: string, value: any } | undefined { - if (val === null || val === undefined) { - return; - } - const type = val.__type || typeof val; - let suffix = suffixMap[type]; - if (!suffix) { - return; - } - - if (Array.isArray(suffix)) { - const accessor = suffix[1]; - if (typeof accessor === "function") { - val = accessor(val); - } else { - val = val[accessor]; - } - suffix = suffix[0]; - } - - return { suffix, value: val }; -} - -function getSuffix(value: string | [string, any]): string { - return typeof value === "string" ? value : value[0]; -} - -const limit = pLimit(5); -async function update() { - // await new Promise(res => setTimeout(res, 5)); - console.log("update"); - await Search.Instance.clear(); - const cursor = await Database.Instance.query({}); - console.log("Cleared"); - const updates: any[] = []; - let numDocs = 0; - function updateDoc(doc: any) { - numDocs++; - if ((numDocs % 50) === 0) { - console.log("updateDoc " + numDocs); - } - // console.log("doc " + numDocs); - if (doc.__type !== "Doc") { - return; - } - const fields = doc.fields; - if (!fields) { - return; - } - const update: any = { id: doc._id }; - let dynfield = false; - for (const key in fields) { - const value = fields[key]; - const term = ToSearchTerm(value); - if (term !== undefined) { - let { suffix, value } = term; - update[key + suffix] = value; - dynfield = true; - } - } - if (dynfield) { - updates.push(update); - // console.log(updates.length); - } - } - await cursor.forEach(updateDoc); - console.log(`Updating ${updates.length} documents`); - const result = await Search.Instance.updateDocuments(updates); - try { - console.log(JSON.parse(result).responseHeader.status); - } catch { - console.log("Error:"); - // console.log(updates[i]); - console.log(result); - console.log("\n"); - } - // for (let i = 0; i < updates.length; i++) { - // console.log(i); - // const result = await Search.Instance.updateDocument(updates[i]); - // try { - // console.log(JSON.parse(result).responseHeader.status); - // } catch { - // console.log("Error:"); - // console.log(updates[i]); - // console.log(result); - // console.log("\n"); - // } - // } - // await Promise.all(updates.map(update => { - // return limit(() => Search.Instance.updateDocument(update)); - // })); - cursor.close(); -} - -update();
\ No newline at end of file
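
For reference, a minimal standalone sketch of how an exported Hierarchy maps onto zip entries. It mirrors the hierarchyTraverserRecursive added above, but pipes the archive to a local file rather than an Express response and omits the remote-image re-upload branch; the sample keys and paths are hypothetical.

import Archiver = require('archiver');
import * as fs from 'fs';

type Hierarchy = { [id: string]: string | Hierarchy };

// Leaves are absolute file paths; nested objects become folder prefixes
// inside the archive, producing exactly one zip entry per leaf.
async function addHierarchyToZip(zip: Archiver.Archiver, hierarchy: Hierarchy, prefix = "Dash Export"): Promise<void> {
    for (const key of Object.keys(hierarchy)) {
        const value = hierarchy[key];
        if (typeof value === "string") {
            zip.file(value, { name: key, prefix });
        } else {
            await addHierarchyToZip(zip, value, `${prefix}/${key}`);
        }
    }
}

// Usage: stream the archive to a local file instead of a Response.
const zip = Archiver('zip');
zip.pipe(fs.createWriteStream('export.zip'));
addHierarchyToZip(zip, {
    "My Collection (doc1)": {
        "Beach Photo (doc2).jpg": "/absolute/path/to/upload_beach.jpg"
    }
}).then(() => zip.finalize());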
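
The googlePhotosMediaUpload route above throttles its work through BatchedArray.batchedMapPatientInterval; a dependency-free sketch of the same pattern (fixed-size batches, a short pause between batches, failed indices recorded by position) might look like the following, where dispatch is only a stand-in for GooglePhotosUploadUtils.DispatchGooglePhotosUpload.

async function batchedUpload<T, R>(
    items: T[],
    dispatch: (item: T) => Promise<R | undefined>,
    batchSize = 25,
    intervalMs = 100
): Promise<{ results: R[], failed: number[] }> {
    const results: R[] = [];
    const failed: number[] = [];
    for (let start = 0; start < items.length; start += batchSize) {
        const batch = items.slice(start, start + batchSize);
        for (let offset = 0; offset < batch.length; offset++) {
            const outcome = await dispatch(batch[offset]);
            if (outcome === undefined) {
                failed.push(start + offset); // remember which item could not be uploaded
            } else {
                results.push(outcome);
            }
        }
        // brief pause between batches so the remote API is not hammered
        await new Promise(resolve => setTimeout(resolve, intervalMs));
    }
    return { results, failed };
}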
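
Finally, the suffixMap carried over into index.ts (and deleted along with updateSearch.ts) drives Solr-style dynamic field names; a small worked example, assuming the ToSearchTerm helper shown in the deleted file and entirely hypothetical field values:

// Hypothetical document; only the suffix logic mirrors the code above.
const exampleDoc = {
    _id: "doc1",
    fields: {
        title: "Budget notes",                          // string  -> title_t
        pageCount: 3,                                   // number  -> pageCount_n
        isShared: true,                                 // boolean -> isShared_b
        proto: { __type: "proxy", fieldId: "abc123" },  // proxy   -> proto_i
    }
};

const update: any = { id: exampleDoc._id };
for (const key in exampleDoc.fields) {
    const term = ToSearchTerm((exampleDoc.fields as any)[key]);
    if (term) {
        update[key + term.suffix] = term.value;
    }
}
// update is now:
// { id: "doc1", title_t: "Budget notes", pageCount_n: 3, isShared_b: true, proto_i: "abc123" }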