Diffstat (limited to 'src/server')
-rw-r--r--   src/server/ApiManagers/AssistantManager.ts    58
-rw-r--r--   src/server/ApiManagers/DataVizManager.ts        2
-rw-r--r--   src/server/ApiManagers/FireflyManager.ts      407
-rw-r--r--   src/server/ApiManagers/UploadManager.ts        35
-rw-r--r--   src/server/DashUploadUtils.ts                  57
-rw-r--r--   src/server/RouteManager.ts                      3
-rw-r--r--   src/server/authentication/DashUserModel.ts      5
-rw-r--r--   src/server/flashcard/labels.py                285
-rw-r--r--   src/server/flashcard/requirements.txt          12
-rw-r--r--   src/server/flashcard/venv/pyvenv.cfg            3
-rw-r--r--   src/server/index.ts                             3
-rw-r--r--   src/server/server_Initialization.ts            10
12 files changed, 804 insertions, 76 deletions
diff --git a/src/server/ApiManagers/AssistantManager.ts b/src/server/ApiManagers/AssistantManager.ts
index c5ba4b830..c41f697db 100644
--- a/src/server/ApiManagers/AssistantManager.ts
+++ b/src/server/ApiManagers/AssistantManager.ts
@@ -9,28 +9,22 @@
*/
import { Readability } from '@mozilla/readability';
-import axios, { AxiosResponse } from 'axios';
+import axios from 'axios';
import { spawn } from 'child_process';
import * as fs from 'fs';
import { writeFile } from 'fs';
import { google } from 'googleapis';
import { JSDOM } from 'jsdom';
+import OpenAI from 'openai';
import * as path from 'path';
import * as puppeteer from 'puppeteer';
import { promisify } from 'util';
import * as uuid from 'uuid';
import { AI_Document } from '../../client/views/nodes/chatbot/types/types';
+import { DashUploadUtils } from '../DashUploadUtils';
import { Method } from '../RouteManager';
import { filesDirectory, publicDirectory } from '../SocketData';
import ApiManager, { Registration } from './ApiManager';
-import { getServerPath } from '../../client/util/reportManager/reportManagerUtils';
-import { file } from 'jszip';
-import ffmpegInstaller from '@ffmpeg-installer/ffmpeg';
-import ffmpeg from 'fluent-ffmpeg';
-import OpenAI from 'openai';
-import * as xmlbuilder from 'xmlbuilder';
-import { last } from 'lodash';
-import { DashUploadUtils } from '../DashUploadUtils';
// Enumeration of directories where different file types are stored
export enum Directory {
@@ -133,8 +127,6 @@ export default class AssistantManager extends ApiManager {
const { query, max_results } = req.body;
const MIN_VALID_RESULTS_RATIO = 0.75; // 3/4 threshold
let startIndex = 1; // Start at the first result initially
- let validResults: any[] = [];
-
const fetchSearchResults = async (start: number) => {
return customsearch.cse.list({
q: query,
@@ -146,20 +138,27 @@ export default class AssistantManager extends ApiManager {
});
};
- const filterResultsByXFrameOptions = async (results: any[]) => {
+ const filterResultsByXFrameOptions = async (
+ results: {
+ url: string | null | undefined;
+ snippet: string | null | undefined;
+ }[]
+ ) => {
const filteredResults = await Promise.all(
- results.map(async result => {
- try {
- const urlResponse: AxiosResponse = await axios.head(result.url, { timeout: 5000 });
- const xFrameOptions = urlResponse.headers['x-frame-options'];
- if (xFrameOptions && xFrameOptions.toUpperCase() === 'SAMEORIGIN') {
- return result;
+ results
+ .filter(result => result.url)
+ .map(async result => {
+ try {
+ const urlResponse = await axios.head(result.url!, { timeout: 5000 });
+ const xFrameOptions = urlResponse.headers['x-frame-options'];
+ if (xFrameOptions && xFrameOptions.toUpperCase() === 'SAMEORIGIN') {
+ return result;
+ }
+ } catch (error) {
+ console.error(`Error checking x-frame-options for URL: ${result.url}`, error);
}
- } catch (error) {
- console.error(`Error checking x-frame-options for URL: ${result.url}`, error);
- }
- return null; // Exclude the result if it doesn't match
- })
+ return null; // Exclude the result if it doesn't match
+ })
);
return filteredResults.filter(result => result !== null); // Remove null results
};
@@ -167,14 +166,14 @@ export default class AssistantManager extends ApiManager {
try {
// Fetch initial search results
let response = await fetchSearchResults(startIndex);
- let initialResults =
+ const initialResults =
response.data.items?.map(item => ({
url: item.link,
snippet: item.snippet,
})) || [];
// Filter the initial results
- validResults = await filterResultsByXFrameOptions(initialResults);
+ let validResults = await filterResultsByXFrameOptions(initialResults);
// If valid results are less than 3/4 of max_results, fetch more results
while (validResults.length < max_results * MIN_VALID_RESULTS_RATIO) {
@@ -288,7 +287,7 @@ export default class AssistantManager extends ApiManager {
// Step 2: Transcribe audio using OpenAI Whisper
console.log('Transcribing audio...');
const transcription = await openai.audio.transcriptions.create({
- file: fs.createReadStream(audioPath) as any,
+ file: fs.createReadStream(audioPath),
model: 'whisper-1',
response_format: 'verbose_json',
timestamp_granularities: ['segment'],
@@ -298,7 +297,7 @@ export default class AssistantManager extends ApiManager {
// Step 3: Extract concise JSON
console.log('Extracting concise JSON...');
- const originalSegments = transcription.segments?.map((segment: any, index: number) => ({
+ const originalSegments = transcription.segments?.map((segment, index) => ({
index: index.toString(),
text: segment.text,
start: segment.start,
@@ -767,7 +766,7 @@ function spawnPythonProcess(jobId: string, file_name: string, file_path: string)
try {
const errorOutput = JSON.parse(lastLine);
jobResults[jobId] = errorOutput;
- } catch (err) {
+ } catch {
jobResults[jobId] = { error: 'Python process failed' };
}
} else {
@@ -829,6 +828,3 @@ function spawnPythonProcess(jobId: string, file_name: string, file_path: string)
runPythonScript();
}
}
-function customFfmpeg(filePath: string) {
- throw new Error('Function not implemented.');
-}
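
For reference, the result-filtering pattern in the hunk above can be sketched as a standalone helper; the SearchResult shape and the axios dependency come from the diff, while the standalone placement and local names are illustrative only:

import axios from 'axios';

type SearchResult = { url: string | null | undefined; snippet: string | null | undefined };

// Keep only results whose server answers a HEAD request with X-Frame-Options: SAMEORIGIN,
// mirroring filterResultsByXFrameOptions above; results with no URL are dropped before any request is made.
async function filterResultsByXFrameOptions(results: SearchResult[]): Promise<SearchResult[]> {
    const checked = await Promise.all(
        results
            .filter(result => result.url)
            .map(async result => {
                try {
                    const head = await axios.head(result.url!, { timeout: 5000 });
                    const xFrameOptions = head.headers['x-frame-options'];
                    if (xFrameOptions && String(xFrameOptions).toUpperCase() === 'SAMEORIGIN') return result;
                } catch (error) {
                    console.error(`Error checking x-frame-options for URL: ${result.url}`, error);
                }
                return null; // exclude results that error out or lack the header
            })
    );
    return checked.filter((r): r is SearchResult => r !== null);
}
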
diff --git a/src/server/ApiManagers/DataVizManager.ts b/src/server/ApiManagers/DataVizManager.ts
index 88f22992d..d2028f23b 100644
--- a/src/server/ApiManagers/DataVizManager.ts
+++ b/src/server/ApiManagers/DataVizManager.ts
@@ -9,7 +9,7 @@ export default class DataVizManager extends ApiManager {
register({
method: Method.GET,
subscription: '/csvData',
- secureHandler: async ({ req, res }) => {
+ secureHandler: ({ req, res }) => {
const uri = req.query.uri as string;
return new Promise<void>(resolve => {
diff --git a/src/server/ApiManagers/FireflyManager.ts b/src/server/ApiManagers/FireflyManager.ts
new file mode 100644
index 000000000..160a94d40
--- /dev/null
+++ b/src/server/ApiManagers/FireflyManager.ts
@@ -0,0 +1,407 @@
+import axios from 'axios';
+import { Dropbox } from 'dropbox';
+import * as fs from 'fs';
+import * as multipart from 'parse-multipart-data';
+import * as path from 'path';
+import { DashUserModel } from '../authentication/DashUserModel';
+import { DashUploadUtils } from '../DashUploadUtils';
+import { _error, _invalid, _success, Method } from '../RouteManager';
+import { Directory, filesDirectory } from '../SocketData';
+import ApiManager, { Registration } from './ApiManager';
+
+export default class FireflyManager extends ApiManager {
+ getBearerToken = () =>
+ fetch('https://ims-na1.adobelogin.com/ims/token/v3', {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/x-www-form-urlencoded',
+ },
+ body: `grant_type=client_credentials&client_id=${process.env._CLIENT_FIREFLY_CLIENT_ID}&client_secret=${process.env._CLIENT_FIREFLY_SECRET}&scope=openid,AdobeID,session,additional_info,read_organizations,firefly_api,ff_apis`,
+ }).catch(error => {
+ console.error('Error:', error);
+ return undefined;
+ });
+
+ generateImageFromStructure = (prompt: string = 'a realistic illustration of a cat coding', width: number = 2048, height: number = 2048, structureUrl: string, strength: number = 50, styles: string[], styleUrl: string | undefined) =>
+ this.getBearerToken().then(response =>
+ response?.json().then((data: { access_token: string }) =>
+ //prettier-ignore
+ fetch('https://firefly-api.adobe.io/v3/images/generate', {
+ method: 'POST',
+ headers: [
+ ['Content-Type', 'application/json'],
+ ['Accept', 'application/json'],
+ ['x-api-key', process.env._CLIENT_FIREFLY_CLIENT_ID ?? ''],
+ ['Authorization', `Bearer ${data.access_token}`],
+ ],
+ body: JSON.stringify({
+ prompt,
+ numVariations: 4,
+ detailLevel: 'preview',
+ modelVersion: 'image3_fast',
+ size: { width, height },
+ structure: !structureUrl
+ ? undefined
+ : {
+ strength,
+ imageReference: {
+ source: { url: structureUrl },
+ },
+ },
+ // prettier-ignore
+ style: {
+ presets: styles,
+ imageReference : !styleUrl
+ ? undefined
+ : {
+ source: { url: styleUrl },
+ }
+ }
+ }),
+ })
+ .then(response2 => response2.json().then(json =>
+ {
+ if (json.outputs?.length)
+ return (json.outputs as {image: {url:string }}[]).map(output => output.image);
+ throw new Error(JSON.stringify(json));
+ })
+ )
+ )
+ );
+
+ uploadImageToDropbox = (fileUrl: string, user: DashUserModel | undefined, dbx = new Dropbox({ accessToken: user?.dropboxToken || '' })) =>
+ new Promise<string | Error>((res, rej) =>
+ fs.readFile(path.join(filesDirectory, `${Directory.images}/${path.basename(fileUrl)}`), undefined, (err, contents) => {
+ if (err) {
+ console.log('Error: ', err);
+ rej();
+ } else {
+ dbx.filesUpload({ path: `/Apps/browndash/${path.basename(fileUrl)}`, contents })
+ .then(response => {
+ dbx.filesGetTemporaryLink({ path: response.result.path_display ?? '' })
+ .then(link => res(link.result.link))
+ .catch(e => res(new Error(e.toString())));
+ })
+ .catch(e => {
+ if (user?.dropboxRefresh) {
+ console.log('*********** try refresh dropbox for: ' + user.email + ' ***********');
+ this.refreshDropboxToken(user).then(token => {
+ if (!token) {
+ console.log('Dropbox error: cannot refresh token');
+ res(new Error(e.toString()));
+ } else {
+ const dbxNew = new Dropbox({ accessToken: user.dropboxToken || '' });
+ dbxNew
+ .filesUpload({ path: `/Apps/browndash/${path.basename(fileUrl)}`, contents })
+ .then(response => {
+ dbxNew
+ .filesGetTemporaryLink({ path: response.result.path_display ?? '' })
+ .then(link => res(link.result.link))
+ .catch(linkErr => res(new Error(linkErr.toString())));
+ })
+ .catch(uploadErr => {
+ console.log('Dropbox error:', uploadErr);
+ res(new Error(uploadErr.toString()));
+ });
+ }
+ });
+ } else {
+ console.log('Dropbox error:', e);
+ res(new Error(e.toString()));
+ }
+ });
+ }
+ })
+ );
+
+ generateImage = (prompt: string = 'a realistic illustration of a cat coding', width: number = 2048, height: number = 2048, seed?: number) => {
+ let body = `{ "prompt": "${prompt}", "size": { "width": ${width}, "height": ${height}} }`;
+ if (seed) {
+ console.log('RECEIVED SEED', seed);
+ body = `{ "prompt": "${prompt}", "size": { "width": ${width}, "height": ${height}}, "seeds": [${seed}]}`;
+ }
+ const fetched = this.getBearerToken().then(response =>
+ response?.json().then((data: { access_token: string }) =>
+ fetch('https://firefly-api.adobe.io/v3/images/generate', {
+ method: 'POST',
+ headers: [
+ ['Content-Type', 'application/json'],
+ ['Accept', 'application/json'],
+ ['x-api-key', process.env._CLIENT_FIREFLY_CLIENT_ID ?? ''],
+ ['Authorization', `Bearer ${data.access_token}`],
+ ],
+ body: body,
+ })
+ .then(response2 => response2.json().then(json => ({ seed: json.outputs?.[0]?.seed, url: json.outputs?.[0]?.image?.url })))
+ .catch(error => {
+ console.error('Error:', error);
+ return undefined;
+ })
+ )
+ );
+ return fetched;
+ };
+ expandImage = (imgUrl: string, prompt?: string) => {
+ const dropboxImgUrl = imgUrl;
+ const fetched = this.getBearerToken().then(response =>
+ response
+ ?.json()
+ .then((data: { access_token: string }) => {
+ return fetch('https://firefly-api.adobe.io/v3/images/expand', {
+ method: 'POST',
+ headers: [
+ ['Content-Type', 'application/json'],
+ ['Accept', 'application/json'],
+ ['x-api-key', process.env._CLIENT_FIREFLY_CLIENT_ID ?? ''],
+ ['Authorization', `Bearer ${data.access_token}`],
+ ],
+ body: JSON.stringify({
+ image: {
+ source: {
+ url: dropboxImgUrl,
+ },
+ },
+ numVariations: 1,
+ seeds: [0],
+ size: {
+ width: 3048,
+ height: 2048,
+ },
+ prompt: prompt ?? 'cloudy skies',
+ placement: {
+ inset: {
+ left: 0,
+ top: 0,
+ right: 0,
+ bottom: 0,
+ },
+ alignment: {
+ horizontal: 'center',
+ vertical: 'center',
+ },
+ },
+ }),
+ });
+ })
+ .then(resp => resp.json())
+ );
+ return fetched;
+ };
+ getImageText = (imageBlob: Blob) => {
+ const inputFileVarName = 'infile';
+ const outputVarName = 'result';
+ const fetched = this.getBearerToken().then(response =>
+ response?.json().then((data: { access_token: string }) => {
+ return fetch('https://sensei.adobe.io/services/v2/predict', {
+ method: 'POST',
+ headers: [
+ ['Prefer', 'respond-async, wait=59'],
+ ['x-api-key', process.env._CLIENT_FIREFLY_CLIENT_ID ?? ''],
+ // ['content-type', 'multipart/form-data'], // bcz: Don't set this!! content-type will get set automatically including the Boundary string
+ ['Authorization', `Bearer ${data.access_token}`],
+ ],
+ body: ((form: FormData) => {
+ form.set(inputFileVarName, imageBlob);
+ form.set(
+ 'contentAnalyzerRequests',
+ JSON.stringify({
+ 'sensei:name': 'Feature:cintel-object-detection:Service-b9ace8b348b6433e9e7d82371aa16690',
+ 'sensei:invocation_mode': 'asynchronous',
+ 'sensei:invocation_batch': false,
+ 'sensei:engines': [
+ {
+ 'sensei:execution_info': {
+ 'sensei:engine': 'Feature:cintel-object-detection:Service-b9ace8b348b6433e9e7d82371aa16690',
+ },
+ 'sensei:inputs': {
+ documents: [
+ {
+ 'sensei:multipart_field_name': inputFileVarName,
+ 'dc:format': 'image/png',
+ },
+ ],
+ },
+ 'sensei:params': {
+ correct_with_dictionary: true,
+ },
+ 'sensei:outputs': {
+ result: {
+ 'sensei:multipart_field_name': outputVarName,
+ 'dc:format': 'application/json',
+ },
+ },
+ },
+ ],
+ })
+ );
+ return form;
+ })(new FormData()),
+ }).then(response2 => {
+ const contentType = response2.headers.get('content-type') ?? '';
+ if (contentType.includes('application/json')) {
+ return response2.json().then((json: object) => JSON.stringify(json));
+ }
+ if (contentType.includes('multipart')) {
+ return response2
+ .arrayBuffer()
+ .then(arrayBuffer =>
+ multipart
+ .parse(Buffer.from(arrayBuffer), 'Boundary' + (response2.headers.get('content-type')?.match(/=Boundary(.*);/)?.[1] ?? ''))
+ .filter(part => part.name === outputVarName)
+ .map(part => JSON.parse(part.data.toString())[0])
+ .reduce((text, json) => text + (json?.is_text_present ? json.tags.map((tag: { text: string }) => tag.text).join(' ') : ''), '')
+ )
+ .catch(error => {
+ console.error('Error:', error);
+ return '';
+ });
+ }
+ return response2.text();
+ });
+ })
+ );
+ return fetched;
+ };
+
+ refreshDropboxToken = (user: DashUserModel) =>
+ axios
+ .post(
+ 'https://api.dropbox.com/oauth2/token',
+ new URLSearchParams({
+ refresh_token: user.dropboxRefresh || '',
+ grant_type: 'refresh_token',
+ client_id: process.env._CLIENT_DROPBOX_CLIENT_ID || '',
+ client_secret: process.env._CLIENT_DROPBOX_SECRET || '',
+ }).toString()
+ )
+ .then(refresh => {
+ console.log('***** dropbox token refreshed for ' + user?.email + ' ******* ');
+ user.dropboxToken = refresh.data.access_token;
+ user.save();
+ return user.dropboxToken;
+ })
+ .catch(e => {
+ console.log(e);
+ return undefined;
+ });
+
+ protected initialize(register: Registration): void {
+ register({
+ method: Method.POST,
+ subscription: '/queryFireflyImageFromStructure',
+ secureHandler: ({ req, res }) =>
+ new Promise<void>(resolver => {
+ (req.body.styleUrl ? this.uploadImageToDropbox(req.body.styleUrl, req.user as DashUserModel) : Promise.resolve(undefined))
+ .then(styleUrl => {
+ if (styleUrl instanceof Error) {
+ _invalid(res, styleUrl.message);
+ throw new Error('Error uploading images to dropbox');
+ }
+ this.uploadImageToDropbox(req.body.structure, req.user as DashUserModel)
+ .then(structureUrl => {
+ if (structureUrl instanceof Error) {
+ _invalid(res, structureUrl.message);
+ throw new Error('Error uploading images to dropbox');
+ }
+ return { styleUrl, structureUrl };
+ })
+ .then(uploads =>
+ this.generateImageFromStructure(req.body.prompt, req.body.width, req.body.height, uploads.structureUrl, req.body.strength, req.body.presets, uploads.styleUrl)
+ .then(images => {
+ Promise.all((images ?? [new Error('no images were generated')]).map(fire => (fire instanceof Error ? fire : DashUploadUtils.UploadImage(fire.url))))
+ .then(dashImages => {
+ if (dashImages.every(img => img instanceof Error)) _invalid(res, dashImages[0]!.message);
+ else _success(res, JSON.stringify(dashImages.filter(img => !(img instanceof Error))));
+ })
+ .then(resolver);
+ })
+ .catch(e => {
+ _invalid(res, e.message);
+ resolver();
+ })
+ );
+ })
+ .catch(() => {
+ /* do nothing */
+ resolver();
+ });
+ }),
+ });
+ register({
+ method: Method.POST,
+ subscription: '/queryFireflyImage',
+ secureHandler: ({ req, res }) =>
+ this.generateImage(req.body.prompt, req.body.width, req.body.height, req.body.seed).then(img =>
+ DashUploadUtils.UploadImage(img?.url ?? '', undefined, img?.seed).then(info => {
+ if (info instanceof Error) _invalid(res, info.message);
+ else _success(res, info);
+ })
+ ),
+ });
+
+ register({
+ method: Method.POST,
+ subscription: '/queryFireflyImageText',
+ secureHandler: ({ req, res }) =>
+ fetch(req.body.file).then(json =>
+ json.blob().then(file =>
+ this.getImageText(file).then(text => {
+ _success(res, text);
+ })
+ )
+ ),
+ });
+ register({
+ method: Method.POST,
+ subscription: '/expandImage',
+ secureHandler: ({ req, res }) =>
+ this.uploadImageToDropbox(req.body.file, req.user as DashUserModel).then(uploadUrl =>
+ uploadUrl instanceof Error
+ ? _invalid(res, uploadUrl.message)
+ : this.expandImage(uploadUrl, req.body.prompt).then(text => {
+ if (text.error_code) _error(res, text.message);
+ else
+ DashUploadUtils.UploadImage(text.outputs[0].image.url).then(info => {
+ if (info instanceof Error) _invalid(res, info.message);
+ else _success(res, info);
+ });
+ })
+ ),
+ });
+
+ // construct this url and send user to it. It will allow them to authorize their dropbox account and will send the resulting token to our endpoint /refreshDropbox
+ // https://www.dropbox.com/oauth2/authorize?client_id=DROPBOX_CLIENT_ID&response_type=code&token_access_type=offline&redirect_uri=http://localhost:1050/refreshDropbox
+ // see: https://dropbox.tech/developers/using-oauth-2-0-with-offline-access
+ //
+ register({
+ method: Method.GET,
+ subscription: '/refreshDropbox',
+ secureHandler: ({ req, res }) => {
+ const user = req.user as DashUserModel;
+ console.log(`******************* dropbox authorized for ${user?.email} ******************`);
+ _success(res, 'dropbox authorized for ' + user?.email);
+
+ const data = new URLSearchParams({
+ code: req.query.code as string,
+ grant_type: 'authorization_code',
+ client_id: process.env._CLIENT_DROPBOX_CLIENT_ID ?? '',
+ client_secret: process.env._CLIENT_DROPBOX_SECRET ?? '',
+ redirect_uri: 'http://localhost:1050/refreshDropbox',
+ });
+ axios
+ .post('https://api.dropbox.com/oauth2/token', data.toString())
+ .then(response => {
+ console.log('***** dropbox token (and refresh) received for ' + user?.email + ' ******* ');
+ user.dropboxToken = response.data.access_token;
+ user.dropboxRefresh = response.data.refresh_token;
+ user.save();
+
+ setTimeout(() => this.refreshDropboxToken(user), response.data.expires_in - 600);
+ })
+ .catch(e => {
+ console.log(e);
+ });
+ },
+ });
+ }
+}
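
As a usage sketch (not part of the diff), a browser client could call the new /queryFireflyImage route roughly as follows. The route path and body fields come from the registration above; the fetch wrapper, error handling, and default sizes are assumptions:

// Hypothetical client-side call to the /queryFireflyImage route registered above.
async function requestFireflyImage(prompt: string, width = 1024, height = 1024, seed?: number) {
    const response = await fetch('/queryFireflyImage', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        credentials: 'include', // the route is a secureHandler, so the session cookie must accompany the request
        body: JSON.stringify({ prompt, width, height, seed }),
    });
    // _invalid() now sends its message in the response body (see the RouteManager change below),
    // so a failed generation surfaces as a non-ok status with a readable message.
    if (!response.ok) throw new Error(await response.text());
    return response.json(); // expected to be the image info produced by DashUploadUtils.UploadImage
}
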
diff --git a/src/server/ApiManagers/UploadManager.ts b/src/server/ApiManagers/UploadManager.ts
index 868373474..c9d5df547 100644
--- a/src/server/ApiManagers/UploadManager.ts
+++ b/src/server/ApiManagers/UploadManager.ts
@@ -70,10 +70,16 @@ export default class UploadManager extends ApiManager {
]);
} else {
fileguids.split(';').map(guid => DashUploadUtils.uploadProgress.set(guid, `resampling images`));
+ // original filenames with '.'s, such as a Macbook screenshot, can be a problem - their extension is not kept in formidable's newFilename.
+ // This makes sure that the extension is preserved in the newFilename.
+ const fixNewFilename = (f: formidable.File) => {
+ if (path.extname(f.originalFilename ?? '') !== path.extname(f.newFilename)) f.newFilename = f.newFilename + path.extname(f.originalFilename ?? '');
+ return f;
+ };
const results = (
await Promise.all(
Array.from(Object.keys(files)).map(
- async key => (!files[key] ? undefined : DashUploadUtils.upload(files[key]![0] /* , key */)) // key is the guid used by the client to track upload progress.
+ async key => (!files[key] ? undefined : DashUploadUtils.upload(fixNewFilename(files[key][0]) /* , key */)) // key is the guid used by the client to track upload progress.
)
)
).filter(result => result && !(result.result instanceof Error));
@@ -147,13 +153,10 @@ export default class UploadManager extends ApiManager {
if (doc.id) {
doc.id = getId(doc.id);
}
- // eslint-disable-next-line no-restricted-syntax
for (const key in doc.fields) {
- // eslint-disable-next-line no-continue
if (!Object.prototype.hasOwnProperty.call(doc.fields, key)) continue;
const field = doc.fields[key];
- // eslint-disable-next-line no-continue
if (field === undefined || field === null) continue;
if (field.__type === 'Doc') {
@@ -182,11 +185,9 @@ export default class UploadManager extends ApiManager {
let docids: string[] = [];
let linkids: string[] = [];
try {
- // eslint-disable-next-line no-restricted-syntax
for (const name in files) {
if (Object.prototype.hasOwnProperty.call(files, name)) {
const f = files[name];
- // eslint-disable-next-line no-continue
if (!f) continue;
const path2 = f[0]; // what about the rest of the array? are we guaranteed only one value is set?
const zip = new AdmZip(path2.filepath);
@@ -273,14 +274,20 @@ export default class UploadManager extends ApiManager {
.filter(f => regex.test(f))
.map(f => fs.unlinkSync(serverPath + f));
}
- imageDataUri.outputFile(uri, serverPathToFile(Directory.images, InjectSize(filename, origSuffix))).then((savedName: string) => {
- const ext = path.extname(savedName).toLowerCase();
- const outputPath = serverPathToFile(Directory.images, filename + ext);
- if (AcceptableMedia.imageFormats.includes(ext)) {
- workerResample(savedName, outputPath, origSuffix, false);
- }
- res.send(clientPathToFile(Directory.images, filename + ext));
- });
+ imageDataUri
+ .outputFile(uri, serverPathToFile(Directory.images, InjectSize(filename, origSuffix)))
+ .then((savedName: string) => {
+ const ext = path.extname(savedName).toLowerCase();
+ const outputPath = serverPathToFile(Directory.images, filename + ext);
+ if (AcceptableMedia.imageFormats.includes(ext)) {
+ workerResample(savedName, outputPath, origSuffix, false);
+ }
+ res.send(clientPathToFile(Directory.images, filename + ext));
+ })
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ .catch((e: any) => {
+ res.status(404).json({ error: e.toString() });
+ });
},
});
}
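
The extension-preservation fix above can be illustrated in isolation; the UploadedFile shape below stands in for formidable.File, and the example filename is hypothetical:

import * as path from 'path';

// Minimal stand-in for the formidable.File fields used by fixNewFilename.
type UploadedFile = { originalFilename: string | null; newFilename: string };

// Re-append the original extension when formidable's generated newFilename dropped it,
// which happens for original names containing extra '.' characters (e.g. macOS screenshots).
function fixNewFilename(f: UploadedFile): UploadedFile {
    const originalExt = path.extname(f.originalFilename ?? '');
    if (originalExt !== path.extname(f.newFilename)) f.newFilename += originalExt;
    return f;
}

// fixNewFilename({ originalFilename: 'Screen Shot 2024-10-01 at 9.00.00 AM.png', newFilename: 'abc123' })
//   => newFilename becomes 'abc123.png'
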
diff --git a/src/server/DashUploadUtils.ts b/src/server/DashUploadUtils.ts
index 1e55a885a..2177c5d97 100644
--- a/src/server/DashUploadUtils.ts
+++ b/src/server/DashUploadUtils.ts
@@ -1,3 +1,4 @@
+/* eslint-disable no-use-before-define */
import axios from 'axios';
import { exec, spawn } from 'child_process';
import { green, red } from 'colors';
@@ -21,7 +22,6 @@ import { AzureManager } from './ApiManagers/AzureManager';
import { AcceptableMedia, Upload } from './SharedMediaTypes';
import { Directory, clientPathToFile, filesDirectory, pathToDirectory, publicDirectory, serverPathToFile } from './SocketData';
import { resolvedServerUrl } from './server_Initialization';
-
import { Worker, isMainThread, parentPort } from 'worker_threads';
// Create an array to store worker threads
@@ -47,20 +47,22 @@ if (isMainThread) {
async function workerResampleImage(message: { imgSourcePath: string; outputPath: string; origSuffix: string; unlinkSource: boolean }) {
const { imgSourcePath, outputPath, origSuffix, unlinkSource } = message;
- const sizes = !origSuffix ? [{ width: 400, suffix: SizeSuffix.Medium }] : DashUploadUtils.imageResampleSizes(path.extname(imgSourcePath));
+ const extension = path.extname(imgSourcePath);
+ const sizes = !origSuffix ? [{ width: 400, suffix: SizeSuffix.Medium }] : DashUploadUtils.imageResampleSizes(extension === '.xml' ? '.png' : extension);
// prettier-ignore
Jimp.read(imgSourcePath)
.then(img =>
sizes.forEach(({ width, suffix }) =>
img.resize({ w: width || img.bitmap.width })
- .write(InjectSize(outputPath, suffix) as `${string}.${string}`)
+ .write(InjectSize(outputPath, suffix) as `${string}.${string}`)
+ .catch(e => console.log("Jimp error:", e))
))
.catch(e => console.log('Error Jimp:', e))
.finally(() => unlinkSource && unlinkSync(imgSourcePath));
}
}
-// eslint-disable-next-line @typescript-eslint/no-var-requires
+// eslint-disable-next-line @typescript-eslint/no-require-imports
const requestImageSize = require('../client/util/request-image-size');
export enum SizeSuffix {
@@ -221,7 +223,6 @@ export namespace DashUploadUtils {
const parseExifData = async (source: string) => {
const image = await request.get(source, { encoding: null });
const { /* data, */ error } = await new Promise<{ data: ExifData; error: string | undefined }>(resolve => {
- // eslint-disable-next-line no-new
new ExifImage({ image }, (exifError, data) => {
resolve({ data, error: exifError?.message });
});
@@ -300,7 +301,6 @@ export namespace DashUploadUtils {
// Bundle up the information into an object
return {
source,
- // eslint-disable-next-line radix
contentSize: parseInt(headers[size]),
contentType: headers[type],
nativeWidth,
@@ -343,15 +343,24 @@ export namespace DashUploadUtils {
const outputPath = path.resolve(pathToDirectory(Directory.images), outputFileName);
const sizes = imageResampleSizes(path.extname(outputFileName));
- const imgReadStream = new Duplex();
- imgReadStream.push(fs.readFileSync(imgSourcePath));
- imgReadStream.push(null);
- await Promise.all(
- sizes.map(({ suffix }) =>
- new Promise<unknown>(res =>
- imgReadStream.pipe(createWriteStream(writtenFiles[suffix] = InjectSize(outputPath, suffix))).on('close', res)
- )
- )); // prettier-ignore
+ if (unlinkSource) {
+ const imgReadStream = new Duplex();
+ imgReadStream.push(fs.readFileSync(imgSourcePath));
+ imgReadStream.push(null);
+ await Promise.all(
+ sizes.map(({ suffix }) =>
+ new Promise<unknown>(res =>
+ imgReadStream.pipe(createWriteStream(writtenFiles[suffix] = InjectSize(outputPath, suffix))).on('close', res)
+ )
+ )); // prettier-ignore
+ } else {
+ await Promise.all(
+ sizes.map(({ suffix }) =>
+ new Promise<unknown>(res =>
+ request.get(imgSourcePath).pipe(createWriteStream(writtenFiles[suffix] = InjectSize(outputPath, suffix))).on('close', res)
+ )
+ )); // prettier-ignore
+ }
workerResample(imgSourcePath, outputPath, SizeSuffix.Original, unlinkSource);
return writtenFiles;
@@ -368,8 +377,9 @@ export namespace DashUploadUtils {
* @returns the accessPaths for the resized files.
*/
export const UploadInspectedImage = async (metadata: Upload.InspectionResults, filename: string, prefix = '', cleanUp = true): Promise<Upload.ImageInformation> => {
- const { requestable, source, ...remaining } = metadata;
- const resolved = filename || `${prefix}upload_${Utils.GenerateGuid()}.${remaining.contentType.split('/')[1].toLowerCase()}`;
+ const { requestable, ...remaining } = metadata;
+ const dfltSuffix = remaining.contentType.split('/')[1].toLowerCase();
+ const resolved = filename || `${prefix}upload_${Utils.GenerateGuid()}.${dfltSuffix === 'xml' ? 'jpg' : dfltSuffix}`;
const { images } = Directory;
const information: Upload.ImageInformation = {
accessPaths: {
@@ -400,10 +410,10 @@ export namespace DashUploadUtils {
writtenFiles = {};
}
} else {
- const unlinkSrcWhenFinished = isLocal().test(source) && cleanUp;
+ const unlinkSrcWhenFinished = cleanUp; // isLocal().test(source) && cleanUp;
try {
writtenFiles = await outputResizedImages(metadata.source, resolved, unlinkSrcWhenFinished);
- } catch (e) {
+ } catch {
// input is a blob or other, try reading it to create a metadata source file.
const reqSource = request(metadata.source);
const readStream: Stream = reqSource instanceof Promise ? await reqSource : reqSource;
@@ -415,7 +425,7 @@ export namespace DashUploadUtils {
.on('error', () => rej());
});
writtenFiles = await outputResizedImages(readSource, resolved, unlinkSrcWhenFinished);
- fs.unlink(readSource, err => console.log("Couldn't unlink temporary image file:" + readSource, err));
+ //fs.unlink(readSource, err => console.log("Couldn't unlink temporary image file:" + readSource, err));
}
}
Array.from(Object.keys(writtenFiles)).forEach(suffix => {
@@ -448,8 +458,7 @@ export namespace DashUploadUtils {
return { name: result.name, message: result.message };
}
const outputFile = filename || result.filename || '';
-
- return UploadInspectedImage(result, outputFile, prefix);
+ return UploadInspectedImage(result, outputFile, prefix, isLocal().exec(source) || source.startsWith('data:') ? true : false);
};
type md5 = 'md5';
@@ -567,7 +576,9 @@ export namespace DashUploadUtils {
switch (category) {
case 'image':
if (imageFormats.includes(format)) {
- const result = await UploadImage(filepath, basename(filepath));
+ const outputName = basename(filepath);
+ const extname = path.extname(originalFilename ?? '');
+ const result = await UploadImage(filepath, outputName.endsWith(extname) ? outputName : outputName + extname, undefined);
return { source: file, result };
}
fs.unlink(filepath, () => {});
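
For clarity, the filename fallback introduced in UploadInspectedImage amounts to the following; the helper name and the GUID generator are illustrative, not part of the codebase:

// Derive an output filename for an uploaded image when the caller supplied none.
// Content types reported as */xml fall back to a .jpg suffix, matching the dfltSuffix logic above.
function defaultImageFilename(contentType: string, prefix = '', generateGuid: () => string = () => Date.now().toString(36)) {
    const dfltSuffix = contentType.split('/')[1].toLowerCase();
    return `${prefix}upload_${generateGuid()}.${dfltSuffix === 'xml' ? 'jpg' : dfltSuffix}`;
}

// defaultImageFilename('image/png')       => 'upload_<guid>.png'
// defaultImageFilename('application/xml') => 'upload_<guid>.jpg'
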
diff --git a/src/server/RouteManager.ts b/src/server/RouteManager.ts
index d8e0455f6..2f6cf80b5 100644
--- a/src/server/RouteManager.ts
+++ b/src/server/RouteManager.ts
@@ -39,8 +39,7 @@ export function _success(res: Response, body: any) {
}
export function _invalid(res: Response, message: string) {
- res.statusMessage = message;
- res.status(STATUS.BAD_REQUEST).send();
+ res.status(STATUS.BAD_REQUEST).send(message);
}
export function _permissionDenied(res: Response, message?: string) {
diff --git a/src/server/authentication/DashUserModel.ts b/src/server/authentication/DashUserModel.ts
index bfa6d7bdb..debeef60c 100644
--- a/src/server/authentication/DashUserModel.ts
+++ b/src/server/authentication/DashUserModel.ts
@@ -9,6 +9,9 @@ export type DashUserModel = mongoose.Document & {
passwordResetToken?: string;
passwordResetExpires?: Date;
+ dropboxRefresh?: string;
+ dropboxToken?: string;
+
userDocumentId: string;
sharingDocumentId: string;
linkDatabaseId: string;
@@ -37,6 +40,8 @@ const userSchema = new mongoose.Schema(
passwordResetToken: String,
passwordResetExpires: Date,
+ dropboxRefresh: String,
+ dropboxToken: String,
userDocumentId: String, // id that identifies a document which hosts all of a user's account data
sharingDocumentId: String, // id that identifies a document that stores documents shared to a user, their user color, and any additional info needed to communicate between users
linkDatabaseId: String,
diff --git a/src/server/flashcard/labels.py b/src/server/flashcard/labels.py
new file mode 100644
index 000000000..546fc4bd3
--- /dev/null
+++ b/src/server/flashcard/labels.py
@@ -0,0 +1,285 @@
+import base64
+import numpy as np
+import base64
+import easyocr
+import sys
+from PIL import Image
+from io import BytesIO
+import requests
+import json
+import numpy as np
+
+class BoundingBoxUtils:
+ """Utility class for bounding box operations and OCR result corrections."""
+
+ @staticmethod
+ def is_close(box1, box2, x_threshold=20, y_threshold=20):
+ """
+ Determines if two bounding boxes are horizontally and vertically close.
+
+ Parameters:
+ box1, box2 (list): The bounding boxes to compare.
+ x_threshold (int): The threshold for horizontal proximity.
+ y_threshold (int): The threshold for vertical proximity.
+
+ Returns:
+ bool: True if boxes are close, False otherwise.
+ """
+ horizontally_close = (abs(box1[2] - box2[0]) < x_threshold or # Right edge of box1 and left edge of box2
+ abs(box2[2] - box1[0]) < x_threshold or # Right edge of box2 and left edge of box1
+ abs(box1[2] - box2[2]) < x_threshold or
+ abs(box2[0] - box1[0]) < x_threshold)
+
+ vertically_close = (abs(box1[3] - box2[1]) < y_threshold or # Bottom edge of box1 and top edge of box2
+ abs(box2[3] - box1[1]) < y_threshold or
+ box1[1] == box2[1] or box1[3] == box2[3])
+
+ return horizontally_close and vertically_close
+
+ @staticmethod
+ def adjust_bounding_box(bbox, original_text, corrected_text):
+ """
+ Adjusts a bounding box based on differences in text length.
+
+ Parameters:
+ bbox (list): The original bounding box coordinates.
+ original_text (str): The original text detected by OCR.
+ corrected_text (str): The corrected text after cleaning.
+
+ Returns:
+ list: The adjusted bounding box.
+ """
+ if not bbox or len(bbox) != 4:
+ return bbox
+
+ # Adjust the x-coordinates slightly to account for text correction
+ x_adjustment = 5
+ adjusted_bbox = [
+ [bbox[0][0] + x_adjustment, bbox[0][1]],
+ [bbox[1][0], bbox[1][1]],
+ [bbox[2][0] + x_adjustment, bbox[2][1]],
+ [bbox[3][0], bbox[3][1]]
+ ]
+ return adjusted_bbox
+
+ @staticmethod
+ def correct_ocr_results(results):
+ """
+ Corrects common OCR misinterpretations in the detected text and adjusts bounding boxes accordingly.
+
+ Parameters:
+ results (list): A list of OCR results, each containing bounding box, text, and confidence score.
+
+ Returns:
+ list: Corrected OCR results with adjusted bounding boxes.
+ """
+ corrections = {
+ "~": "", # Replace '~' with empty string
+ "-": "" # Replace '-' with empty string
+ }
+
+ corrected_results = []
+ for (bbox, text, prob) in results:
+ corrected_text = ''.join(corrections.get(char, char) for char in text)
+ adjusted_bbox = BoundingBoxUtils.adjust_bounding_box(bbox, text, corrected_text)
+ corrected_results.append((adjusted_bbox, corrected_text, prob))
+
+ return corrected_results
+
+ @staticmethod
+ def convert_to_json_serializable(data):
+ """
+ Converts a list containing various types, including numpy types, to a JSON-serializable format.
+
+ Parameters:
+ data (list): A list containing numpy or other non-serializable types.
+
+ Returns:
+ list: A JSON-serializable version of the input list.
+ """
+ def convert_element(element):
+ if isinstance(element, list):
+ return [convert_element(e) for e in element]
+ elif isinstance(element, tuple):
+ return tuple(convert_element(e) for e in element)
+ elif isinstance(element, np.integer):
+ return int(element)
+ elif isinstance(element, np.floating):
+ return float(element)
+ elif isinstance(element, np.ndarray):
+ return element.tolist()
+ else:
+ return element
+
+ return convert_element(data)
+
+class ImageLabelProcessor:
+ """Class to process images and perform OCR with EasyOCR."""
+
+ VERTICAL_THRESHOLD = 20
+ HORIZONTAL_THRESHOLD = 8
+
+ def __init__(self, img_source, source_type, smart_mode):
+ self.img_source = img_source
+ self.source_type = source_type
+ self.smart_mode = smart_mode
+ self.img_val = self.load_image()
+
+ def load_image(self):
+ """Load image from either a base64 string or URL."""
+ if self.source_type == 'drag':
+ return self._load_base64_image()
+ else:
+ return self._load_url_image()
+
+ def _load_base64_image(self):
+ """Decode and save the base64 image."""
+ base64_string = self.img_source
+ if base64_string.startswith("data:image"):
+ base64_string = base64_string.split(",")[1]
+
+
+ # Decode the base64 string
+ image_data = base64.b64decode(base64_string)
+ image = Image.open(BytesIO(image_data)).convert('RGB')
+ image.save("temp_image.jpg")
+ return "temp_image.jpg"
+
+ def _load_url_image(self):
+ """Download image from URL and return it in byte format."""
+ url = self.img_source
+ response = requests.get(url)
+ image = Image.open(BytesIO(response.content)).convert('RGB')
+
+ image_bytes = BytesIO()
+ image.save(image_bytes, format='PNG')
+ return image_bytes.getvalue()
+
+ def process_image(self):
+ """Process the image and return the OCR results."""
+ if self.smart_mode:
+ return self._process_smart_mode()
+ else:
+ return self._process_standard_mode()
+
+ def _process_smart_mode(self):
+ """Process the image in smart mode using EasyOCR."""
+ reader = easyocr.Reader(['en'])
+ result = reader.readtext(self.img_val, detail=1, paragraph=True)
+
+ all_boxes = [bbox for bbox, text in result]
+ all_texts = [text for bbox, text in result]
+
+ response_data = {
+ 'status': 'success',
+ 'message': 'Data received',
+ 'boxes': BoundingBoxUtils.convert_to_json_serializable(all_boxes),
+ 'text': BoundingBoxUtils.convert_to_json_serializable(all_texts),
+ }
+
+ return response_data
+
+ def _process_standard_mode(self):
+ """Process the image in standard mode using EasyOCR."""
+ reader = easyocr.Reader(['en'])
+ results = reader.readtext(self.img_val)
+
+ filtered_results = BoundingBoxUtils.correct_ocr_results([
+ (bbox, text, prob) for bbox, text, prob in results if prob >= 0.7
+ ])
+
+ return self._merge_and_prepare_response(filtered_results)
+
+ def are_vertically_close(self, box1, box2):
+ """Check if two bounding boxes are vertically close."""
+ box1_bottom = max(box1[2][1], box1[3][1])
+ box2_top = min(box2[0][1], box2[1][1])
+ vertical_distance = box2_top - box1_bottom
+
+ box1_left = box1[0][0]
+ box2_left = box2[0][0]
+ box1_right = box1[1][0]
+ box2_right = box2[1][0]
+ hori_close = abs(box2_left - box1_left) <= self.HORIZONTAL_THRESHOLD or abs(box2_right - box1_right) <= self.HORIZONTAL_THRESHOLD
+
+ return vertical_distance <= self.VERTICAL_THRESHOLD and hori_close
+
+ def merge_boxes(self, boxes, texts):
+ """Merge multiple bounding boxes and their associated text."""
+ x_coords = []
+ y_coords = []
+
+ # Collect all x and y coordinates
+ for box in boxes:
+ for point in box:
+ x_coords.append(point[0])
+ y_coords.append(point[1])
+
+ # Create the merged bounding box
+ merged_box = [
+ [min(x_coords), min(y_coords)],
+ [max(x_coords), min(y_coords)],
+ [max(x_coords), max(y_coords)],
+ [min(x_coords), max(y_coords)]
+ ]
+
+ # Combine the texts
+ merged_text = ' '.join(texts)
+
+ return merged_box, merged_text
+
+ def _merge_and_prepare_response(self, filtered_results):
+ """Merge vertically close boxes and prepare the final response."""
+ current_boxes, current_texts = [], []
+ all_boxes, all_texts = [], []
+
+ for ind in range(len(filtered_results) - 1):
+ if not current_boxes:
+ current_boxes.append(filtered_results[ind][0])
+ current_texts.append(filtered_results[ind][1])
+
+ if self.are_vertically_close(filtered_results[ind][0], filtered_results[ind + 1][0]):
+ current_boxes.append(filtered_results[ind + 1][0])
+ current_texts.append(filtered_results[ind + 1][1])
+ else:
+ merged = self.merge_boxes(current_boxes, current_texts)
+ all_boxes.append(merged[0])
+ all_texts.append(merged[1])
+ current_boxes, current_texts = [], []
+
+ if current_boxes:
+ merged = self.merge_boxes(current_boxes, current_texts)
+ all_boxes.append(merged[0])
+ all_texts.append(merged[1])
+
+ if not current_boxes and filtered_results:
+ merged = self.merge_boxes([filtered_results[-1][0]], [filtered_results[-1][1]])
+ all_boxes.append(merged[0])
+ all_texts.append(merged[1])
+
+ response = {
+ 'status': 'success',
+ 'message': 'Data received',
+ 'boxes': BoundingBoxUtils.convert_to_json_serializable(all_boxes),
+ 'text': BoundingBoxUtils.convert_to_json_serializable(all_texts),
+ }
+
+ return response
+
+# Main execution function
+def labels():
+ """Main function to handle image OCR processing based on input arguments."""
+ source_type = sys.argv[2]
+ smart_mode = (sys.argv[3] == 'smart')
+ with open(sys.argv[1], 'r') as f:
+ img_source = f.read()
+ # Create ImageLabelProcessor instance
+ processor = ImageLabelProcessor(img_source, source_type, smart_mode)
+ response = processor.process_image()
+
+ # Print and return the response
+ print(response)
+ return response
+
+
+labels()
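
For context, labels.py expects three positional arguments: a path to a file holding the image source (a base64 data URI or a URL), a source type ('drag' selects the base64 path), and a mode flag ('smart' enables paragraph-level OCR). A hedged sketch of invoking it from the Node side; the python binary, the relative script path, and the non-'smart' mode string are assumptions:

import { spawn } from 'child_process';

// Spawn the flashcard OCR script and collect whatever it prints to stdout.
// 'python3' and the script path are assumptions; the server may instead use the
// interpreter from the bundled virtualenv under src/server/flashcard/venv.
function runLabelsScript(imageSourceFile: string, sourceType: 'drag' | 'url', smart: boolean): Promise<string> {
    return new Promise((resolve, reject) => {
        const proc = spawn('python3', ['src/server/flashcard/labels.py', imageSourceFile, sourceType, smart ? 'smart' : 'standard']);
        let output = '';
        proc.stdout.on('data', chunk => (output += chunk.toString()));
        proc.stderr.on('data', chunk => console.error('labels.py:', chunk.toString()));
        proc.on('close', code => (code === 0 ? resolve(output) : reject(new Error(`labels.py exited with code ${code}`))));
    });
}
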
diff --git a/src/server/flashcard/requirements.txt b/src/server/flashcard/requirements.txt
new file mode 100644
index 000000000..eb92a819b
--- /dev/null
+++ b/src/server/flashcard/requirements.txt
@@ -0,0 +1,12 @@
+easyocr==1.7.1
+requests==2.32.3
+pillow==10.4.0
+numpy==1.26.4
+tqdm==4.66.4
+Werkzeug==3.0.3
+python-dateutil==2.9.0.post0
+six==1.16.0
+certifi==2024.6.2
+charset-normalizer==3.3.2
+idna==3.7
+urllib3==1.26.19
\ No newline at end of file
diff --git a/src/server/flashcard/venv/pyvenv.cfg b/src/server/flashcard/venv/pyvenv.cfg
new file mode 100644
index 000000000..740014e00
--- /dev/null
+++ b/src/server/flashcard/venv/pyvenv.cfg
@@ -0,0 +1,3 @@
+home = /Library/Frameworks/Python.framework/Versions/3.10/bin
+include-system-site-packages = false
+version = 3.10.11
diff --git a/src/server/index.ts b/src/server/index.ts
index 88dbd232d..1f9af9ee0 100644
--- a/src/server/index.ts
+++ b/src/server/index.ts
@@ -7,6 +7,7 @@ import AssistantManager from './ApiManagers/AssistantManager';
import DataVizManager from './ApiManagers/DataVizManager';
import DeleteManager from './ApiManagers/DeleteManager';
import DownloadManager from './ApiManagers/DownloadManager';
+import FireflyManager from './ApiManagers/FireflyManager';
import GeneralGoogleManager from './ApiManagers/GeneralGoogleManager';
import SessionManager from './ApiManagers/SessionManager';
import UploadManager from './ApiManagers/UploadManager';
@@ -71,6 +72,7 @@ function routeSetter({ addSupervisedRoute, logRegistrationOutcome }: RouteManage
new GeneralGoogleManager(),
/* new GooglePhotosManager(), */ new DataVizManager(),
new AssistantManager(),
+ new FireflyManager(),
];
// initialize API Managers
@@ -112,7 +114,6 @@ function routeSetter({ addSupervisedRoute, logRegistrationOutcome }: RouteManage
});
const serve: PublicHandler = ({ req, res }) => {
- // eslint-disable-next-line new-cap
const detector = new mobileDetect(req.headers['user-agent'] || '');
const filename = detector.mobile() !== null ? 'mobile/image.html' : 'index.html';
res.sendFile(path.join(__dirname, '../../deploy/' + filename));
diff --git a/src/server/server_Initialization.ts b/src/server/server_Initialization.ts
index 4dcb32f8b..a56ab5d18 100644
--- a/src/server/server_Initialization.ts
+++ b/src/server/server_Initialization.ts
@@ -108,14 +108,14 @@ function registerEmbeddedBrowseRelativePathHandler(server: express.Express) {
// detect search query and use default search engine
res.redirect(req.headers.referer + 'corsProxy/' + encodeURIComponent('http://www.google.com' + relativeUrl));
} else {
- res.end();
+ res.status(404).json({ error: 'no such file or endpoint: try /home /logout /login' });
}
});
}
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function proxyServe(req: any, requrl: string, response: any) {
- // eslint-disable-next-line global-require, @typescript-eslint/no-require-imports, @typescript-eslint/no-var-requires
+ // eslint-disable-next-line @typescript-eslint/no-require-imports
const htmlBodyMemoryStream = new (require('memorystream'))();
let wasinBrFormat = false;
const sendModifiedBody = () => {
@@ -189,7 +189,6 @@ function proxyServe(req: any, requrl: string, response: any) {
res.headers['x-permitted-cross-domain-policies'] = 'all';
res.headers['x-frame-options'] = '';
res.headers['content-security-policy'] = '';
- // eslint-disable-next-line no-multi-assign
response.headers = response._headers = res.headers;
})
.on('end', sendModifiedBody)
@@ -247,6 +246,10 @@ export default async function InitializeServer(routeSetter: RouteSetter) {
const app = buildWithMiddleware(express());
const compiler = webpack(config as webpack.Configuration);
+ // Default route
+ app.get('/', (req, res) => {
+ res.redirect(req.user ? '/home' : '/login'); //res.send('This is the default route.');
+ });
// route table managed by express. routes are tested sequentially against each of these map rules. when a match is found, the handler is called to process the request
app.use(wdm(compiler, { publicPath: config.output.publicPath }));
app.use(whm(compiler));
@@ -259,7 +262,6 @@ export default async function InitializeServer(routeSetter: RouteSetter) {
isRelease && !SSL.Loaded && SSL.exit();
routeSetter(new RouteManager(app, isRelease)); // this sets up all the regular supervised routes (things like /home, download/upload api's, pdf, search, session, etc)
registerEmbeddedBrowseRelativePathHandler(app); // this allows rendered web pages which internally have relative paths to find their content
-
isRelease && process.env.serverPort && (resolvedPorts.server = Number(process.env.serverPort));
const server = isRelease ? createServer(SSL.Credentials, app) : app;
await new Promise<void>(resolve => {