Diffstat (limited to 'src/server')
-rw-r--r--   src/server/ApiManagers/AzureManager.ts    19
-rw-r--r--   src/server/ApiManagers/UploadManager.ts   21
-rw-r--r--   src/server/ApiManagers/UserManager.ts     16
-rw-r--r--   src/server/DashUploadUtils.ts             95
-rw-r--r--   src/server/server_Initialization.ts       73
5 files changed, 157 insertions, 67 deletions
diff --git a/src/server/ApiManagers/AzureManager.ts b/src/server/ApiManagers/AzureManager.ts
index 12bb98ad0..2d0ab3aa6 100644
--- a/src/server/ApiManagers/AzureManager.ts
+++ b/src/server/ApiManagers/AzureManager.ts
@@ -1,8 +1,18 @@
import { ContainerClient, BlobServiceClient } from "@azure/storage-blob";
import * as fs from "fs";
import { Readable, Stream } from "stream";
+import * as path from "path";
const AZURE_STORAGE_CONNECTION_STRING = process.env.AZURE_STORAGE_CONNECTION_STRING;
+const extToType: { [suffix: string]: string } = {
+ ".jpeg" : "image/jpeg",
+ ".jpg" : "image/jpeg",
+ ".png" : "image/png",
+ ".svg" : "image/svg+xml",
+ ".webp" : "image/webp",
+ ".gif" : "image/gif"
+}
+
export class AzureManager {
private _containerClient: ContainerClient;
private _blobServiceClient: BlobServiceClient;
@@ -10,6 +20,7 @@ export class AzureManager {
public static CONTAINER_NAME = "dashmedia";
public static STORAGE_ACCOUNT_NAME = "dashblobstore";
+ public static BASE_STRING = `https://${AzureManager.STORAGE_ACCOUNT_NAME}.blob.core.windows.net/${AzureManager.CONTAINER_NAME}`;
constructor() {
if (!AZURE_STORAGE_CONNECTION_STRING) {
@@ -38,6 +49,14 @@ export class AzureManager {
return blockBlobClient.uploadStream(stream, undefined, undefined, blobOptions);
}
+ public static UploadBase64ImageBlob(filename: string, data: string, filetype?: string) {
+ const confirmedFiletype = filetype ? filetype : extToType[path.extname(filename)];
+ const buffer = Buffer.from(data, "base64");
+ const blockBlobClient = this.Instance.ContainerClient.getBlockBlobClient(filename);
+ const blobOptions = { blobHTTPHeaders: { blobContentType: confirmedFiletype } };
+ return blockBlobClient.upload(buffer, buffer.length, blobOptions);
+ }
+
public static UploadBlobStream(stream: Readable, filename: string, filetype: string) {
const blockBlobClient = this.Instance.ContainerClient.getBlockBlobClient(filename);
const blobOptions = { blobHTTPHeaders: { blobContentType: filetype }};
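
A minimal usage sketch of the new base64 upload path, assuming the AzureManager API added above (the data-URI handling and filename below are illustrative, not taken from Dash):

    import { AzureManager } from './ApiManagers/AzureManager';

    // Upload a pasted data-URI image to blob storage and return its public URL.
    async function uploadPastedImage(dataUri: string): Promise<string> {
        // UploadBase64ImageBlob expects the raw base64 payload, not the full data URI.
        const base64 = dataUri.split(';base64,')[1];
        // Illustrative filename; with no explicit filetype argument, the content type
        // is looked up from the file extension via the extToType map.
        const filename = `upload_${Date.now()}.png`;
        await AzureManager.UploadBase64ImageBlob(filename, base64);
        return `${AzureManager.BASE_STRING}/${filename}`;
    }
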
diff --git a/src/server/ApiManagers/UploadManager.ts b/src/server/ApiManagers/UploadManager.ts
index 820e815d8..56a9d9b6b 100644
--- a/src/server/ApiManagers/UploadManager.ts
+++ b/src/server/ApiManagers/UploadManager.ts
@@ -12,6 +12,7 @@ import { AcceptableMedia, Upload } from '../SharedMediaTypes';
import ApiManager, { Registration } from './ApiManager';
import { SolrManager } from './SearchManager';
import v4 = require('uuid/v4');
+import { DashVersion } from '../../fields/DocSymbols';
const AdmZip = require('adm-zip');
const imageDataUri = require('image-data-uri');
const fs = require('fs');
@@ -45,7 +46,7 @@ export default class UploadManager extends ApiManager {
method: Method.POST,
subscription: '/ping',
secureHandler: async ({ req, res }) => {
- _success(res, { message: 'pong', date: new Date() });
+ _success(res, { message: DashVersion, date: new Date() });
},
});
@@ -63,6 +64,17 @@ export default class UploadManager extends ApiManager {
subscription: '/uploadFormData',
secureHandler: async ({ req, res }) => {
const form = new formidable.IncomingForm();
+ let fileguids = '';
+ let filesize = '';
+ form.on('field', (e: string, value: string) => {
+ if (e === 'fileguids') {
+ (fileguids = value).split(';').map(guid => DashUploadUtils.uploadProgress.set(guid, 'reading file'));
+ }
+ if (e === 'filesize') {
+ filesize = value;
+ }
+ });
+ form.on('progress', e => fileguids.split(';').map(guid => DashUploadUtils.uploadProgress.set(guid, `read:(${Math.round((100 * +e) / +filesize)}%) ${e} of ${filesize}`)));
form.keepExtensions = true;
form.uploadDir = pathToDirectory(Directory.parsed_files);
return new Promise<void>(resolve => {
@@ -101,11 +113,10 @@ export default class UploadManager extends ApiManager {
//req.readableBuffer.head.data
return new Promise<void>(async resolve => {
req.addListener('data', async args => {
- console.log(args);
const payload = String.fromCharCode.apply(String, args);
- const videoId = JSON.parse(payload).videoId;
+ const { videoId, overwriteId } = JSON.parse(payload);
const results: Upload.FileResponse[] = [];
- const result = await DashUploadUtils.uploadYoutube(videoId);
+ const result = await DashUploadUtils.uploadYoutube(videoId, overwriteId ?? videoId);
result && results.push(result);
_success(res, results);
resolve();
@@ -122,7 +133,7 @@ export default class UploadManager extends ApiManager {
req.addListener('data', args => {
const payload = String.fromCharCode.apply(String, args);
const videoId = JSON.parse(payload).videoId;
- _success(res, { progress: DashUploadUtils.QueryYoutubeProgress(videoId) });
+ _success(res, { progress: DashUploadUtils.QueryYoutubeProgress(videoId, req.user) });
resolve();
});
});
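
The /uploadFormData change above reports read progress per client-supplied guid. A standalone sketch of that pattern, assuming formidable's 'field' and 'progress' events (the function and map names here are illustrative):

    import * as formidable from 'formidable';
    import { IncomingMessage } from 'http';

    // guid -> human-readable status, polled by the client (mirrors DashUploadUtils.uploadProgress)
    const uploadProgress = new Map<string, string>();

    function trackFormUpload(req: IncomingMessage) {
        const form = new formidable.IncomingForm();
        let fileguids: string[] = [];
        let filesize = 0;
        // The client sends its progress guids and the total payload size as ordinary form fields.
        form.on('field', (name: string, value: string) => {
            if (name === 'fileguids') fileguids = value.split(';');
            if (name === 'filesize') filesize = +value;
        });
        // formidable emits 'progress' with the number of bytes received so far.
        form.on('progress', (bytesReceived: number) => {
            const pct = filesize ? Math.round((100 * bytesReceived) / filesize) : 0;
            fileguids.forEach(guid => uploadProgress.set(guid, `read:(${pct}%) ${bytesReceived} of ${filesize}`));
        });
        form.parse(req);
    }
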
diff --git a/src/server/ApiManagers/UserManager.ts b/src/server/ApiManagers/UserManager.ts
index c3dadd821..8b7994eac 100644
--- a/src/server/ApiManagers/UserManager.ts
+++ b/src/server/ApiManagers/UserManager.ts
@@ -5,7 +5,8 @@ import { msToTime } from '../ActionUtilities';
import * as bcrypt from 'bcrypt-nodejs';
import { Opt } from '../../fields/Doc';
import { WebSocket } from '../websocket';
-import { DashStats } from '../DashStats';
+import { resolvedPorts } from '../server_Initialization';
+import { DashVersion } from '../../fields/DocSymbols';
export const timeMap: { [id: string]: number } = {};
interface ActivityUnit {
@@ -68,7 +69,18 @@ export default class UserManager extends ApiManager {
register({
method: Method.GET,
subscription: '/getCurrentUser',
- secureHandler: ({ res, user: { _id, email, cacheDocumentIds } }) => res.send(JSON.stringify({ id: _id, email, cacheDocumentIds })),
+ secureHandler: ({ res, user }) =>
+ res.send(
+ JSON.stringify({
+ version: DashVersion,
+ userDocumentId: user.userDocumentId,
+ linkDatabaseId: user.linkDatabaseId,
+ sharingDocumentId: user.sharingDocumentId,
+ email: user.email,
+ cacheDocumentIds: user.cacheDocumentIds,
+ resolvedPorts,
+ })
+ ),
publicHandler: ({ res }) => res.send(JSON.stringify({ id: '__guest__', email: 'guest' })),
});
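
For reference, the JSON shape a client can now expect from /getCurrentUser, based on the handler above (the interface name is illustrative and the field types are best guesses; unauthenticated requests still receive the smaller { id, email } guest payload):

    interface CurrentUserResponse {
        version: string;             // DashVersion, lets the client detect a stale build
        userDocumentId: string;
        linkDatabaseId: string;
        sharingDocumentId: string;
        email: string;
        cacheDocumentIds: string;    // assumption: serialized list of document ids
        resolvedPorts: unknown;      // port map exported by server_Initialization; exact shape not shown here
    }
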
diff --git a/src/server/DashUploadUtils.ts b/src/server/DashUploadUtils.ts
index bff60568b..19cb3f240 100644
--- a/src/server/DashUploadUtils.ts
+++ b/src/server/DashUploadUtils.ts
@@ -44,7 +44,7 @@ function isLocal() {
return /Dash-Web[0-9]*[\\\/]src[\\\/]server[\\\/]public[\\\/](.*)/;
}
-function usingAzure(){
+function usingAzure() {
return process.env.USE_AZURE === 'true';
}
@@ -99,7 +99,7 @@ export namespace DashUploadUtils {
merge
.input(textFilePath)
.inputOptions(['-f concat', '-safe 0'])
- .outputOptions('-c copy')
+ // .outputOptions('-c copy')
//.videoCodec("copy")
.save(outputFilePath)
.on('error', (err: any) => {
@@ -135,36 +135,39 @@ export namespace DashUploadUtils {
};
}
- export function QueryYoutubeProgress(videoId: string) {
- return uploadProgress.get(videoId) ?? 'failed';
+ export function QueryYoutubeProgress(videoId: string, user?: Express.User) {
+ // console.log(`PROGRESS:${videoId}`, (user as any)?.email);
+ return uploadProgress.get(videoId) ?? 'pending data upload';
}
- let uploadProgress = new Map<string, string>();
+ export let uploadProgress = new Map<string, string>();
- export function uploadYoutube(videoId: string): Promise<Upload.FileResponse> {
+ export function uploadYoutube(videoId: string, overwriteId: string): Promise<Upload.FileResponse> {
return new Promise<Upload.FileResponse<Upload.FileInformation>>((res, rej) => {
- console.log('Uploading YouTube video: ' + videoId);
const name = videoId;
const path = name.replace(/^-/, '__') + '.mp4';
const finalPath = serverPathToFile(Directory.videos, path);
if (existsSync(finalPath)) {
- uploadProgress.set(videoId, 'computing duration');
+ uploadProgress.set(overwriteId, 'computing duration');
exec(`yt-dlp -o ${finalPath} "https://www.youtube.com/watch?v=${videoId}" --get-duration`, (error: any, stdout: any, stderr: any) => {
const time = Array.from(stdout.trim().split(':')).reverse();
const duration = (time.length > 2 ? Number(time[2]) * 1000 * 60 : 0) + (time.length > 1 ? Number(time[1]) * 60 : 0) + (time.length > 0 ? Number(time[0]) : 0);
res(resolveExistingFile(name, finalPath, Directory.videos, 'video/mp4', duration, undefined));
});
} else {
- uploadProgress.set(videoId, 'starting download');
+ uploadProgress.set(overwriteId, 'starting download');
const ytdlp = spawn(`yt-dlp`, ['-o', path, `https://www.youtube.com/watch?v=${videoId}`, '--max-filesize', '100M', '-f', 'mp4']);
- ytdlp.stdout.on('data', (data: any) => !uploadProgress.get(videoId)?.includes('Aborting.') && uploadProgress.set(videoId, data.toString()));
+ ytdlp.stdout.on('data', (data: any) => uploadProgress.set(overwriteId, data.toString()));
let errors = '';
- ytdlp.stderr.on('data', (data: any) => (errors = data.toString()));
+ ytdlp.stderr.on('data', (data: any) => {
+ uploadProgress.set(overwriteId, 'error:' + data.toString());
+ errors = data.toString();
+ });
ytdlp.on('exit', function (code: any) {
- if (code || uploadProgress.get(videoId)?.includes('Aborting.')) {
+ if (code) {
res({
source: {
size: 0,
@@ -176,7 +179,7 @@ export namespace DashUploadUtils {
result: { name: 'failed youtube query', message: `Could not archive video. ${code ? errors : uploadProgress.get(videoId)}` },
});
} else {
- uploadProgress.set(videoId, 'computing duration');
+ uploadProgress.set(overwriteId, 'computing duration');
exec(`yt-dlp -o ${path} "https://www.youtube.com/watch?v=${videoId}" --get-duration`, (error: any, stdout: any, stderr: any) => {
const time = Array.from(stdout.trim().split(':')).reverse();
const duration = (time.length > 2 ? Number(time[2]) * 1000 * 60 : 0) + (time.length > 1 ? Number(time[1]) * 60 : 0) + (time.length > 0 ? Number(time[0]) : 0);
@@ -194,7 +197,7 @@ export namespace DashUploadUtils {
const isAzureOn = usingAzure();
const { type, path, name } = file;
const types = type?.split('/') ?? [];
- uploadProgress.set(overwriteGuid ?? name, 'uploading'); // If the client sent a guid it uses to track upload progress, use that guid. Otherwise, use the file's name.
+ uploadProgress.set(overwriteGuid ?? name, 'uploading'); // If the client sent a guid it uses to track upload progress, use that guid. Otherwise, use the file's name.
const category = types[0];
let format = `.${types[1]}`;
@@ -281,6 +284,7 @@ export namespace DashUploadUtils {
const fileKey = (await md5File(file.path)) + '.pdf';
const textFilename = `${fileKey.substring(0, fileKey.length - 4)}.txt`;
if (fExists(fileKey, Directory.pdfs) && fExists(textFilename, Directory.text)) {
+ fs.unlink(file.path, () => {});
return new Promise<Upload.FileResponse>(res => {
const textFilename = `${fileKey.substring(0, fileKey.length - 4)}.txt`;
const readStream = createReadStream(serverPathToFile(Directory.text, textFilename));
@@ -384,13 +388,18 @@ export namespace DashUploadUtils {
if ((rawMatches = /^data:image\/([a-z]+);base64,(.*)/.exec(source)) !== null) {
const [ext, data] = rawMatches.slice(1, 3);
const resolved = (filename = `upload_${Utils.GenerateGuid()}.${ext}`);
- const error = await new Promise<Error | null>(resolve => {
- writeFile(serverPathToFile(Directory.images, resolved), data, 'base64', resolve);
- });
- if (error !== null) {
- return error;
+ if (usingAzure()) {
+ const response = await AzureManager.UploadBase64ImageBlob(resolved, data);
+ source = `${AzureManager.BASE_STRING}/${resolved}`;
+ } else {
+ const error = await new Promise<Error | null>(resolve => {
+ writeFile(serverPathToFile(Directory.images, resolved), data, 'base64', resolve);
+ });
+ if (error !== null) {
+ return error;
+ }
+ source = `${resolvedServerUrl}${clientPathToFile(Directory.images, resolved)}`;
}
- source = `${resolvedServerUrl}${clientPathToFile(Directory.images, resolved)}`;
}
let resolvedUrl: string;
/**
@@ -490,13 +499,13 @@ export namespace DashUploadUtils {
/**
* UploadInspectedImage() takes an image with its metadata. If Azure is being used, this method will call the Azure function
- * to execute the resizing. If Azure is not used, the function will begin to resize the image.
- *
+ * to execute the resizing. If Azure is not used, the function will begin to resize the image.
+ *
* @param metadata metadata object from InspectImage()
* @param filename the name of the file
* @param prefix the prefix to use, which will be set to '' if none is provided.
* @param cleanUp a boolean indicating if the files should be deleted after upload. True by default.
- * @returns the accessPaths for the resized files.
+ * @returns the accessPaths for the resized files.
*/
export const UploadInspectedImage = async (metadata: Upload.InspectionResults, filename?: string, prefix = '', cleanUp = true): Promise<Upload.ImageInformation> => {
const { requestable, source, ...remaining } = metadata;
@@ -504,23 +513,26 @@ export namespace DashUploadUtils {
const { images } = Directory;
const information: Upload.ImageInformation = {
accessPaths: {
- agnostic: usingAzure() ? {
- client: BLOBSTORE_URL + `/${filename}`,
- server: BLOBSTORE_URL + `/${filename}`
- } : getAccessPaths(images, resolved)
+ agnostic: usingAzure()
+ ? {
+ client: BLOBSTORE_URL + `/${resolved}`,
+ server: BLOBSTORE_URL + `/${resolved}`,
+ }
+ : getAccessPaths(images, resolved),
},
...metadata,
};
- let writtenFiles: { [suffix: string] : string};
+ let writtenFiles: { [suffix: string]: string };
if (usingAzure()) {
if (!RESIZE_FUNCTION_URL) {
- throw new Error("Resize function URL not provided.");
+ throw new Error('Resize function URL not provided.');
}
try {
const response = await axios.post(RESIZE_FUNCTION_URL, {
- url: requestable
+ url: requestable,
+ filename: resolved,
});
writtenFiles = response.data.writtenFiles;
} catch (err) {
@@ -576,8 +588,8 @@ export namespace DashUploadUtils {
/**
* outputResizedImages takes in a readable stream and resizes the images according to the sizes defined at the top of this file.
- *
- * The new images will be saved to the server with the corresponding prefixes.
+ *
+ * The new images will be saved to the server with the corresponding prefixes.
* @param streamProvider a Stream of the image to process, taken from the /parsed_files location
* @param outputFileName the basename (No suffix) of the outputted file.
* @param outputDirectory the directory to output to, usually Directory.Images
@@ -589,11 +601,20 @@ export namespace DashUploadUtils {
const outputPath = path.resolve(outputDirectory, (writtenFiles[suffix] = InjectSize(outputFileName, suffix)));
await new Promise<void>(async (resolve, reject) => {
const source = streamProvider();
- let readStream: Stream = source instanceof Promise ? await source : source;
+ let readStream = source instanceof Promise ? await source : source;
+ let error = false;
if (resizer) {
- readStream = readStream.pipe(resizer.withMetadata());
+ readStream = readStream.pipe(resizer.withMetadata()).on('error', async args => {
+ error = true;
+ if (error) {
+ const source2 = streamProvider();
+ let readStream2: Stream | undefined;
+ readStream2 = source2 instanceof Promise ? await source2 : source2;
+ readStream2?.pipe(createWriteStream(outputPath)).on('error', resolve).on('close', resolve);
+ }
+ });
}
- readStream.pipe(createWriteStream(outputPath)).on('close', resolve).on('error', reject);
+ !error && readStream?.pipe(createWriteStream(outputPath)).on('error', resolve).on('close', resolve);
});
}
return writtenFiles;
@@ -608,7 +629,7 @@ export namespace DashUploadUtils {
return [
{ suffix: SizeSuffix.Original },
...Object.values(DashUploadUtils.Sizes).map(({ suffix, width }) => {
- let initial: sharp.Sharp | undefined = sharp().resize(width, undefined, { withoutEnlargement: true });
+ let initial: sharp.Sharp | undefined = sharp({ failOnError: false }).resize(width, undefined, { withoutEnlargement: true });
if (pngs.includes(ext)) {
initial = initial.png(pngOptions);
} else if (jpgs.includes(ext)) {
@@ -621,7 +642,7 @@ export namespace DashUploadUtils {
initial = undefined;
}
return {
- resizer: initial,
+ resizer: suffix === '_o' ? undefined : initial,
suffix,
};
}),
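
The outputResizedImages and resizer changes above make resizing tolerant of files sharp cannot fully decode: failOnError is disabled, the original ('_o') size is written without a resizer, and a resize error falls back to copying the unmodified bytes. A standalone sketch of that fallback, assuming local file paths (the function name is illustrative):

    import * as sharp from 'sharp';
    import { createReadStream, createWriteStream } from 'fs';

    function writeResizedWithFallback(inputPath: string, outputPath: string, width: number) {
        return new Promise<void>(resolve => {
            const resizer = sharp({ failOnError: false }).resize(width, undefined, { withoutEnlargement: true });
            const resized = createReadStream(inputPath).pipe(resizer.withMetadata());
            resized.on('error', () => {
                // Resizing failed (e.g. a format sharp cannot decode): write the original bytes instead.
                createReadStream(inputPath).pipe(createWriteStream(outputPath)).on('close', () => resolve()).on('error', () => resolve());
            });
            resized.pipe(createWriteStream(outputPath)).on('close', () => resolve()).on('error', () => resolve());
        });
    }
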
diff --git a/src/server/server_Initialization.ts b/src/server/server_Initialization.ts
index c1934451c..839091194 100644
--- a/src/server/server_Initialization.ts
+++ b/src/server/server_Initialization.ts
@@ -100,7 +100,7 @@ function buildWithMiddleware(server: express.Express) {
passport.session(),
(req: express.Request, res: express.Response, next: express.NextFunction) => {
res.locals.user = req.user;
- if (req.originalUrl.endsWith('.png') /*|| req.originalUrl.endsWith(".js")*/ && req.method === 'GET' && (res as any)._contentLength) {
+ if ((req.originalUrl.endsWith('.png') || req.originalUrl.endsWith('.jpg') || (process.env.RELEASE === 'true' && req.originalUrl.endsWith('.js'))) && req.method === 'GET') {
const period = 30000;
res.set('Cache-control', `public, max-age=${period}`);
} else {
@@ -149,53 +149,78 @@ function registerAuthenticationRoutes(server: express.Express) {
function registerCorsProxy(server: express.Express) {
server.use('/corsProxy', async (req, res) => {
- //const referer = req.headers.referer ? decodeURIComponent(req.headers.referer) : '';
- let requrl = decodeURIComponent(req.url.substring(1));
- const qsplit = requrl.split('?q=');
- const newqsplit = requrl.split('&q=');
+ res.setHeader('Access-Control-Allow-Origin', '*');
+ res.header('Access-Control-Allow-Methods', 'GET, PUT, PATCH, POST, DELETE');
+ res.header('Access-Control-Allow-Headers', req.header('access-control-request-headers'));
+ const referer = req.headers.referer ? decodeURIComponent(req.headers.referer) : '';
+ let requrlraw = decodeURIComponent(req.url.substring(1));
+ const qsplit = requrlraw.split('?q=');
+ const newqsplit = requrlraw.split('&q=');
if (qsplit.length > 1 && newqsplit.length > 1) {
const lastq = newqsplit[newqsplit.length - 1];
- requrl = qsplit[0] + '?q=' + lastq.split('&')[0] + '&' + qsplit[1].split('&')[1];
+ requrlraw = qsplit[0] + '?q=' + lastq.split('&')[0] + '&' + qsplit[1].split('&')[1];
+ }
+ const requrl = requrlraw.startsWith('/') ? referer + requrlraw : requrlraw;
+ // CORS handling: if the referer is a cors page (i.e., the cors() route appears to have redirected to /corsProxy/<path>)
+ // and the requested url path is relative, then redirect again to the cors referer and just append the relative path.
+ if (!requrl.startsWith('http') && req.originalUrl.startsWith('/corsProxy') && referer?.includes('corsProxy')) {
+ res.redirect(referer + (referer.endsWith('/') ? '' : '/') + requrl);
+ } else {
+ proxyServe(req, requrl, res);
}
- proxyServe(req, requrl, res);
});
}
function proxyServe(req: any, requrl: string, response: any) {
const htmlBodyMemoryStream = new (require('memorystream'))();
- var retrieveHTTPBody: any;
var wasinBrFormat = false;
const sendModifiedBody = () => {
const header = response.headers['content-encoding'];
- const httpsToCors = (match: any, href: string, offset: any, string: any) => `href="${resolvedServerUrl + '/corsProxy/http' + href}"`;
- if (header?.includes('gzip')) {
+ const refToCors = (match: any, tag: string, sym: string, href: string, offset: any, string: any) => `${tag}=${sym + resolvedServerUrl}/corsProxy/${href + sym}`;
+ const relpathToCors = (match: any, href: string, offset: any, string: any) => `="${resolvedServerUrl + '/corsProxy/' + decodeURIComponent(req.originalUrl.split('/corsProxy/')[1].match(/https?:\/\/[^\/]*/)?.[0] ?? '') + '/' + href}"`;
+ if (header) {
try {
const bodyStream = htmlBodyMemoryStream.read();
if (bodyStream) {
- const htmlInputText = wasinBrFormat ? Buffer.from(brotli.decompress(bodyStream)) : zlib.gunzipSync(bodyStream);
+ const htmlInputText = wasinBrFormat ? Buffer.from(brotli.decompress(bodyStream)) : header.includes('gzip') ? zlib.gunzipSync(bodyStream) : bodyStream;
const htmlText = htmlInputText
.toString('utf8')
.replace('<head>', '<head> <style>[id ^= "google"] { display: none; } </style>')
- // .replace(/href="https?([^"]*)"/g, httpsToCors)
+ .replace(/(src|href)=([\'\"])(https?[^\2\n]*)\2/g, refToCors) // replace src or href='http(s)://...' or href="http(s)://.."
+ //.replace(/= *"\/([^"]*)"/g, relpathToCors)
.replace(/data-srcset="[^"]*"/g, '')
.replace(/srcset="[^"]*"/g, '')
.replace(/target="_blank"/g, '');
- response.send(zlib.gzipSync(htmlText));
+ response.send(header?.includes('gzip') ? zlib.gzipSync(htmlText) : htmlText);
} else {
- req.pipe(request(requrl)).pipe(response);
+ req.pipe(request(requrl))
+ .on('error', (e: any) => console.log('requrl ', e))
+ .pipe(response)
+ .on('error', (e: any) => console.log('response pipe error', e));
console.log('EMPTY body:' + req.url);
}
} catch (e) {
console.log('ERROR?: ', e);
}
} else {
- req.pipe(htmlBodyMemoryStream).pipe(response);
+ req.pipe(htmlBodyMemoryStream)
+ .on('error', (e: any) => console.log('html body memorystream error', e))
+ .pipe(response)
+ .on('error', (e: any) => console.log('html body memory stream response error', e));
}
};
- retrieveHTTPBody = () => {
- req.headers.cookie = '';
+ const retrieveHTTPBody = () => {
+ //req.headers.cookie = '';
req.pipe(request(requrl))
- .on('error', (e: any) => console.log(`Malformed CORS url: ${requrl}`, e))
+ .on('error', (e: any) => {
+ console.log(`CORS url error: ${requrl}`, e);
+ response.send(`<html><body bgcolor="red" link="006666" alink="8B4513" vlink="006666">
+ <title>Error</title>
+ <div align="center"><h1>Failed to load: ${requrl} </h1></div>
+ <p>${e}</p>
+ </body></html>`);
+ })
.on('response', (res: any) => {
res.headers;
const headers = Object.keys(res.headers);
@@ -218,16 +243,18 @@ function proxyServe(req: any, requrl: string, response: any) {
response.headers = response._headers = res.headers;
})
.on('end', sendModifiedBody)
- .pipe(htmlBodyMemoryStream);
+ .pipe(htmlBodyMemoryStream)
+ .on('error', (e: any) => console.log('http body pipe error', e));
};
retrieveHTTPBody();
}
function registerEmbeddedBrowseRelativePathHandler(server: express.Express) {
server.use('*', (req, res) => {
+ // res.setHeader('Access-Control-Allow-Origin', '*');
+ // res.header('Access-Control-Allow-Methods', 'GET, PUT, PATCH, POST, DELETE');
+ // res.header('Access-Control-Allow-Headers', req.header('access-control-request-headers'));
const relativeUrl = req.originalUrl;
- // if (req.originalUrl === '/css/main.css' || req.originalUrl === '/favicon.ico') res.end();
- // else
if (!res.headersSent && req.headers.referer?.includes('corsProxy')) {
if (!req.user) res.redirect('/home'); // When no user is logged in, we interpret a relative URL as being a reference to something they don't have access to and redirect to /home
// a request for something by a proxied referrer means it must be a relative reference. So construct a proxied absolute reference here.
@@ -237,8 +264,8 @@ function registerEmbeddedBrowseRelativePathHandler(server: express.Express) {
const actualReferUrl = proxiedRefererUrl.replace(dashServerUrl, ''); // the url of the referer without the proxy (e.g., : https://en.wikipedia.org/wiki/Engelbart)
const absoluteTargetBaseUrl = actualReferUrl.match(/https?:\/\/[^\/]*/)![0]; // the base of the original url (e.g., https://en.wikipedia.org)
const redirectedProxiedUrl = dashServerUrl + encodeURIComponent(absoluteTargetBaseUrl + relativeUrl); // the new proxied full url (e.g., http://localhost:<port>/corsProxy/https://en.wikipedia.org/<somethingelse>)
- if (relativeUrl.startsWith('//')) res.redirect('http:' + relativeUrl);
- else res.redirect(redirectedProxiedUrl);
+ const redirectUrl = relativeUrl.startsWith('//') ? 'http:' + relativeUrl : redirectedProxiedUrl;
+ res.redirect(redirectUrl);
} catch (e) {
console.log('Error embed: ', e);
}
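
The proxy's HTML rewriting above routes absolute src/href references back through /corsProxy so that follow-up requests stay proxied. A condensed sketch of that rewrite (resolvedServerUrl and the sample markup are illustrative; the regex here uses a backreference to the opening quote rather than the exact pattern in the diff):

    const resolvedServerUrl = 'http://localhost:1050';   // illustrative server origin

    // Rewrites src="http(s)://..." and href='http(s)://...' so the URL is fetched via the proxy,
    // preserving whichever quote character the original markup used.
    function rewriteToCorsProxy(html: string): string {
        return html.replace(
            /(src|href)=(['"])(https?[^'"]*)\2/g,
            (_match: string, tag: string, quote: string, url: string) => `${tag}=${quote}${resolvedServerUrl}/corsProxy/${url}${quote}`
        );
    }

    // rewriteToCorsProxy('<a href="https://en.wikipedia.org/wiki/Engelbart">x</a>')
    //   -> '<a href="http://localhost:1050/corsProxy/https://en.wikipedia.org/wiki/Engelbart">x</a>'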