Diffstat (limited to 'src/server/DashUploadUtils.ts')
-rw-r--r--  src/server/DashUploadUtils.ts  152
1 file changed, 85 insertions, 67 deletions
diff --git a/src/server/DashUploadUtils.ts b/src/server/DashUploadUtils.ts
index 08cea1de5..1e55a885a 100644
--- a/src/server/DashUploadUtils.ts
+++ b/src/server/DashUploadUtils.ts
@@ -1,14 +1,14 @@
import axios from 'axios';
+import { exec, spawn } from 'child_process';
import { green, red } from 'colors';
-import { ExifImage } from 'exif';
+import { ExifData, ExifImage } from 'exif';
import * as exifr from 'exifr';
import * as ffmpeg from 'fluent-ffmpeg';
import * as formidable from 'formidable';
import { File } from 'formidable';
import * as fs from 'fs';
import { createReadStream, createWriteStream, existsSync, readFileSync, rename, unlinkSync, writeFile } from 'fs';
-import Jimp from 'jimp';
-import * as autorotate from 'jpeg-autorotate';
+import { Jimp } from 'jimp';
import * as md5File from 'md5-file';
import * as path from 'path';
import { basename } from 'path';
@@ -18,13 +18,49 @@ import { Duplex, Stream } from 'stream';
import { Utils } from '../Utils';
import { createIfNotExists } from './ActionUtilities';
import { AzureManager } from './ApiManagers/AzureManager';
-import { ParsedPDF } from './PdfTypes';
import { AcceptableMedia, Upload } from './SharedMediaTypes';
import { Directory, clientPathToFile, filesDirectory, pathToDirectory, publicDirectory, serverPathToFile } from './SocketData';
import { resolvedServerUrl } from './server_Initialization';
-const { spawn } = require('child_process');
-const { exec } = require('child_process');
+import { Worker, isMainThread, parentPort } from 'worker_threads';
+
+// Task identifiers and a single worker thread used to run Jimp image resampling off the main thread
+enum workertasks {
+ JIMP = 'jimp',
+}
+const JimpWorker: Worker | undefined = isMainThread ? new Worker(__filename) : undefined;
+export const workerResample = (imgSourcePath: string, outputPath: string, origSuffix: SizeSuffix, unlinkSource: boolean) => {
+ JimpWorker?.postMessage({ task: workertasks.JIMP, imgSourcePath, outputPath, origSuffix, unlinkSource });
+};
+
+if (isMainThread) {
+ // main thread code if needed ...
+} else {
+ // Worker thread code - Listens for messages from the main thread
+ parentPort?.on('message', message => {
+ switch (message.task) {
+ case workertasks.JIMP:
+ return workerResampleImage(message);
+ default:
+ }
+ });
+
+ async function workerResampleImage(message: { imgSourcePath: string; outputPath: string; origSuffix: string; unlinkSource: boolean }) {
+ const { imgSourcePath, outputPath, origSuffix, unlinkSource } = message;
+ const sizes = !origSuffix ? [{ width: 400, suffix: SizeSuffix.Medium }] : DashUploadUtils.imageResampleSizes(path.extname(imgSourcePath));
+ // prettier-ignore
+ Jimp.read(imgSourcePath)
+ .then(img =>
+ sizes.forEach(({ width, suffix }) =>
+ img.resize({ w: width || img.bitmap.width })
+ .write(InjectSize(outputPath, suffix) as `${string}.${string}`)
+ ))
+ .catch(e => console.log('Error Jimp:', e))
+ .finally(() => unlinkSource && unlinkSync(imgSourcePath));
+ }
+}
+
+// eslint-disable-next-line @typescript-eslint/no-var-requires
const requestImageSize = require('../client/util/request-image-size');
export enum SizeSuffix {
@@ -111,7 +147,7 @@ export namespace DashUploadUtils {
// .outputOptions('-c copy')
// .videoCodec("copy")
.save(outputFilePath)
- .on('error', (err: any) => {
+ .on('error', err => {
console.log(err);
reject();
})
@@ -130,8 +166,8 @@ export namespace DashUploadUtils {
}
function resolveExistingFile(name: string, pat: string, directory: Directory, mimetype?: string | null, duration?: number, rawText?: string): Upload.FileResponse<Upload.FileInformation> {
- const data = { size: 0, filepath: pat, name, type: mimetype ?? '', originalFilename: name, newFilename: path.basename(pat), mimetype: mimetype || null, hashAlgorithm: false as any };
- const file = { ...data, toJSON: () => ({ ...data, length: 0, filename: data.filepath.replace(/.*\//, ''), mtime: new Date(), mimetype: mimetype || null, toJson: () => undefined as any }) };
+ const data = { size: 0, filepath: pat, name, type: mimetype ?? '', originalFilename: name, newFilename: path.basename(pat), mimetype: mimetype || null, hashAlgorithm: false as falsetype };
+ const file = { ...data, toJSON: () => ({ ...data, length: 0, filename: data.filepath.replace(/.*\//, ''), mtime: new Date(), mimetype: mimetype || null }) };
return {
source: file || null,
result: {
@@ -184,11 +220,10 @@ export namespace DashUploadUtils {
const parseExifData = async (source: string) => {
const image = await request.get(source, { encoding: null });
- const { /* data, */ error } = await new Promise<{ data: any; error: any }>(resolve => {
+ const { /* data, */ error } = await new Promise<{ data: ExifData; error: string | undefined }>(resolve => {
// eslint-disable-next-line no-new
new ExifImage({ image }, (exifError, data) => {
- const reason = (exifError as any)?.code;
- resolve({ data, error: reason });
+ resolve({ data, error: exifError?.message });
});
});
return error ? { data: undefined, error } : { data: await exifr.parse(image), error };
@@ -252,11 +287,12 @@ export namespace DashUploadUtils {
};
// Use the request library to parse out file level image information in the headers
- const { headers } = await new Promise<any>((resolve, reject) => {
- request.head(resolvedUrl, (error, res) => (error ? reject(error) : resolve(res)));
+ const headerResult = await new Promise<{ headers: { [key: string]: string } }>((resolve, reject) => {
+ request.head(resolvedUrl, (error, res) => (error ? reject(error) : resolve(res as { headers: { [key: string]: string } })));
}).catch(e => {
console.log('Error processing headers: ', e);
});
+ const { headers } = headerResult !== null && typeof headerResult === 'object' ? headerResult : { headers: {} as { [key: string]: string } };
try {
// Compute the native width and height of the image with an npm module
@@ -272,21 +308,12 @@ export namespace DashUploadUtils {
filename,
...results,
};
- } catch (e: any) {
+ } catch (e: unknown) {
console.log(e);
- return e;
+ return new Error(e ? String(e) : 'unknown error');
}
};
- async function correctRotation(imgSourcePath: string) {
- const buffer = fs.readFileSync(imgSourcePath);
- try {
- return (await autorotate.rotate(buffer, { quality: 30 })).buffer;
- } catch (e) {
- return buffer;
- }
- }
-
/**
* define the resizers to use
* @param ext the extension
@@ -311,38 +338,23 @@ export namespace DashUploadUtils {
* @param outputDirectory the directory to output to, usually Directory.Images
* @returns a map with suffixes as keys and resized filenames as values.
*/
- export async function outputResizedImages(imgSourcePath: string, outputFileName: string, outputDirectory: string) {
+ export async function outputResizedImages(imgSourcePath: string, outputFileName: string, unlinkSource: boolean) {
const writtenFiles: { [suffix: string]: string } = {};
+ const outputPath = path.resolve(pathToDirectory(Directory.images), outputFileName);
const sizes = imageResampleSizes(path.extname(outputFileName));
- const imgBuffer = await correctRotation(imgSourcePath);
const imgReadStream = new Duplex();
- imgReadStream.push(imgBuffer);
+ imgReadStream.push(fs.readFileSync(imgSourcePath));
imgReadStream.push(null);
- const outputPath = (suffix: SizeSuffix) => {
- writtenFiles[suffix] = InjectSize(outputFileName, suffix);
- return path.resolve(outputDirectory, writtenFiles[suffix]);
- };
await Promise.all(
- sizes.filter(({ width }) => !width).map(({ suffix }) =>
- new Promise<void>(res => {
- imgReadStream.pipe(createWriteStream(outputPath(suffix))).on('close', res);
- })
+ sizes.map(({ suffix }) =>
+ new Promise<unknown>(res =>
+ imgReadStream.pipe(createWriteStream(writtenFiles[suffix] = InjectSize(outputPath, suffix))).on('close', res)
+ )
)); // prettier-ignore
- return Jimp.read(imgBuffer)
- .then(async (imgIn: any) => {
- let img = imgIn;
- await Promise.all( sizes.filter(({ width }) => width).map(({ width, suffix }) => {
- img = img.resize(width, Jimp.AUTO).write(outputPath(suffix));
- return img;
- } )); // prettier-ignore
- return writtenFiles;
- })
- .catch((e: any) => {
- console.log('ERROR' + e);
- return writtenFiles;
- });
+ workerResample(imgSourcePath, outputPath, SizeSuffix.Original, unlinkSource);
+ return writtenFiles;
}
/**
@@ -388,8 +400,9 @@ export namespace DashUploadUtils {
writtenFiles = {};
}
} else {
+ const unlinkSrcWhenFinished = isLocal().test(source) && cleanUp;
try {
- writtenFiles = await outputResizedImages(metadata.source, resolved, pathToDirectory(Directory.images));
+ writtenFiles = await outputResizedImages(metadata.source, resolved, unlinkSrcWhenFinished);
} catch (e) {
// input is a blob or other, try reading it to create a metadata source file.
const reqSource = request(metadata.source);
@@ -401,16 +414,14 @@ export namespace DashUploadUtils {
.on('close', () => res())
.on('error', () => rej());
});
- writtenFiles = await outputResizedImages(readSource, resolved, pathToDirectory(Directory.images));
+ writtenFiles = await outputResizedImages(readSource, resolved, unlinkSrcWhenFinished);
fs.unlink(readSource, err => console.log("Couldn't unlink temporary image file:" + readSource, err));
}
}
Array.from(Object.keys(writtenFiles)).forEach(suffix => {
information.accessPaths[suffix] = getAccessPaths(images, writtenFiles[suffix]);
});
- if (isLocal().test(source) && cleanUp) {
- unlinkSync(source);
- }
+
return information;
};
@@ -432,15 +443,17 @@ export namespace DashUploadUtils {
* 4) the content type of the image, i.e. image/(jpeg | png | ...)
*/
export const UploadImage = async (source: string, filename?: string, prefix: string = ''): Promise<Upload.ImageInformation | Error> => {
- const metadata = await InspectImage(source);
- if (metadata instanceof Error) {
- return { name: metadata.name, message: metadata.message };
+ const result = await InspectImage(source);
+ if (result instanceof Error) {
+ return { name: result.name, message: result.message };
}
- const outputFile = filename || metadata.filename || '';
+ const outputFile = filename || result.filename || '';
- return UploadInspectedImage(metadata, outputFile, prefix);
+ return UploadInspectedImage(result, outputFile, prefix);
};
+ type md5 = 'md5';
+ type falsetype = false;
export function uploadYoutube(videoId: string, overwriteId: string): Promise<Upload.FileResponse> {
return new Promise<Upload.FileResponse<Upload.FileInformation>>(res => {
const name = videoId;
@@ -448,6 +461,7 @@ export namespace DashUploadUtils {
const finalPath = serverPathToFile(Directory.videos, filepath);
if (existsSync(finalPath)) {
uploadProgress.set(overwriteId, 'computing duration');
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
exec(`yt-dlp -o ${finalPath} "https://www.youtube.com/watch?v=${videoId}" --get-duration`, (error: any, stdout: any /* , stderr: any */) => {
const time = Array.from(stdout.trim().split(':')).reverse();
const duration = (time.length > 2 ? Number(time[2]) * 1000 * 60 : 0) + (time.length > 1 ? Number(time[1]) * 60 : 0) + (time.length > 0 ? Number(time[0]) : 0);
@@ -457,14 +471,17 @@ export namespace DashUploadUtils {
uploadProgress.set(overwriteId, 'starting download');
const ytdlp = spawn(`yt-dlp`, ['-o', filepath, `https://www.youtube.com/watch?v=${videoId}`, '--max-filesize', '100M', '-f', 'mp4']);
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
ytdlp.stdout.on('data', (data: any) => uploadProgress.set(overwriteId, data.toString()));
let errors = '';
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
ytdlp.stderr.on('data', (data: any) => {
uploadProgress.set(overwriteId, 'error:' + data.toString());
errors = data.toString();
});
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
ytdlp.on('exit', (code: any) => {
if (code) {
res({
@@ -484,8 +501,8 @@ export namespace DashUploadUtils {
exec(`yt-dlp -o ${filepath} "https://www.youtube.com/watch?v=${videoId}" --get-duration`, (/* error: any, stdout: any, stderr: any */) => {
// const time = Array.from(stdout.trim().split(':')).reverse();
// const duration = (time.length > 2 ? Number(time[2]) * 1000 * 60 : 0) + (time.length > 1 ? Number(time[1]) * 60 : 0) + (time.length > 0 ? Number(time[0]) : 0);
- const data = { size: 0, filepath, name, mimetype: 'video', originalFilename: name, newFilename: name, hashAlgorithm: 'md5' as 'md5', type: 'video/mp4' };
- const file = { ...data, toJSON: () => ({ ...data, length: 0, filename: data.filepath.replace(/.*\//, ''), mtime: new Date(), toJson: () => undefined as any }) };
+ const data = { size: 0, filepath, name, mimetype: 'video', originalFilename: name, newFilename: name, hashAlgorithm: 'md5' as md5, type: 'video/mp4' };
+ const file = { ...data, toJSON: () => ({ ...data, length: 0, filename: data.filepath.replace(/.*\//, ''), mtime: new Date() }) };
MoveParsedFile(file, Directory.videos).then(output => res(output));
});
}
@@ -517,15 +534,15 @@ export namespace DashUploadUtils {
});
}
const dataBuffer = readFileSync(file.filepath);
- const result: ParsedPDF | any = await parse(dataBuffer).catch((e: any) => e);
- if (!result.code) {
+ const result: parse.Result = await parse(dataBuffer).catch(e => e);
+ if (result) {
await new Promise<void>((resolve, reject) => {
const writeStream = createWriteStream(serverPathToFile(Directory.text, textFilename));
writeStream.write(result?.text, error => (error ? reject(error) : resolve()));
});
return MoveParsedFile(file, Directory.pdfs, undefined, result?.text, undefined, fileKey);
}
- return { source: file, result: { name: 'faile pdf pupload', message: `Could not upload (${file.originalFilename}).${result.message}` } };
+ return { source: file, result: { name: 'failed pdf upload', message: `Could not upload (${file.originalFilename}). ${result}` } };
}
async function UploadCsv(file: File) {
@@ -563,7 +580,7 @@ export namespace DashUploadUtils {
.videoCodec('copy') // this will copy the data instead of reencode it
.save(vidFile.filepath.replace('.mkv', '.mp4'))
.on('end', res)
- .on('error', (e: any) => console.log(e));
+ .on('error', console.log);
});
vidFile.filepath = vidFile.filepath.replace('.mkv', '.mp4');
format = '.mp4';
@@ -571,8 +588,9 @@ export namespace DashUploadUtils {
if (format.includes('quicktime')) {
let abort = false;
await new Promise<void>(res => {
- ffmpeg.ffprobe(vidFile.filepath, (err: any, metadata: any) => {
- if (metadata.streams.some((stream: any) => stream.codec_name === 'hevc')) {
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ ffmpeg.ffprobe(vidFile.filepath, (err: any, metadata: ffmpeg.FfprobeData) => {
+ if (metadata.streams.some(stream => stream.codec_name === 'hevc')) {
abort = true;
}
res();