path: root/src/server/DashUploadUtils.ts
Diffstat (limited to 'src/server/DashUploadUtils.ts')
-rw-r--r--  src/server/DashUploadUtils.ts | 235 ++++++++++++++++++------------------
1 file changed, 122 insertions(+), 113 deletions(-)
diff --git a/src/server/DashUploadUtils.ts b/src/server/DashUploadUtils.ts
index 19cb3f240..b1a7a9c5e 100644
--- a/src/server/DashUploadUtils.ts
+++ b/src/server/DashUploadUtils.ts
@@ -1,31 +1,32 @@
+import axios from 'axios';
import { green, red } from 'colors';
import { ExifImage } from 'exif';
import * as exifr from 'exifr';
+import * as ffmpeg from 'fluent-ffmpeg';
+import * as formidable from 'formidable';
import { File } from 'formidable';
+import * as fs from 'fs';
import { createReadStream, createWriteStream, existsSync, readFileSync, rename, unlinkSync, writeFile } from 'fs';
+import Jimp from 'jimp';
+import * as autorotate from 'jpeg-autorotate';
+import * as md5File from 'md5-file';
import * as path from 'path';
import { basename } from 'path';
-import * as sharp from 'sharp';
-import { Readable, Stream } from 'stream';
+import * as parse from 'pdf-parse';
+import * as request from 'request-promise';
+import { Duplex } from 'stream';
import { filesDirectory, publicDirectory } from '.';
+import { Utils } from '../Utils';
import { Opt } from '../fields/Doc';
import { ParsedPDF } from '../server/PdfTypes';
-import { Utils } from '../Utils';
import { createIfNotExists } from './ActionUtilities';
-import { clientPathToFile, Directory, pathToDirectory, serverPathToFile } from './ApiManagers/UploadManager';
-import { resolvedServerUrl } from './server_Initialization';
-import { AcceptableMedia, Upload } from './SharedMediaTypes';
-import request = require('request-promise');
-import formidable = require('formidable');
import { AzureManager } from './ApiManagers/AzureManager';
-import axios from 'axios';
+import { Directory, clientPathToFile, pathToDirectory, serverPathToFile } from './ApiManagers/UploadManager';
+import { AcceptableMedia, Upload } from './SharedMediaTypes';
+import { resolvedServerUrl } from './server_Initialization';
const spawn = require('child_process').spawn;
const { exec } = require('child_process');
-const parse = require('pdf-parse');
-const ffmpeg = require('fluent-ffmpeg');
-const fs = require('fs');
const requestImageSize = require('../client/util/request-image-size');
-const md5File = require('md5-file');
export enum SizeSuffix {
Small = '_s',
@@ -55,9 +56,9 @@ export namespace DashUploadUtils {
}
export const Sizes: { [size: string]: Size } = {
- SMALL: { width: 100, suffix: SizeSuffix.Small },
+ LARGE: { width: 800, suffix: SizeSuffix.Large },
MEDIUM: { width: 400, suffix: SizeSuffix.Medium },
- LARGE: { width: 900, suffix: SizeSuffix.Large },
+ SMALL: { width: 100, suffix: SizeSuffix.Small },
};
export function validateExtension(url: string) {
@@ -94,9 +95,8 @@ export namespace DashUploadUtils {
const outputFilePath = path.join(pathToDirectory(Directory.videos), outputFileName);
// concatenate the videos
- await new Promise((resolve, reject) => {
- var merge = ffmpeg();
- merge
+ await new Promise((resolve, reject) =>
+ ffmpeg()
.input(textFilePath)
.inputOptions(['-f concat', '-safe 0'])
// .outputOptions('-c copy')
@@ -106,8 +106,8 @@ export namespace DashUploadUtils {
console.log(err);
reject();
})
- .on('end', resolve);
- });
+ .on('end', resolve)
+ );
// delete concat.txt from the file system
unlinkSync(textFilePath);
@@ -120,14 +120,14 @@ export namespace DashUploadUtils {
};
}
- function resolveExistingFile(name: string, pat: string, directory: Directory, type?: string, duration?: number, rawText?: string) {
- const data = { size: 0, path: path.basename(pat), name, type: type ?? '' };
- const file = { ...data, toJSON: () => ({ ...data, filename: data.path.replace(/.*\//, ''), mtime: duration?.toString(), mime: '', toJson: () => undefined as any }) };
+ function resolveExistingFile(name: string, pat: string, directory: Directory, mimetype?: string | null, duration?: number, rawText?: string): Upload.FileResponse<Upload.FileInformation> {
+ const data = { size: 0, filepath: pat, name, type: mimetype ?? '', originalFilename: name, newFilename: path.basename(pat), mimetype: mimetype || null, hashAlgorithm: false as any };
+ const file = { ...data, toJSON: () => ({ ...data, length: 0, filename: data.filepath.replace(/.*\//, ''), mtime: new Date(), mimetype: mimetype || null, toJson: () => undefined as any }) };
return {
- source: file,
+ source: file || null,
result: {
accessPaths: {
- agnostic: getAccessPaths(directory, data.path),
+ agnostic: getAccessPaths(directory, data.filepath),
},
rawText,
duration,
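Much of the renaming in this hunk (and the rest of the diff) tracks formidable's v2 → v3 API change. A minimal sketch of the new File field names, assuming formidable v3; the values below are illustrative, not from the patch:

    import { File } from 'formidable';

    // formidable v3 renamed the fields this patch chases throughout the file:
    //   path -> filepath, name -> originalFilename, type -> mimetype
    const example: Partial<File> = {
        size: 0,
        filepath: '/files/pdfs/abc123.pdf', // v2: `path`
        originalFilename: 'report.pdf',     // v2: `name`
        newFilename: 'abc123.pdf',          // name assigned on disk
        mimetype: 'application/pdf',        // v2: `type`
    };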
@@ -145,18 +145,18 @@ export namespace DashUploadUtils {
export function uploadYoutube(videoId: string, overwriteId: string): Promise<Upload.FileResponse> {
return new Promise<Upload.FileResponse<Upload.FileInformation>>((res, rej) => {
const name = videoId;
- const path = name.replace(/^-/, '__') + '.mp4';
- const finalPath = serverPathToFile(Directory.videos, path);
+ const filepath = name.replace(/^-/, '__') + '.mp4';
+ const finalPath = serverPathToFile(Directory.videos, filepath);
if (existsSync(finalPath)) {
uploadProgress.set(overwriteId, 'computing duration');
exec(`yt-dlp -o ${finalPath} "https://www.youtube.com/watch?v=${videoId}" --get-duration`, (error: any, stdout: any, stderr: any) => {
const time = Array.from(stdout.trim().split(':')).reverse();
const duration = (time.length > 2 ? Number(time[2]) * 1000 * 60 : 0) + (time.length > 1 ? Number(time[1]) * 60 : 0) + (time.length > 0 ? Number(time[0]) : 0);
- res(resolveExistingFile(name, finalPath, Directory.videos, 'video/mp4', duration, undefined));
+ res(resolveExistingFile(name, filepath, Directory.videos, 'video/mp4', duration, undefined));
});
} else {
uploadProgress.set(overwriteId, 'starting download');
- const ytdlp = spawn(`yt-dlp`, ['-o', path, `https://www.youtube.com/watch?v=${videoId}`, '--max-filesize', '100M', '-f', 'mp4']);
+ const ytdlp = spawn(`yt-dlp`, ['-o', filepath, `https://www.youtube.com/watch?v=${videoId}`, '--max-filesize', '100M', '-f', 'mp4']);
ytdlp.stdout.on('data', (data: any) => uploadProgress.set(overwriteId, data.toString()));
@@ -171,21 +171,26 @@ export namespace DashUploadUtils {
res({
source: {
size: 0,
- path,
- name,
- type: '',
- toJSON: () => ({ name, path }),
+ filepath: name,
+ originalFilename: name,
+ newFilename: name,
+ mimetype: 'video',
+ hashAlgorithm: 'md5',
+ toJSON: () => ({ newFilename: name, filepath, mimetype: 'video', mtime: new Date(), size: 0, length: 0, originalFilename: name }),
},
result: { name: 'failed youtube query', message: `Could not archive video. ${code ? errors : uploadProgress.get(videoId)}` },
});
} else {
uploadProgress.set(overwriteId, 'computing duration');
- exec(`yt-dlp-o ${path} "https://www.youtube.com/watch?v=${videoId}" --get-duration`, (error: any, stdout: any, stderr: any) => {
+ exec(`yt-dlp -o ${filepath} "https://www.youtube.com/watch?v=${videoId}" --get-duration`, (error: any, stdout: any, stderr: any) => {
const time = Array.from(stdout.trim().split(':')).reverse();
const duration = (time.length > 2 ? Number(time[2]) * 1000 * 60 : 0) + (time.length > 1 ? Number(time[1]) * 60 : 0) + (time.length > 0 ? Number(time[0]) : 0);
- const data = { size: 0, path, name, type: 'video/mp4' };
- const file = { ...data, toJSON: () => ({ ...data, filename: data.path.replace(/.*\//, ''), mtime: duration.toString(), mime: '', toJson: () => undefined as any }) };
- res(MoveParsedFile(file, Directory.videos));
+ const data = { size: 0, filepath, name, mimetype: 'video', originalFilename: name, newFilename: name, hashAlgorithm: 'md5' as 'md5', type: 'video/mp4' };
+ const file = { ...data, toJSON: () => ({ ...data, length: 0, filename: data.filepath.replace(/.*\//, ''), mtime: new Date(), toJson: () => undefined as any }) };
+ MoveParsedFile(file, Directory.videos).then(output => {
+ console.log('OUTPUT = ' + output);
+ res(output);
+ });
});
}
});
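Both branches above parse yt-dlp's `--get-duration` output (an H:M:S stamp) by splitting on ':' and reversing. A self-contained sketch of that fold, as a hypothetical helper (not in the patch) that keeps every term in seconds:

    // Hypothetical helper: '1:02:03' -> 3723 seconds.
    const durationSeconds = (stamp: string): number =>
        stamp.trim().split(':').reverse()
            .reduce((total, part, idx) => total + Number(part) * 60 ** idx, 0);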
@@ -195,39 +200,39 @@ export namespace DashUploadUtils {
export async function upload(file: File, overwriteGuid?: string): Promise<Upload.FileResponse> {
const isAzureOn = usingAzure();
- const { type, path, name } = file;
+ const { mimetype: type, filepath, originalFilename } = file;
const types = type?.split('/') ?? [];
- uploadProgress.set(overwriteGuid ?? name, 'uploading'); // If the client sent a guid it uses to track upload progress, use that guid. Otherwise, use the file's name.
+ // uploadProgress.set(overwriteGuid ?? name, 'uploading'); // If the client sent a guid it uses to track upload progress, use that guid. Otherwise, use the file's name.
const category = types[0];
let format = `.${types[1]}`;
- console.log(green(`Processing upload of file (${name}) and format (${format}) with upload type (${type}) in category (${category}).`));
+ console.log(green(`Processing upload of file (${originalFilename}) and format (${format}) with upload type (${type}) in category (${category}).`));
switch (category) {
case 'image':
if (imageFormats.includes(format)) {
- const result = await UploadImage(path, basename(path));
+ const result = await UploadImage(filepath, basename(filepath));
return { source: file, result };
}
- fs.unlink(path, () => {});
- return { source: file, result: { name: 'Unsupported image format', message: `Could not upload unsupported file (${name}). Please convert to an .jpg` } };
+ fs.unlink(filepath, () => {});
+ return { source: file, result: { name: 'Unsupported image format', message: `Could not upload unsupported file (${originalFilename}). Please convert to a .jpg` } };
case 'video':
if (format.includes('x-matroska')) {
console.log('case video');
await new Promise(res =>
- ffmpeg(file.path)
+ ffmpeg(file.filepath)
.videoCodec('copy') // this will copy the data instead of reencode it
- .save(file.path.replace('.mkv', '.mp4'))
+ .save(file.filepath.replace('.mkv', '.mp4'))
.on('end', res)
.on('error', (e: any) => console.log(e))
);
- file.path = file.path.replace('.mkv', '.mp4');
+ file.filepath = file.filepath.replace('.mkv', '.mp4');
format = '.mp4';
}
if (format.includes('quicktime')) {
let abort = false;
await new Promise<void>(res =>
- ffmpeg.ffprobe(file.path, (err: any, metadata: any) => {
+ ffmpeg.ffprobe(file.filepath, (err: any, metadata: any) => {
if (metadata.streams.some((stream: any) => stream.codec_name === 'hevc')) {
abort = true;
}
@@ -245,15 +250,15 @@ export namespace DashUploadUtils {
// );
// file.path = file.path.replace('.mov', '.mp4').replace('.MOV', '.mp4');
// format = '.mp4';
- fs.unlink(path, () => {});
- return { source: file, result: { name: 'Unsupported video format', message: `Could not upload unsupported file (${name}). Please convert to an .mp4` } };
+ fs.unlink(filepath, () => {});
+ return { source: file, result: { name: 'Unsupported video format', message: `Could not upload unsupported file (${originalFilename}). Please convert to an .mp4` } };
}
}
if (videoFormats.includes(format) || format.includes('.webm')) {
return MoveParsedFile(file, Directory.videos);
}
- fs.unlink(path, () => {});
- return { source: file, result: { name: 'Unsupported video format', message: `Could not upload unsupported file (${name}). Please convert to an .mp4` } };
+ fs.unlink(filepath, () => {});
+ return { source: file, result: { name: 'Unsupported video format', message: `Could not upload unsupported file (${originalFilename}). Please convert to an .mp4` } };
case 'application':
if (applicationFormats.includes(format)) {
const val = UploadPdf(file);
@@ -267,32 +272,34 @@ export namespace DashUploadUtils {
if (audioFormats.includes(format)) {
return UploadAudio(file, format);
}
- fs.unlink(path, () => {});
- return { source: file, result: { name: 'Unsupported audio format', message: `Could not upload unsupported file (${name}). Please convert to an .mp3` } };
+ fs.unlink(filepath, () => {});
+ return { source: file, result: { name: 'Unsupported audio format', message: `Could not upload unsupported file (${originalFilename}). Please convert to an .mp3` } };
case 'text':
if (types[1] == 'csv') {
return UploadCsv(file);
}
}
- console.log(red(`Ignoring unsupported file (${name}) with upload type (${type}).`));
- fs.unlink(path, () => {});
- return { source: file, result: new Error(`Could not upload unsupported file (${name}) with upload type (${type}).`) };
+ console.log(red(`Ignoring unsupported file (${originalFilename}) with upload type (${type}).`));
+ fs.unlink(filepath, () => {});
+ return { source: file, result: new Error(`Could not upload unsupported file (${originalFilename}) with upload type (${type}).`) };
}
async function UploadPdf(file: File) {
- const fileKey = (await md5File(file.path)) + '.pdf';
+ const fileKey = (await md5File(file.filepath)) + '.pdf';
const textFilename = `${fileKey.substring(0, fileKey.length - 4)}.txt`;
if (fExists(fileKey, Directory.pdfs) && fExists(textFilename, Directory.text)) {
- fs.unlink(file.path, () => {});
+ fs.unlink(file.filepath, () => {});
return new Promise<Upload.FileResponse>(res => {
const textFilename = `${fileKey.substring(0, fileKey.length - 4)}.txt`;
const readStream = createReadStream(serverPathToFile(Directory.text, textFilename));
var rawText = '';
- readStream.on('data', chunk => (rawText += chunk.toString())).on('end', () => res(resolveExistingFile(file.name, fileKey, Directory.pdfs, file.type, undefined, rawText)));
+ readStream
+ .on('data', chunk => (rawText += chunk.toString())) //
+ .on('end', () => res(resolveExistingFile(file.originalFilename ?? '', fileKey, Directory.pdfs, file.mimetype, undefined, rawText)));
});
}
- const dataBuffer = readFileSync(file.path);
+ const dataBuffer = readFileSync(file.filepath);
const result: ParsedPDF | any = await parse(dataBuffer).catch((e: any) => e);
if (!result.code) {
await new Promise<void>((resolve, reject) => {
@@ -301,11 +308,11 @@ export namespace DashUploadUtils {
});
return MoveParsedFile(file, Directory.pdfs, undefined, result?.text, undefined, fileKey);
}
- return { source: file, result: { name: 'faile pdf pupload', message: `Could not upload (${file.name}).${result.message}` } };
+ return { source: file, result: { name: 'failed pdf upload', message: `Could not upload (${file.originalFilename}). ${result.message}` } };
}
async function UploadCsv(file: File) {
- const { path: sourcePath } = file;
+ const { filepath: sourcePath } = file;
// read the file as a string
const data = readFileSync(sourcePath, 'utf8');
// split the string into an array of lines
@@ -342,7 +349,11 @@ export namespace DashUploadUtils {
if (metadata instanceof Error) {
return { name: metadata.name, message: metadata.message };
}
- return UploadInspectedImage(metadata, filename || metadata.filename, prefix);
+ const outputFile = filename || metadata.filename;
+ if (!outputFile) {
+ return { name: source, message: 'output file not found' };
+ }
+ return UploadInspectedImage(metadata, outputFile, prefix);
};
export async function buildFileDirectories() {
@@ -367,7 +378,7 @@ export namespace DashUploadUtils {
}
export interface ImageResizer {
- resizer?: sharp.Sharp;
+ width: number;
suffix: SizeSuffix;
}
@@ -392,13 +403,14 @@ export namespace DashUploadUtils {
const response = await AzureManager.UploadBase64ImageBlob(resolved, data);
source = `${AzureManager.BASE_STRING}/${resolved}`;
} else {
+ source = `${resolvedServerUrl}${clientPathToFile(Directory.images, resolved)}`;
+ source = serverPathToFile(Directory.images, resolved);
const error = await new Promise<Error | null>(resolve => {
writeFile(serverPathToFile(Directory.images, resolved), data, 'base64', resolve);
});
if (error !== null) {
return error;
}
- source = `${resolvedServerUrl}${clientPathToFile(Directory.images, resolved)}`;
}
}
let resolvedUrl: string;
@@ -463,12 +475,12 @@ export namespace DashUploadUtils {
* to appear in the new location
*/
export async function MoveParsedFile(file: formidable.File, destination: Directory, suffix: string | undefined = undefined, text?: string, duration?: number, targetName?: string): Promise<Upload.FileResponse> {
- const { path: sourcePath } = file;
- let name = targetName ?? path.basename(sourcePath);
+ const { filepath } = file;
+ let name = targetName ?? path.basename(filepath);
suffix && (name += suffix);
return new Promise(resolve => {
const destinationPath = serverPathToFile(destination, name);
- rename(sourcePath, destinationPath, error => {
+ rename(filepath, destinationPath, error => {
resolve({
source: file,
result: error
@@ -507,7 +519,7 @@ export namespace DashUploadUtils {
* @param cleanUp a boolean indicating if the files should be deleted after upload. True by default.
* @returns the accessPaths for the resized files.
*/
- export const UploadInspectedImage = async (metadata: Upload.InspectionResults, filename?: string, prefix = '', cleanUp = true): Promise<Upload.ImageInformation> => {
+ export const UploadInspectedImage = async (metadata: Upload.InspectionResults, filename: string, prefix = '', cleanUp = true): Promise<Upload.ImageInformation> => {
const { requestable, source, ...remaining } = metadata;
const resolved = filename || `${prefix}upload_${Utils.GenerateGuid()}.${remaining.contentType.split('/')[1].toLowerCase()}`;
const { images } = Directory;
@@ -540,7 +552,7 @@ export namespace DashUploadUtils {
writtenFiles = {};
}
} else {
- writtenFiles = await outputResizedImages(() => request(requestable), resolved, pathToDirectory(Directory.images));
+ writtenFiles = await outputResizedImages(metadata.source, resolved, pathToDirectory(Directory.images));
}
for (const suffix of Object.keys(writtenFiles)) {
information.accessPaths[suffix] = getAccessPaths(images, writtenFiles[suffix]);
@@ -586,66 +598,63 @@ export namespace DashUploadUtils {
force: true,
};
+ async function correctRotation(imgSourcePath: string) {
+ const buffer = fs.readFileSync(imgSourcePath);
+ try {
+ return (await autorotate.rotate(buffer, { quality: 30 })).buffer;
+ } catch (e) {
+ return buffer;
+ }
+ }
+
/**
* outputResizedImages reads the image at the given source path and resizes it to each of the sizes defined at the top of this file.
*
* The new images will be saved to the server with the corresponding size suffixes.
- * @param streamProvider a Stream of the image to process, taken from the /parsed_files location
+ * @param imgSourcePath file path for image being resized
* @param outputFileName the basename (no size suffix) of the output file.
* @param outputDirectory the directory to output to, usually Directory.Images
* @returns a map with suffixes as keys and resized filenames as values.
*/
- export async function outputResizedImages(streamProvider: () => Stream | Promise<Stream>, outputFileName: string, outputDirectory: string) {
+ export async function outputResizedImages(imgSourcePath: string, outputFileName: string, outputDirectory: string) {
const writtenFiles: { [suffix: string]: string } = {};
- for (const { resizer, suffix } of resizers(path.extname(outputFileName))) {
- const outputPath = path.resolve(outputDirectory, (writtenFiles[suffix] = InjectSize(outputFileName, suffix)));
- await new Promise<void>(async (resolve, reject) => {
- const source = streamProvider();
- let readStream = source instanceof Promise ? await source : source;
- let error = false;
- if (resizer) {
- readStream = readStream.pipe(resizer.withMetadata()).on('error', async args => {
- error = true;
- if (error) {
- const source2 = streamProvider();
- let readStream2: Stream | undefined;
- readStream2 = source2 instanceof Promise ? await source2 : source2;
- readStream2?.pipe(createWriteStream(outputPath)).on('error', resolve).on('close', resolve);
- }
- });
- }
- !error && readStream?.pipe(createWriteStream(outputPath)).on('error', resolve).on('close', resolve);
+ const sizes = imageResampleSizes(path.extname(outputFileName));
+
+ const imgBuffer = await correctRotation(imgSourcePath);
+ const imgReadStream = new Duplex();
+ imgReadStream.push(imgBuffer);
+ imgReadStream.push(null);
+ const outputPath = (suffix: SizeSuffix) => path.resolve(outputDirectory, (writtenFiles[suffix] = InjectSize(outputFileName, suffix)));
+ await Promise.all(
+ sizes.filter(({ width }) => !width).map(({ suffix }) =>
+ new Promise<void>(res => imgReadStream.pipe(createWriteStream(outputPath(suffix))).on('close', res))
+ )); // prettier-ignore
+
+ return Jimp.read(imgBuffer)
+ .then(async (img: any) => {
+ await Promise.all( sizes.filter(({ width }) => width).map(({ width, suffix }) =>
+ img = img.resize(width, Jimp.AUTO).write(outputPath(suffix))
+ )); // prettier-ignore
+ return writtenFiles;
+ })
+ .catch((e: any) => {
+ console.log('ERROR' + e);
+ return writtenFiles;
});
- }
- return writtenFiles;
}
/**
* define the resample sizes to use
* @param ext the extension
- * @returns an array of resizer functions from sharp
+ * @returns an array of resize descriptions
*/
- function resizers(ext: string): DashUploadUtils.ImageResizer[] {
+ export function imageResampleSizes(ext: string): DashUploadUtils.ImageResizer[] {
return [
- { suffix: SizeSuffix.Original },
- ...Object.values(DashUploadUtils.Sizes).map(({ suffix, width }) => {
- let initial: sharp.Sharp | undefined = sharp({ failOnError: false }).resize(width, undefined, { withoutEnlargement: true });
- if (pngs.includes(ext)) {
- initial = initial.png(pngOptions);
- } else if (jpgs.includes(ext)) {
- initial = initial.jpeg();
- } else if (webps.includes(ext)) {
- initial = initial.webp();
- } else if (tiffs.includes(ext)) {
- initial = initial.tiff();
- } else if (ext === '.gif') {
- initial = undefined;
- }
- return {
- resizer: suffix === '_o' ? undefined : initial,
- suffix,
- };
- }),
+ { suffix: SizeSuffix.Original, width: 0 },
+ ...[...(AcceptableMedia.imageFormats.includes(ext.toLowerCase()) ? Object.values(DashUploadUtils.Sizes) : [])].map(({ suffix, width }) => ({
+ width,
+ suffix,
+ })),
];
}
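A minimal usage sketch of the reworked Jimp-based pipeline, assuming the exports above (paths and file names are illustrative):

    import * as path from 'path';
    import { DashUploadUtils } from './DashUploadUtils';
    import { Directory, pathToDirectory } from './ApiManagers/UploadManager';

    async function resampleUpload(sourcePath: string) {
        // e.g. [{ suffix: '_o', width: 0 }, { suffix: '_l', width: 800 },
        //       { suffix: '_m', width: 400 }, { suffix: '_s', width: 100 }]
        const sizes = DashUploadUtils.imageResampleSizes(path.extname(sourcePath));
        console.log(sizes.map(({ suffix, width }) => `${suffix}:${width}`).join(' '));

        // Rotation-corrects the source, streams the unresized original through
        // for the zero-width entry, resizes the rest with Jimp, and resolves to
        // a map of { suffix: resizedFilename }.
        return DashUploadUtils.outputResizedImages(sourcePath, path.basename(sourcePath), pathToDirectory(Directory.images));
    }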
}