Diffstat (limited to 'src/server/DashUploadUtils.ts')
-rw-r--r--   src/server/DashUploadUtils.ts   746
1 file changed, 365 insertions(+), 381 deletions(-)
diff --git a/src/server/DashUploadUtils.ts b/src/server/DashUploadUtils.ts
index 307aec6fc..08cea1de5 100644
--- a/src/server/DashUploadUtils.ts
+++ b/src/server/DashUploadUtils.ts
@@ -15,16 +15,15 @@ import { basename } from 'path';
import * as parse from 'pdf-parse';
import * as request from 'request-promise';
import { Duplex, Stream } from 'stream';
-import { filesDirectory, publicDirectory } from '.';
import { Utils } from '../Utils';
-import { Opt } from '../fields/Doc';
-import { ParsedPDF } from '../server/PdfTypes';
import { createIfNotExists } from './ActionUtilities';
import { AzureManager } from './ApiManagers/AzureManager';
-import { Directory, clientPathToFile, pathToDirectory, serverPathToFile } from './ApiManagers/UploadManager';
+import { ParsedPDF } from './PdfTypes';
import { AcceptableMedia, Upload } from './SharedMediaTypes';
+import { Directory, clientPathToFile, filesDirectory, pathToDirectory, publicDirectory, serverPathToFile } from './SocketData';
import { resolvedServerUrl } from './server_Initialization';
-const spawn = require('child_process').spawn;
+
+const { spawn } = require('child_process');
const { exec } = require('child_process');
const requestImageSize = require('../client/util/request-image-size');
@@ -42,7 +41,7 @@ export function InjectSize(filename: string, size: SizeSuffix) {
}
function isLocal() {
- return /Dash-Web[0-9]*[\\\/]src[\\\/]server[\\\/]public[\\\/](.*)/;
+ return /Dash-Web[0-9]*[\\/]src[\\/]server[\\/]public[\\/](.*)/;
}
function usingAzure() {
@@ -68,11 +67,21 @@ export namespace DashUploadUtils {
const size = 'content-length';
const type = 'content-type';
- const BLOBSTORE_URL = process.env.BLOBSTORE_URL;
- const RESIZE_FUNCTION_URL = process.env.RESIZE_FUNCTION_URL;
+ const { BLOBSTORE_URL, RESIZE_FUNCTION_URL } = process.env;
- const { imageFormats, videoFormats, applicationFormats, audioFormats } = AcceptableMedia; //TODO:glr
+ const { imageFormats, videoFormats, applicationFormats, audioFormats } = AcceptableMedia; // TODO:glr
+ export function fExists(name: string, destination: Directory) {
+ const destinationPath = serverPathToFile(destination, name);
+ return existsSync(destinationPath);
+ }
+
+ export function getAccessPaths(directory: Directory, fileName: string) {
+ return {
+ client: clientPathToFile(directory, fileName),
+ server: serverPathToFile(directory, fileName),
+ };
+ }
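+
+ // Illustrative usage sketch (editorial, not part of this change): fExists and getAccessPaths
+ // can be paired to skip re-uploading a file that is already on disk, e.g.
+ //   if (fExists('photo.jpg', Directory.images)) {
+ //       const { client, server } = getAccessPaths(Directory.images, 'photo.jpg');
+ //       // client: URL path the browser requests; server: absolute path on disk
+ //   }
+ // ('photo.jpg' is a hypothetical, already-uploaded file name.)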
export async function concatVideos(filePaths: string[]): Promise<Upload.AccessPathInfo> {
// make a list of paths to create the ordered text file for ffmpeg
const inputListName = 'concat.txt';
@@ -80,14 +89,14 @@ export namespace DashUploadUtils {
// make a list of paths to create the ordered text file for ffmpeg
const filePathsText = filePaths.map(filePath => `file '${filePath}'`).join('\n');
// write the text file to the file system
- await new Promise<void>((res, reject) =>
+ await new Promise<void>((res, reject) => {
writeFile(textFilePath, filePathsText, err => {
if (err) {
reject();
console.log(err);
} else res();
- })
- );
+ });
+ });
// make output file name based on timestamp
const outputFileName = `output-${Utils.GenerateGuid()}.mp4`;
@@ -95,19 +104,19 @@ export namespace DashUploadUtils {
const outputFilePath = path.join(pathToDirectory(Directory.videos), outputFileName);
// concatenate the videos
- await new Promise((resolve, reject) =>
+ await new Promise((resolve, reject) => {
ffmpeg()
.input(textFilePath)
.inputOptions(['-f concat', '-safe 0'])
// .outputOptions('-c copy')
- //.videoCodec("copy")
+ // .videoCodec("copy")
.save(outputFilePath)
.on('error', (err: any) => {
console.log(err);
reject();
})
- .on('end', resolve)
- );
+ .on('end', resolve);
+ });
// delete concat.txt from the file system
unlinkSync(textFilePath);
@@ -135,270 +144,76 @@ export namespace DashUploadUtils {
};
}
- export function QueryYoutubeProgress(videoId: string, user?: Express.User) {
+ export const uploadProgress = new Map<string, string>();
+
+ export function QueryYoutubeProgress(videoId: string) {
// console.log(`PROGRESS:${videoId}`, (user as any)?.email);
return uploadProgress.get(videoId) ?? 'pending data upload';
}
- export let uploadProgress = new Map<string, string>();
-
- export function uploadYoutube(videoId: string, overwriteId: string): Promise<Upload.FileResponse> {
- return new Promise<Upload.FileResponse<Upload.FileInformation>>((res, rej) => {
- const name = videoId;
- const filepath = name.replace(/^-/, '__') + '.mp4';
- const finalPath = serverPathToFile(Directory.videos, filepath);
- if (existsSync(finalPath)) {
- uploadProgress.set(overwriteId, 'computing duration');
- exec(`yt-dlp -o ${finalPath} "https://www.youtube.com/watch?v=${videoId}" --get-duration`, (error: any, stdout: any, stderr: any) => {
- const time = Array.from(stdout.trim().split(':')).reverse();
- const duration = (time.length > 2 ? Number(time[2]) * 1000 * 60 : 0) + (time.length > 1 ? Number(time[1]) * 60 : 0) + (time.length > 0 ? Number(time[0]) : 0);
- res(resolveExistingFile(name, filepath, Directory.videos, 'video/mp4', duration, undefined));
- });
- } else {
- uploadProgress.set(overwriteId, 'starting download');
- const ytdlp = spawn(`yt-dlp`, ['-o', filepath, `https://www.youtube.com/watch?v=${videoId}`, '--max-filesize', '100M', '-f', 'mp4']);
-
- ytdlp.stdout.on('data', (data: any) => uploadProgress.set(overwriteId, data.toString()));
-
- let errors = '';
- ytdlp.stderr.on('data', (data: any) => {
- uploadProgress.set(overwriteId, 'error:' + data.toString());
- errors = data.toString();
- });
-
- ytdlp.on('exit', function (code: any) {
- if (code) {
- res({
- source: {
- size: 0,
- filepath: name,
- originalFilename: name,
- newFilename: name,
- mimetype: 'video',
- hashAlgorithm: 'md5',
- toJSON: () => ({ newFilename: name, filepath, mimetype: 'video', mtime: new Date(), size: 0, length: 0, originalFilename: name }),
- },
- result: { name: 'failed youtube query', message: `Could not archive video. ${code ? errors : uploadProgress.get(videoId)}` },
- });
- } else {
- uploadProgress.set(overwriteId, 'computing duration');
- exec(`yt-dlp-o ${filepath} "https://www.youtube.com/watch?v=${videoId}" --get-duration`, (error: any, stdout: any, stderr: any) => {
- const time = Array.from(stdout.trim().split(':')).reverse();
- const duration = (time.length > 2 ? Number(time[2]) * 1000 * 60 : 0) + (time.length > 1 ? Number(time[1]) * 60 : 0) + (time.length > 0 ? Number(time[0]) : 0);
- const data = { size: 0, filepath, name, mimetype: 'video', originalFilename: name, newFilename: name, hashAlgorithm: 'md5' as 'md5', type: 'video/mp4' };
- const file = { ...data, toJSON: () => ({ ...data, length: 0, filename: data.filepath.replace(/.*\//, ''), mtime: new Date(), toJson: () => undefined as any }) };
- MoveParsedFile(file, Directory.videos).then(output => {
- console.log('OUTPUT = ' + output);
- res(output);
- });
- });
- }
+ /**
+ * Basically just a wrapper around rename, which 'deletes'
+ * the file at the old path and 'moves' it to the new one. For simplicity, the
+ * caller just has to pass in the name of the target directory, and this function
+ * will resolve the actual target path from that.
+ * @param file The file to move
+ * @param destination One of the specific media asset directories into which to move it
+ * @param suffix An optional suffix to append to the file's name in its new location
+ */
+ export async function MoveParsedFile(file: formidable.File, destination: Directory, suffix?: string, text?: string, duration?: number, targetName?: string): Promise<Upload.FileResponse> {
+ const { filepath } = file;
+ let name = targetName ?? path.basename(filepath);
+ suffix && (name += suffix);
+ return new Promise(resolve => {
+ const destinationPath = serverPathToFile(destination, name);
+ rename(filepath, destinationPath, error => {
+ resolve({
+ source: file,
+ result: error ?? {
+ accessPaths: {
+ agnostic: getAccessPaths(destination, name),
+ },
+ rawText: text,
+ duration,
+ },
});
- }
+ });
});
}
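+
+ // Illustrative caller sketch (editorial, not part of this change; `parsed` stands in for a
+ // formidable.File produced by the upload form):
+ //   const response = await MoveParsedFile(parsed, Directory.videos);
+ //   if (!(response.result instanceof Error)) {
+ //       console.log(response.result.accessPaths.agnostic.client); // the served video path
+ //   }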
- export async function upload(file: File, overwriteGuid?: string): Promise<Upload.FileResponse> {
- const isAzureOn = usingAzure();
- const { mimetype: type, filepath, originalFilename } = file;
- const types = type?.split('/') ?? [];
- // uploadProgress.set(overwriteGuid ?? name, 'uploading'); // If the client sent a guid it uses to track upload progress, use that guid. Otherwise, use the file's name.
-
- const category = types[0];
- let format = `.${types[1]}`;
- console.log(green(`Processing upload of file (${originalFilename}) and format (${format}) with upload type (${type}) in category (${category}).`));
-
- switch (category) {
- case 'image':
- if (imageFormats.includes(format)) {
- const result = await UploadImage(filepath, basename(filepath));
- return { source: file, result };
- }
- fs.unlink(filepath, () => {});
- return { source: file, result: { name: 'Unsupported image format', message: `Could not upload unsupported file (${originalFilename}). Please convert to an .jpg` } };
- case 'video':
- if (format.includes('x-matroska')) {
- console.log('case video');
- await new Promise(res =>
- ffmpeg(file.filepath)
- .videoCodec('copy') // this will copy the data instead of reencode it
- .save(file.filepath.replace('.mkv', '.mp4'))
- .on('end', res)
- .on('error', (e: any) => console.log(e))
- );
- file.filepath = file.filepath.replace('.mkv', '.mp4');
- format = '.mp4';
- }
- if (format.includes('quicktime')) {
- let abort = false;
- await new Promise<void>(res =>
- ffmpeg.ffprobe(file.filepath, (err: any, metadata: any) => {
- if (metadata.streams.some((stream: any) => stream.codec_name === 'hevc')) {
- abort = true;
- }
- res();
- })
- );
- if (abort) {
- // bcz: instead of aborting, we could convert the file using the code below to an mp4. Problem is that this takes a long time and will clog up the server.
- // await new Promise(res =>
- // ffmpeg(file.path)
- // .videoCodec('libx264') // this will copy the data instead of reencode it
- // .audioCodec('mp2')
- // .save(file.path.replace('.MOV', '.mp4').replace('.mov', '.mp4'))
- // .on('end', res)
- // );
- // file.path = file.path.replace('.mov', '.mp4').replace('.MOV', '.mp4');
- // format = '.mp4';
- fs.unlink(filepath, () => {});
- return { source: file, result: { name: 'Unsupported video format', message: `Could not upload unsupported file (${originalFilename}). Please convert to an .mp4` } };
- }
- }
- if (videoFormats.includes(format) || format.includes('.webm')) {
- return MoveParsedFile(file, Directory.videos);
- }
- fs.unlink(filepath, () => {});
- return { source: file, result: { name: 'Unsupported video format', message: `Could not upload unsupported file (${originalFilename}). Please convert to an .mp4` } };
- case 'application':
- if (applicationFormats.includes(format)) {
- const val = UploadPdf(file);
- if (val) return val;
- }
- case 'audio':
- const components = format.split(';');
- if (components.length > 1) {
- format = components[0];
- }
- if (audioFormats.includes(format)) {
- return UploadAudio(file, format);
- }
- fs.unlink(filepath, () => {});
- return { source: file, result: { name: 'Unsupported audio format', message: `Could not upload unsupported file (${originalFilename}). Please convert to an .mp3` } };
- case 'text':
- if (types[1] == 'csv') {
- return UploadCsv(file);
- }
- }
-
- console.log(red(`Ignoring unsupported file (${originalFilename}) with upload type (${type}).`));
- fs.unlink(filepath, () => {});
- return { source: file, result: new Error(`Could not upload unsupported file (${originalFilename}) with upload type (${type}).`) };
- }
-
- async function UploadPdf(file: File) {
- const fileKey = (await md5File(file.filepath)) + '.pdf';
- const textFilename = `${fileKey.substring(0, fileKey.length - 4)}.txt`;
- if (fExists(fileKey, Directory.pdfs) && fExists(textFilename, Directory.text)) {
- fs.unlink(file.filepath, () => {});
- return new Promise<Upload.FileResponse>(res => {
- const textFilename = `${fileKey.substring(0, fileKey.length - 4)}.txt`;
- const readStream = createReadStream(serverPathToFile(Directory.text, textFilename));
- var rawText = '';
- readStream
- .on('data', chunk => (rawText += chunk.toString())) //
- .on('end', () => res(resolveExistingFile(file.originalFilename ?? '', fileKey, Directory.pdfs, file.mimetype, undefined, rawText)));
- });
- }
- const dataBuffer = readFileSync(file.filepath);
- const result: ParsedPDF | any = await parse(dataBuffer).catch((e: any) => e);
- if (!result.code) {
- await new Promise<void>((resolve, reject) => {
- const writeStream = createWriteStream(serverPathToFile(Directory.text, textFilename));
- writeStream.write(result?.text, error => (error ? reject(error) : resolve()));
+ const parseExifData = async (source: string) => {
+ const image = await request.get(source, { encoding: null });
+ const { /* data, */ error } = await new Promise<{ data: any; error: any }>(resolve => {
+ // eslint-disable-next-line no-new
+ new ExifImage({ image }, (exifError, data) => {
+ const reason = (exifError as any)?.code;
+ resolve({ data, error: reason });
});
- return MoveParsedFile(file, Directory.pdfs, undefined, result?.text, undefined, fileKey);
- }
- return { source: file, result: { name: 'faile pdf pupload', message: `Could not upload (${file.originalFilename}).${result.message}` } };
- }
-
- async function UploadCsv(file: File) {
- const { filepath: sourcePath } = file;
- // read the file as a string
- const data = readFileSync(sourcePath, 'utf8');
- // split the string into an array of lines
- return MoveParsedFile(file, Directory.csv, undefined, data);
- // console.log(csvParser(data));
- }
-
- const manualSuffixes = ['.webm'];
-
- async function UploadAudio(file: File, format: string) {
- const suffix = manualSuffixes.includes(format) ? format : undefined;
- return MoveParsedFile(file, Directory.audio, suffix);
- }
-
- /**
- * Uploads an image specified by the @param source to Dash's /public/files/
- * directory, and returns information generated during that upload
- *
- * @param {string} source is either the absolute path of an already uploaded image or
- * the url of a remote image
- * @param {string} filename dictates what to call the image. If not specified,
- * the name {@param prefix}_upload_{GUID}
- * @param {string} prefix is a string prepended to the generated image name in the
- * event that @param filename is not specified
- *
- * @returns {ImageUploadInformation | Error} This method returns
- * 1) the paths to the uploaded images (plural due to resizing)
- * 2) the exif data embedded in the image, or the error explaining why exif couldn't be parsed
- * 3) the size of the image, in bytes (4432130)
- * 4) the content type of the image, i.e. image/(jpeg | png | ...)
- */
- export const UploadImage = async (source: string, filename?: string, prefix: string = ''): Promise<Upload.ImageInformation | Error> => {
- const metadata = await InspectImage(source);
- if (metadata instanceof Error) {
- return { name: metadata.name, message: metadata.message };
- }
- const outputFile = filename || metadata.filename || '';
-
- return UploadInspectedImage(metadata, outputFile, prefix);
+ });
+ return error ? { data: undefined, error } : { data: await exifr.parse(image), error };
};
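+
+ // Editorial note (illustrative): ExifImage is used here only to probe whether EXIF data can be
+ // read at all; when it can, the actual parsing is delegated to exifr.parse(image) above.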
-
- export async function buildFileDirectories() {
- if (!existsSync(publicDirectory)) {
- console.error('\nPlease ensure that the following directory exists...\n');
- console.log(publicDirectory);
- process.exit(0);
- }
- if (!existsSync(filesDirectory)) {
- console.error('\nPlease ensure that the following directory exists...\n');
- console.log(filesDirectory);
- process.exit(0);
- }
- const pending = Object.keys(Directory).map(sub => createIfNotExists(`${filesDirectory}/${sub}`));
- return Promise.all(pending);
- }
-
- export interface RequestedImageSize {
- width: number;
- height: number;
- type: string;
- }
-
- export interface ImageResizer {
- width: number;
- suffix: SizeSuffix;
- }
-
/**
* Based on the url's classification as local or remote, gleans
* as much information as possible about the specified image
*
* @param source is the path or url to the image in question
*/
- export const InspectImage = async (source: string): Promise<Upload.InspectionResults | Error> => {
- let rawMatches: RegExpExecArray | null;
+ export const InspectImage = async (sourceIn: string): Promise<Upload.InspectionResults | Error> => {
+ let source = sourceIn;
+ const rawMatches = /^data:image\/([a-z]+);base64,(.*)/.exec(source);
let filename: string | undefined;
/**
* Just more edge case handling: this if clause handles the case where an image dropped onto the canvas
* is represented by a base64 encoded data uri, rather than a proper file. We manually write it out
* to the server and then carry on as if it had been put there by the Formidable form / file parser.
*/
- if ((rawMatches = /^data:image\/([a-z]+);base64,(.*)/.exec(source)) !== null) {
+ if (rawMatches !== null) {
const [ext, data] = rawMatches.slice(1, 3);
- const resolved = (filename = `upload_${Utils.GenerateGuid()}.${ext}`);
+ filename = `upload_${Utils.GenerateGuid()}.${ext}`;
+ const resolved = filename;
if (usingAzure()) {
- const response = await AzureManager.UploadBase64ImageBlob(resolved, data);
+ await AzureManager.UploadBase64ImageBlob(resolved, data);
source = `${AzureManager.BASE_STRING}/${resolved}`;
} else {
source = `${resolvedServerUrl}${clientPathToFile(Directory.images, resolved)}`;
@@ -438,7 +253,7 @@ export namespace DashUploadUtils {
// Use the request library to parse out file level image information in the headers
const { headers } = await new Promise<any>((resolve, reject) => {
- return request.head(resolvedUrl, (error, res) => (error ? reject(error) : resolve(res)));
+ request.head(resolvedUrl, (error, res) => (error ? reject(error) : resolve(res)));
}).catch(e => {
console.log('Error processing headers: ', e);
});
@@ -449,6 +264,7 @@ export namespace DashUploadUtils {
// Bundle up the information into an object
return {
source,
+ // eslint-disable-next-line radix
contentSize: parseInt(headers[size]),
contentType: headers[type],
nativeWidth,
@@ -462,49 +278,71 @@ export namespace DashUploadUtils {
}
};
+ async function correctRotation(imgSourcePath: string) {
+ const buffer = fs.readFileSync(imgSourcePath);
+ try {
+ return (await autorotate.rotate(buffer, { quality: 30 })).buffer;
+ } catch (e) {
+ return buffer;
+ }
+ }
+
/**
- * Basically just a wrapper around rename, which 'deletes'
- * the file at the old path and 'moves' it to the new one. For simplicity, the
- * caller just has to pass in the name of the target directory, and this function
- * will resolve the actual target path from that.
- * @param file The file to move
- * @param destination One of the specific media asset directories into which to move it
- * @param suffix If the file doesn't have a suffix and you want to provide it one
- * to appear in the new location
+ * define the resizers to use
+ * @param ext the extension
+ * @returns an array of resize descriptions
*/
- export async function MoveParsedFile(file: formidable.File, destination: Directory, suffix: string | undefined = undefined, text?: string, duration?: number, targetName?: string): Promise<Upload.FileResponse> {
- const { filepath } = file;
- let name = targetName ?? path.basename(filepath);
- suffix && (name += suffix);
- return new Promise(resolve => {
- const destinationPath = serverPathToFile(destination, name);
- rename(filepath, destinationPath, error => {
- resolve({
- source: file,
- result: error
- ? error
- : {
- accessPaths: {
- agnostic: getAccessPaths(destination, name),
- },
- rawText: text,
- duration,
- },
- });
- });
- });
+ export function imageResampleSizes(ext: string): DashUploadUtils.ImageResizer[] {
+ return [
+ { suffix: SizeSuffix.Original, width: 0 },
+ ...[...(AcceptableMedia.imageFormats.includes(ext.toLowerCase()) ? Object.values(DashUploadUtils.Sizes) : [])].map(({ suffix, width }) => ({
+ width,
+ suffix,
+ })),
+ ];
}
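+
+ // For example (illustrative): imageResampleSizes('.png') returns the Original entry (width 0,
+ // meaning "copy the source bytes unresized") plus one entry per size in DashUploadUtils.Sizes,
+ // while an unrecognized extension yields only the Original entry.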
- export function fExists(name: string, destination: Directory) {
- const destinationPath = serverPathToFile(destination, name);
- return existsSync(destinationPath);
- }
+ /**
+ * outputResizedImages resizes the image at the given source path according to the sizes defined at the top of this file.
+ *
+ * The resized images are saved to the server with the corresponding size suffixes.
+ * @param imgSourcePath file path for image being resized
+ * @param outputFileName the basename (No suffix) of the outputted file.
+ * @param outputDirectory the directory to output to, usually Directory.Images
+ * @returns a map with suffixes as keys and resized filenames as values.
+ */
+ export async function outputResizedImages(imgSourcePath: string, outputFileName: string, outputDirectory: string) {
+ const writtenFiles: { [suffix: string]: string } = {};
+ const sizes = imageResampleSizes(path.extname(outputFileName));
- export function getAccessPaths(directory: Directory, fileName: string) {
- return {
- client: clientPathToFile(directory, fileName),
- server: serverPathToFile(directory, fileName),
+ const imgBuffer = await correctRotation(imgSourcePath);
+ const imgReadStream = new Duplex();
+ imgReadStream.push(imgBuffer);
+ imgReadStream.push(null);
+ const outputPath = (suffix: SizeSuffix) => {
+ writtenFiles[suffix] = InjectSize(outputFileName, suffix);
+ return path.resolve(outputDirectory, writtenFiles[suffix]);
};
+ await Promise.all(
+ sizes.filter(({ width }) => !width).map(({ suffix }) =>
+ new Promise<void>(res => {
+ imgReadStream.pipe(createWriteStream(outputPath(suffix))).on('close', res);
+ })
+ )); // prettier-ignore
+
+ return Jimp.read(imgBuffer)
+ .then(async (imgIn: any) => {
+ let img = imgIn;
+ await Promise.all( sizes.filter(({ width }) => width).map(({ width, suffix }) => {
+ img = img.resize(width, Jimp.AUTO).write(outputPath(suffix));
+ return img;
+ } )); // prettier-ignore
+ return writtenFiles;
+ })
+ .catch((e: any) => {
+ console.log('ERROR' + e);
+ return writtenFiles;
+ });
}
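+
+ // Illustrative call (editorial, hypothetical paths):
+ //   const written = await outputResizedImages('/tmp/upload_ab12.png', 'upload_ab12.png', pathToDirectory(Directory.images));
+ //   // `written` maps each produced suffix to the InjectSize(outputFileName, suffix) filename written above.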
/**
@@ -555,119 +393,265 @@ export namespace DashUploadUtils {
} catch (e) {
// input is a blob or other, try reading it to create a metadata source file.
const reqSource = request(metadata.source);
- let readStream: Stream = reqSource instanceof Promise ? await reqSource : reqSource;
+ const readStream: Stream = reqSource instanceof Promise ? await reqSource : reqSource;
const readSource = `${prefix}upload_${Utils.GenerateGuid()}.${metadata.contentType.split('/')[1].toLowerCase()}`;
- await new Promise<void>((res, rej) =>
+ await new Promise<void>((res, rej) => {
readStream
.pipe(createWriteStream(readSource))
.on('close', () => res())
- .on('error', () => rej())
- );
+ .on('error', () => rej());
+ });
writtenFiles = await outputResizedImages(readSource, resolved, pathToDirectory(Directory.images));
- fs.unlink(readSource, err => console.log("Couldn't unlink temporary image file:" + readSource));
+ fs.unlink(readSource, err => console.log("Couldn't unlink temporary image file:" + readSource, err));
}
}
- for (const suffix of Object.keys(writtenFiles)) {
+ Array.from(Object.keys(writtenFiles)).forEach(suffix => {
information.accessPaths[suffix] = getAccessPaths(images, writtenFiles[suffix]);
- }
+ });
if (isLocal().test(source) && cleanUp) {
unlinkSync(source);
}
return information;
};
- const bufferConverterRec = (layer: any) => {
- for (const key of Object.keys(layer)) {
- const val: any = layer[key];
- if (val instanceof Buffer) {
- layer[key] = val.toString();
- } else if (Array.isArray(val) && typeof val[0] === 'number') {
- layer[key] = Buffer.from(val).toString();
- } else if (typeof val === 'object') {
- bufferConverterRec(val);
- }
+ /**
+ * Uploads an image specified by the @param source to Dash's /public/files/
+ * directory, and returns information generated during that upload
+ *
+ * @param {string} source is either the absolute path of an already uploaded image or
+ * the url of a remote image
+ * @param {string} filename dictates what to call the image. If not specified,
+ * the name {@param prefix}_upload_{GUID} is used
+ * @param {string} prefix is a string prepended to the generated image name in the
+ * event that @param filename is not specified
+ *
+ * @returns {ImageUploadInformation | Error} This method returns
+ * 1) the paths to the uploaded images (plural due to resizing)
+ * 2) the exif data embedded in the image, or the error explaining why exif couldn't be parsed
+ * 3) the size of the image, in bytes (4432130)
+ * 4) the content type of the image, i.e. image/(jpeg | png | ...)
+ */
+ export const UploadImage = async (source: string, filename?: string, prefix: string = ''): Promise<Upload.ImageInformation | Error> => {
+ const metadata = await InspectImage(source);
+ if (metadata instanceof Error) {
+ return { name: metadata.name, message: metadata.message };
}
+ const outputFile = filename || metadata.filename || '';
+
+ return UploadInspectedImage(metadata, outputFile, prefix);
};
- const parseExifData = async (source: string) => {
- const image = await request.get(source, { encoding: null });
- const { data, error } = await new Promise<{ data: any; error: any }>(resolve => {
- new ExifImage({ image }, (error, data) => {
- let reason: Opt<string> = undefined;
- if (error) {
- reason = (error as any).code;
- }
- resolve({ data, error: reason });
- });
+ export function uploadYoutube(videoId: string, overwriteId: string): Promise<Upload.FileResponse> {
+ return new Promise<Upload.FileResponse<Upload.FileInformation>>(res => {
+ const name = videoId;
+ const filepath = name.replace(/^-/, '__') + '.mp4';
+ const finalPath = serverPathToFile(Directory.videos, filepath);
+ if (existsSync(finalPath)) {
+ uploadProgress.set(overwriteId, 'computing duration');
+ exec(`yt-dlp -o ${finalPath} "https://www.youtube.com/watch?v=${videoId}" --get-duration`, (error: any, stdout: any /* , stderr: any */) => {
+ const time = Array.from(stdout.trim().split(':')).reverse();
+ const duration = (time.length > 2 ? Number(time[2]) * 1000 * 60 : 0) + (time.length > 1 ? Number(time[1]) * 60 : 0) + (time.length > 0 ? Number(time[0]) : 0);
+ res(resolveExistingFile(name, filepath, Directory.videos, 'video/mp4', duration, undefined));
+ });
+ } else {
+ uploadProgress.set(overwriteId, 'starting download');
+ const ytdlp = spawn(`yt-dlp`, ['-o', filepath, `https://www.youtube.com/watch?v=${videoId}`, '--max-filesize', '100M', '-f', 'mp4']);
+
+ ytdlp.stdout.on('data', (data: any) => uploadProgress.set(overwriteId, data.toString()));
+
+ let errors = '';
+ ytdlp.stderr.on('data', (data: any) => {
+ uploadProgress.set(overwriteId, 'error:' + data.toString());
+ errors = data.toString();
+ });
+
+ ytdlp.on('exit', (code: any) => {
+ if (code) {
+ res({
+ source: {
+ size: 0,
+ filepath: name,
+ originalFilename: name,
+ newFilename: name,
+ mimetype: 'video',
+ hashAlgorithm: 'md5',
+ toJSON: () => ({ newFilename: name, filepath, mimetype: 'video', mtime: new Date(), size: 0, length: 0, originalFilename: name }),
+ },
+ result: { name: 'failed youtube query', message: `Could not archive video. ${code ? errors : uploadProgress.get(videoId)}` },
+ });
+ } else {
+ uploadProgress.set(overwriteId, 'computing duration');
+ exec(`yt-dlp -o ${filepath} "https://www.youtube.com/watch?v=${videoId}" --get-duration`, (/* error: any, stdout: any, stderr: any */) => {
+ // const time = Array.from(stdout.trim().split(':')).reverse();
+ // const duration = (time.length > 2 ? Number(time[2]) * 1000 * 60 : 0) + (time.length > 1 ? Number(time[1]) * 60 : 0) + (time.length > 0 ? Number(time[0]) : 0);
+ const data = { size: 0, filepath, name, mimetype: 'video', originalFilename: name, newFilename: name, hashAlgorithm: 'md5' as 'md5', type: 'video/mp4' };
+ const file = { ...data, toJSON: () => ({ ...data, length: 0, filename: data.filepath.replace(/.*\//, ''), mtime: new Date(), toJson: () => undefined as any }) };
+ MoveParsedFile(file, Directory.videos).then(output => res(output));
+ });
+ }
+ });
+ }
});
- //data && bufferConverterRec(data);
- return error ? { data: undefined, error } : { data: await exifr.parse(image), error };
- };
+ }
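+
+ // Illustrative flow (editorial): a caller starts uploadYoutube(videoId, guid) and then polls
+ // QueryYoutubeProgress(guid) to read the latest yt-dlp stdout line recorded in uploadProgress.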
+ const manualSuffixes = ['.webm'];
- const { pngs, jpgs, webps, tiffs } = AcceptableMedia;
- const pngOptions = {
- compressionLevel: 9,
- adaptiveFiltering: true,
- force: true,
- };
+ async function UploadAudio(file: File, format: string) {
+ const suffix = manualSuffixes.includes(format) ? format : undefined;
+ return MoveParsedFile(file, Directory.audio, suffix);
+ }
- async function correctRotation(imgSourcePath: string) {
- const buffer = fs.readFileSync(imgSourcePath);
- try {
- return (await autorotate.rotate(buffer, { quality: 30 })).buffer;
- } catch (e) {
- return buffer;
+ async function UploadPdf(file: File) {
+ const fileKey = (await md5File(file.filepath)) + '.pdf';
+ const textFilename = `${fileKey.substring(0, fileKey.length - 4)}.txt`;
+ if (fExists(fileKey, Directory.pdfs) && fExists(textFilename, Directory.text)) {
+ fs.unlink(file.filepath, () => {});
+ return new Promise<Upload.FileResponse>(res => {
+ const pdfTextFilename = `${fileKey.substring(0, fileKey.length - 4)}.txt`;
+ const readStream = createReadStream(serverPathToFile(Directory.text, pdfTextFilename));
+ let rawText = '';
+ readStream
+ .on('data', chunk => {
+ rawText += chunk.toString();
+ })
+ .on('end', () => res(resolveExistingFile(file.originalFilename ?? '', fileKey, Directory.pdfs, file.mimetype, undefined, rawText)));
+ });
}
+ const dataBuffer = readFileSync(file.filepath);
+ const result: ParsedPDF | any = await parse(dataBuffer).catch((e: any) => e);
+ if (!result.code) {
+ await new Promise<void>((resolve, reject) => {
+ const writeStream = createWriteStream(serverPathToFile(Directory.text, textFilename));
+ writeStream.write(result?.text, error => (error ? reject(error) : resolve()));
+ });
+ return MoveParsedFile(file, Directory.pdfs, undefined, result?.text, undefined, fileKey);
+ }
+ return { source: file, result: { name: 'failed pdf upload', message: `Could not upload (${file.originalFilename}).${result.message}` } };
}
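+
+ // Cache layout sketch (editorial): a PDF is keyed by its md5 hash, so a file hashing to <md5> is
+ // stored under Directory.pdfs as <md5>.pdf with its extracted text under Directory.text as
+ // <md5>.txt; re-uploading identical bytes hits the fExists() fast path above instead of re-parsing.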
- /**
- * outputResizedImages takes in a readable stream and resizes the images according to the sizes defined at the top of this file.
- *
- * The new images will be saved to the server with the corresponding prefixes.
- * @param imgSourcePath file path for image being resized
- * @param outputFileName the basename (No suffix) of the outputted file.
- * @param outputDirectory the directory to output to, usually Directory.Images
- * @returns a map with suffixes as keys and resized filenames as values.
- */
- export async function outputResizedImages(imgSourcePath: string, outputFileName: string, outputDirectory: string) {
- const writtenFiles: { [suffix: string]: string } = {};
- const sizes = imageResampleSizes(path.extname(outputFileName));
+ async function UploadCsv(file: File) {
+ const { filepath: sourcePath } = file;
+ // read the file as a string
+ const data = readFileSync(sourcePath, 'utf8');
+ // split the string into an array of lines
+ return MoveParsedFile(file, Directory.csv, undefined, data);
+ // console.log(csvParser(data));
+ }
- const imgBuffer = await correctRotation(imgSourcePath);
- const imgReadStream = new Duplex();
- imgReadStream.push(imgBuffer);
- imgReadStream.push(null);
- const outputPath = (suffix: SizeSuffix) => path.resolve(outputDirectory, (writtenFiles[suffix] = InjectSize(outputFileName, suffix)));
- await Promise.all(
- sizes.filter(({ width }) => !width).map(({ suffix }) =>
- new Promise<void>(res => imgReadStream.pipe(createWriteStream(outputPath(suffix))).on('close', res))
- )); // prettier-ignore
+ export async function upload(file: File /* , overwriteGuid?: string */): Promise<Upload.FileResponse> {
+ // const isAzureOn = usingAzure();
+ const { mimetype, filepath, originalFilename } = file;
+ const types = mimetype?.split('/') ?? [];
+ // uploadProgress.set(overwriteGuid ?? name, 'uploading'); // If the client sent a guid it uses to track upload progress, use that guid. Otherwise, use the file's name.
- return Jimp.read(imgBuffer)
- .then(async (img: any) => {
- await Promise.all( sizes.filter(({ width }) => width).map(({ width, suffix }) =>
- img = img.resize(width, Jimp.AUTO).write(outputPath(suffix))
- )); // prettier-ignore
- return writtenFiles;
- })
- .catch((e: any) => {
- console.log('ERROR' + e);
- return writtenFiles;
- });
+ const category = types[0];
+ let format = `.${types[1]}`;
+ console.log(green(`Processing upload of file (${originalFilename}) and format (${format}) with upload type (${mimetype}) in category (${category}).`));
+
+ switch (category) {
+ case 'image':
+ if (imageFormats.includes(format)) {
+ const result = await UploadImage(filepath, basename(filepath));
+ return { source: file, result };
+ }
+ fs.unlink(filepath, () => {});
+ return { source: file, result: { name: 'Unsupported image format', message: `Could not upload unsupported file (${originalFilename}). Please convert to an .jpg` } };
+ case 'video': {
+ const vidFile = file;
+ if (format.includes('x-matroska')) {
+ await new Promise(res => {
+ ffmpeg(vidFile.filepath)
+ .videoCodec('copy') // this will copy the data instead of reencode it
+ .save(vidFile.filepath.replace('.mkv', '.mp4'))
+ .on('end', res)
+ .on('error', (e: any) => console.log(e));
+ });
+ vidFile.filepath = vidFile.filepath.replace('.mkv', '.mp4');
+ format = '.mp4';
+ }
+ if (format.includes('quicktime')) {
+ let abort = false;
+ await new Promise<void>(res => {
+ ffmpeg.ffprobe(vidFile.filepath, (err: any, metadata: any) => {
+ if (metadata.streams.some((stream: any) => stream.codec_name === 'hevc')) {
+ abort = true;
+ }
+ res();
+ });
+ });
+ if (abort) {
+ // bcz: instead of aborting, we could convert the file using the code below to an mp4. Problem is that this takes a long time and will clog up the server.
+ // await new Promise(res =>
+ // ffmpeg(file.path)
+ // .videoCodec('libx264') // this will copy the data instead of reencode it
+ // .audioCodec('mp2')
+ // .save(vidFile.path.replace('.MOV', '.mp4').replace('.mov', '.mp4'))
+ // .on('end', res)
+ // );
+ // vidFile.path = vidFile.path.replace('.mov', '.mp4').replace('.MOV', '.mp4');
+ // format = '.mp4';
+ fs.unlink(filepath, () => {});
+ return { source: file, result: { name: 'Unsupported video format', message: `Could not upload unsupported file (${originalFilename}). Please convert to an .mp4` } };
+ }
+ }
+ if (videoFormats.includes(format) || format.includes('.webm')) {
+ return MoveParsedFile(vidFile, Directory.videos);
+ }
+ fs.unlink(filepath, () => {});
+ return { source: vidFile, result: { name: 'Unsupported video format', message: `Could not upload unsupported file (${originalFilename}). Please convert to an .mp4` } };
+ }
+ case 'application':
+ if (applicationFormats.includes(format)) {
+ const val = UploadPdf(file);
+ if (val) return val;
+ }
+ break;
+ case 'audio': {
+ const components = format.split(';');
+ if (components.length > 1) {
+ [format] = components;
+ }
+ if (audioFormats.includes(format)) {
+ return UploadAudio(file, format);
+ }
+ fs.unlink(filepath, () => {});
+ return { source: file, result: { name: 'Unsupported audio format', message: `Could not upload unsupported file (${originalFilename}). Please convert to an .mp3` } };
+ }
+ case 'text':
+ if (types[1] === 'csv') {
+ return UploadCsv(file);
+ }
+ break;
+ default:
+ }
+
+ console.log(red(`Ignoring unsupported file (${originalFilename}) with upload type (${mimetype}).`));
+ fs.unlink(filepath, () => {});
+ return { source: file, result: new Error(`Could not upload unsupported file (${originalFilename}) with upload type (${mimetype}).`) };
}
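+
+ // Illustrative entry point (editorial; assumes a formidable v3 form whose upload field is named 'file'):
+ //   const [, files] = await form.parse(req);
+ //   const responses = await Promise.all((files.file ?? []).map(f => DashUploadUtils.upload(f)));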
- /**
- * define the resizers to use
- * @param ext the extension
- * @returns an array of resize descriptions
- */
- export function imageResampleSizes(ext: string): DashUploadUtils.ImageResizer[] {
- return [
- { suffix: SizeSuffix.Original, width: 0 },
- ...[...(AcceptableMedia.imageFormats.includes(ext.toLowerCase()) ? Object.values(DashUploadUtils.Sizes) : [])].map(({ suffix, width }) => ({
- width,
- suffix,
- })),
- ];
+ export async function buildFileDirectories() {
+ if (!existsSync(publicDirectory)) {
+ console.error('\nPlease ensure that the following directory exists...\n');
+ console.log(publicDirectory);
+ process.exit(0);
+ }
+ if (!existsSync(filesDirectory)) {
+ console.error('\nPlease ensure that the following directory exists...\n');
+ console.log(filesDirectory);
+ process.exit(0);
+ }
+ const pending = Object.keys(Directory).map(sub => createIfNotExists(`${filesDirectory}/${sub}`));
+ return Promise.all(pending);
+ }
+
+ export interface RequestedImageSize {
+ width: number;
+ height: number;
+ type: string;
+ }
+
+ export interface ImageResizer {
+ width: number;
+ suffix: SizeSuffix;
}
}