aboutsummaryrefslogtreecommitdiff
path: root/src/server/DashUploadUtils.ts
blob: 1e55a885aef653936ad17adc460262c91df1f78d (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
import axios from 'axios';
import { exec, spawn } from 'child_process';
import { green, red } from 'colors';
import { ExifData, ExifImage } from 'exif';
import * as exifr from 'exifr';
import * as ffmpeg from 'fluent-ffmpeg';
import * as formidable from 'formidable';
import { File } from 'formidable';
import * as fs from 'fs';
import { createReadStream, createWriteStream, existsSync, readFileSync, rename, unlinkSync, writeFile } from 'fs';
import { Jimp } from 'jimp';
import * as md5File from 'md5-file';
import * as path from 'path';
import { basename } from 'path';
import * as parse from 'pdf-parse';
import * as request from 'request-promise';
import { Duplex, Stream } from 'stream';
import { Utils } from '../Utils';
import { createIfNotExists } from './ActionUtilities';
import { AzureManager } from './ApiManagers/AzureManager';
import { AcceptableMedia, Upload } from './SharedMediaTypes';
import { Directory, clientPathToFile, filesDirectory, pathToDirectory, publicDirectory, serverPathToFile } from './SocketData';
import { resolvedServerUrl } from './server_Initialization';

import { Worker, isMainThread, parentPort } from 'worker_threads';

// Worker-thread plumbing: Jimp's CPU-heavy image resampling is offloaded to a single
// worker thread so it doesn't block the server's main event loop.
enum workertasks {
    JIMP = 'jimp',
}
// Spawn the worker from this same file, but only on the main thread (so workers don't spawn workers).
// NOTE(review): `new Worker(__filename)` requires __filename to be an executable script at
// runtime — presumably the compiled .js; confirm this works under ts-node/dev setups.
const JimpWorker: Worker | undefined = isMainThread ? new Worker(__filename) : undefined;
// Queues an image-resample request on the worker thread (fire-and-forget; no completion signal).
export const workerResample = (imgSourcePath: string, outputPath: string, origSuffix: SizeSuffix, unlinkSource: boolean) => {
    JimpWorker?.postMessage({ task: workertasks.JIMP, imgSourcePath, outputPath, origSuffix, unlinkSource });
};

if (isMainThread) {
    // main thread code if needed ...
} else {
    // Worker thread code - listens for resample requests posted by workerResample on the main thread
    parentPort?.on('message', message => {
        switch (message.task) {
            case workertasks.JIMP:
                return workerResampleImage(message);
            default:
        }
    });

    // Resamples the image at imgSourcePath into the widths dictated by its extension,
    // writing each result next to outputPath with the matching size suffix, then
    // optionally deletes the source file.
    // NOTE(review): every SizeSuffix value except None ('') is truthy, so the single
    // 400-wide fallback below only triggers for an empty suffix — confirm intended.
    async function workerResampleImage(message: { imgSourcePath: string; outputPath: string; origSuffix: string; unlinkSource: boolean }) {
        const { imgSourcePath, outputPath, origSuffix, unlinkSource } = message;
        const sizes = !origSuffix ? [{ width: 400, suffix: SizeSuffix.Medium }] : DashUploadUtils.imageResampleSizes(path.extname(imgSourcePath));
        // a width of 0 (the Original entry) falls back to the image's native width below
        // prettier-ignore
        Jimp.read(imgSourcePath)
            .then(img => 
                sizes.forEach(({ width, suffix }) =>
                    img.resize({ w: width || img.bitmap.width })
                       .write(InjectSize(outputPath, suffix) as `${string}.${string}`)
                ))
            .catch(e => console.log('Error Jimp:', e))
            .finally(() => unlinkSource && unlinkSync(imgSourcePath));
    }
}

// eslint-disable-next-line @typescript-eslint/no-var-requires
const requestImageSize = require('../client/util/request-image-size');

// Filename suffixes distinguishing the stored size variants of an uploaded image
// (e.g. photo_m.jpg is the medium rendition of photo.jpg).
export enum SizeSuffix {
    Small = '_s',
    Medium = '_m',
    Large = '_l',
    Original = '_o',
    None = '',
}

/**
 * Inserts a size suffix between a filename's stem and its extension,
 * e.g. ("photo.jpg", SizeSuffix.Medium) -> "photo_m.jpg".
 * Note: the extension is lowercased in the result.
 */
export function InjectSize(filename: string, size: SizeSuffix) {
    const ext = path.extname(filename).toLowerCase();
    const stem = filename.slice(0, filename.length - ext.length);
    return `${stem}${size}${ext}`;
}

/**
 * Returns a regex matching file paths under this server's public directory
 * (…/Dash-Web<N>/src/server/public/…), with the path relative to public/
 * captured in group 1. Accepts both forward and back slashes.
 */
function isLocal() {
    const publicTreePattern = /Dash-Web[0-9]*[\\/]src[\\/]server[\\/]public[\\/](.*)/;
    return publicTreePattern;
}

/** True when the USE_AZURE environment variable is exactly the string 'true'. */
function usingAzure() {
    const { USE_AZURE } = process.env;
    return USE_AZURE === 'true';
}

export namespace DashUploadUtils {
    // A resize target: the output width plus the filename suffix identifying it.
    export interface Size {
        width: number;
        suffix: SizeSuffix;
    }

    // The standard resize targets produced for every uploaded image.
    export const Sizes: { [size: string]: Size } = {
        LARGE: { width: 800, suffix: SizeSuffix.Large },
        MEDIUM: { width: 400, suffix: SizeSuffix.Medium },
        SMALL: { width: 100, suffix: SizeSuffix.Small },
    };

    /** True when the url's file extension is one of the accepted image formats. */
    export function validateExtension(url: string) {
        const ext = path.extname(url).toLowerCase();
        return AcceptableMedia.imageFormats.includes(ext);
    }

    // HTTP header names consulted when inspecting a remote image's metadata.
    const size = 'content-length';
    const type = 'content-type';

    // Azure blob store endpoint and serverless resize-function url (only meaningful when USE_AZURE is set).
    const { BLOBSTORE_URL, RESIZE_FUNCTION_URL } = process.env;

    const { imageFormats, videoFormats, applicationFormats, audioFormats } = AcceptableMedia; // TODO:glr

    /** True when a file with the given name already exists in the destination media directory. */
    export function fExists(name: string, destination: Directory) {
        return existsSync(serverPathToFile(destination, name));
    }

    /** Returns both the client-facing and server-side paths for a file in the given directory. */
    export function getAccessPaths(directory: Directory, fileName: string) {
        const client = clientPathToFile(directory, fileName);
        const server = serverPathToFile(directory, fileName);
        return { client, server };
    }
    /**
     * Concatenates the given video files (in order) into a single .mp4 using ffmpeg's
     * concat demuxer, deletes the source segments and the temporary list file, and
     * returns the access paths of the combined video.
     * @param filePaths absolute paths of the video segments, in playback order
     */
    export async function concatVideos(filePaths: string[]): Promise<Upload.AccessPathInfo> {
        // ffmpeg's concat demuxer reads its inputs from a text file, one `file '<path>'` line
        // per segment. Use a GUID-based name so concurrent concatenations don't clobber each
        // other's list file (the previous fixed 'concat.txt' name raced).
        const inputListName = `concat-${Utils.GenerateGuid()}.txt`;
        const textFilePath = path.join(filesDirectory, inputListName);
        const filePathsText = filePaths.map(filePath => `file '${filePath}'`).join('\n');
        // write the list file to the file system, propagating the actual error on failure
        await new Promise<void>((res, reject) => {
            writeFile(textFilePath, filePathsText, err => {
                if (err) {
                    console.log(err);
                    reject(err);
                } else res();
            });
        });

        // unique output file name, written into the videos directory
        const outputFileName = `output-${Utils.GenerateGuid()}.mp4`;
        const outputFilePath = path.join(pathToDirectory(Directory.videos), outputFileName);

        // concatenate the segments; '-safe 0' permits absolute paths inside the list file
        try {
            await new Promise((resolve, reject) => {
                ffmpeg()
                    .input(textFilePath)
                    .inputOptions(['-f concat', '-safe 0'])
                    // .outputOptions('-c copy')
                    // .videoCodec("copy")
                    .save(outputFilePath)
                    .on('error', err => {
                        console.log(err);
                        reject(err);
                    })
                    .on('end', resolve);
            });
        } finally {
            // always remove the temporary list file, even when ffmpeg fails (it previously leaked)
            unlinkSync(textFilePath);
        }
        // the segments are no longer needed once the combined video exists
        filePaths.forEach(filePath => unlinkSync(filePath));

        // return the path(s) to the output file
        return {
            accessPaths: getAccessPaths(Directory.videos, outputFileName),
        };
    }

    /**
     * Builds a FileResponse for a file that already exists on the server (no move or
     * copy performed), synthesizing the formidable-style metadata callers expect.
     */
    function resolveExistingFile(name: string, pat: string, directory: Directory, mimetype?: string | null, duration?: number, rawText?: string): Upload.FileResponse<Upload.FileInformation> {
        const data = {
            size: 0,
            filepath: pat,
            name,
            type: mimetype ?? '',
            originalFilename: name,
            newFilename: path.basename(pat),
            mimetype: mimetype || null,
            hashAlgorithm: false as falsetype,
        };
        const file = {
            ...data,
            toJSON: () => ({ ...data, length: 0, filename: data.filepath.replace(/.*\//, ''), mtime: new Date(), mimetype: mimetype || null }),
        };
        return {
            source: file,
            result: {
                accessPaths: { agnostic: getAccessPaths(directory, data.filepath) },
                rawText,
                duration,
            },
        };
    }

    // Per-upload progress messages, keyed by the client-supplied tracking id (or video id).
    export const uploadProgress = new Map<string, string>();

    // Returns the latest progress message for a youtube download, defaulting to a
    // "pending" message before any progress has been reported.
    export function QueryYoutubeProgress(videoId: string) {
        // console.log(`PROGRESS:${videoId}`, (user as any)?.email);
        return uploadProgress.get(videoId) ?? 'pending data upload';
    }

    /**
     * Relocates a parsed upload from its temporary location into one of the media
     * asset directories (a thin wrapper around fs.rename).
     * @param file the formidable file to relocate
     * @param destination the media directory to move it into
     * @param suffix optional suffix appended to the resolved file name
     * @param text optional raw text extracted from the file, passed through to the result
     * @param duration optional media duration, passed through to the result
     * @param targetName optional name to use instead of the temporary file's basename
     * @returns the rename error, or the access paths of the relocated file
     */
    export async function MoveParsedFile(file: formidable.File, destination: Directory, suffix?: string, text?: string, duration?: number, targetName?: string): Promise<Upload.FileResponse> {
        const { filepath } = file;
        const name = (targetName ?? path.basename(filepath)) + (suffix ?? '');
        const destinationPath = serverPathToFile(destination, name);
        return new Promise(resolve => {
            rename(filepath, destinationPath, error => {
                const success = {
                    accessPaths: { agnostic: getAccessPaths(destination, name) },
                    rawText: text,
                    duration,
                };
                resolve({ source: file, result: error ?? success });
            });
        });
    }

    /**
     * Downloads the image at `source` and attempts to read its EXIF block.
     * A first pass with the `exif` library detects whether EXIF data is present;
     * when it is, the richer `exifr` parser produces the returned data.
     */
    const parseExifData = async (source: string) => {
        const image = await request.get(source, { encoding: null });
        const error = await new Promise<string | undefined>(resolve => {
            // eslint-disable-next-line no-new
            new ExifImage({ image }, exifError => resolve(exifError?.message));
        });
        if (error) {
            return { data: undefined, error };
        }
        return { data: await exifr.parse(image), error };
    };
    /**
     * Based on the url's classification as local or remote, gleans
     * as much information as possible about the specified image
     *
     * @param sourceIn is the path or url to the image in question
     * @returns the inspection results (exif data, requestable url, native dimensions,
     * content size/type, generated filename for data uris) or an Error on failure
     */
    export const InspectImage = async (sourceIn: string): Promise<Upload.InspectionResults | Error> => {
        let source = sourceIn;
        const rawMatches = /^data:image\/([a-z]+);base64,(.*)/.exec(source);
        let filename: string | undefined;
        /**
         * Edge case: the "image" may arrive as a base64-encoded data uri rather than a
         * proper file (e.g. dropped directly onto the canvas). Manually write it out to
         * storage and then carry on as if it had been put there by the Formidable parser.
         */
        if (rawMatches !== null) {
            const [ext, data] = rawMatches.slice(1, 3);
            filename = `upload_${Utils.GenerateGuid()}.${ext}`;
            const resolved = filename;
            if (usingAzure()) {
                await AzureManager.UploadBase64ImageBlob(resolved, data);
                source = `${AzureManager.BASE_STRING}/${resolved}`;
            } else {
                // the decoded image lives on this machine, so its server-side file path is the source
                // (a dead assignment of the client url that was immediately overwritten has been removed)
                source = serverPathToFile(Directory.images, resolved);
                const error = await new Promise<Error | null>(resolve => {
                    writeFile(serverPathToFile(Directory.images, resolved), data, 'base64', resolve);
                });
                if (error !== null) {
                    return error;
                }
            }
        }
        let resolvedUrl: string;
        /**
         *
         * At this point, we want to take whatever url we have and make sure it's requestable.
         * Anything that's hosted by some other website already is, but if the url is a local file url
         * (locates the file on this server machine), we have to resolve the client side url by cutting out the
         * basename subtree (i.e. /images/<some_guid>.<ext>) and put it on the end of the server's url.
         *
         * This can always be localhost, regardless of whether this is on the server or not, since we (the server, not the client)
         * will be the ones making the request, and from the perspective of dash-release or dash-web, localhost:<port> refers to the same thing
         * as the full dash-release.eastus.cloudapp.azure.com:<port>.
         */
        const matches = isLocal().exec(source);
        if (matches === null) {
            resolvedUrl = source;
        } else {
            resolvedUrl = `${resolvedServerUrl}/${matches[1].split('\\').join('/')}`;
        }
        // See header comments: not all image files have exif data (I believe only JPG is the only format that can have it)
        const exifData = await parseExifData(resolvedUrl);
        const results = {
            exifData,
            requestable: resolvedUrl,
        };

        // Use the request library to parse out file level image information in the headers
        const headerResult = await new Promise<{ headers: { [key: string]: string } }>((resolve, reject) => {
            request.head(resolvedUrl, (error, res) => (error ? reject(error) : resolve(res as { headers: { [key: string]: string } })));
        }).catch(e => {
            console.log('Error processing headers: ', e);
        });
        // a failed HEAD request yields undefined above; the typeof guard falls back to empty headers
        const { headers } = headerResult !== null && typeof headerResult === 'object' ? headerResult : { headers: {} as { [key: string]: string } };

        try {
            // Compute the native width and height of the image with an npm module
            const { width: nativeWidth, height: nativeHeight } = await requestImageSize(resolvedUrl);
            // Bundle up the information into an object
            return {
                source,
                contentSize: parseInt(headers[size], 10),
                contentType: headers[type],
                nativeWidth,
                nativeHeight,
                filename,
                ...results,
            };
        } catch (e: unknown) {
            console.log(e);
            return new Error(e ? e.toString?.() : 'unknown error');
        }
    };

    /**
     * define the resizers to use for a given file type
     * @param ext the file extension (with leading dot)
     * @returns resize descriptions: always the original (width 0 = keep native size),
     * plus the standard small/medium/large widths when the extension is a supported image format
     */
    export function imageResampleSizes(ext: string): DashUploadUtils.ImageResizer[] {
        const resampled: DashUploadUtils.ImageResizer[] = AcceptableMedia.imageFormats.includes(ext.toLowerCase())
            ? Object.values(DashUploadUtils.Sizes).map(({ suffix, width }) => ({ width, suffix }))
            : [];
        return [{ suffix: SizeSuffix.Original, width: 0 }, ...resampled];
    }

    /**
     * outputResizedImages writes the source image out once per configured size, then hands
     * the actual resampling off to the Jimp worker thread.
     *
     * Note the two phases: each sized file initially receives a byte-for-byte copy of the
     * original image (so every path exists immediately), and workerResample later overwrites
     * them asynchronously with properly resized versions.
     * @param imgSourcePath file path for image being resized
     * @param outputFileName the basename (no suffix) of the outputted file
     * @param unlinkSource whether the worker should delete the source file when it finishes
     * @returns a map with suffixes as keys and resized filenames as values.
     */
    export async function outputResizedImages(imgSourcePath: string, outputFileName: string, unlinkSource: boolean) {
        const writtenFiles: { [suffix: string]: string } = {};
        const outputPath = path.resolve(pathToDirectory(Directory.images), outputFileName);
        const sizes = imageResampleSizes(path.extname(outputFileName));

        // buffer the entire source image into a stream that feeds every size's write stream
        const imgReadStream = new Duplex();
        imgReadStream.push(fs.readFileSync(imgSourcePath));
        imgReadStream.push(null);
        await Promise.all(
            sizes.map(({ suffix }) => 
                new Promise<unknown>(res =>
                    imgReadStream.pipe(createWriteStream(writtenFiles[suffix] = InjectSize(outputPath, suffix))).on('close', res)
                )
        )); // prettier-ignore

        // fire-and-forget: the worker replaces the copies above with resized images
        workerResample(imgSourcePath, outputPath, SizeSuffix.Original, unlinkSource);
        return writtenFiles;
    }

    /**
     * UploadInspectedImage() takes an image with its metadata. If Azure is being used, this method will call the Azure function
     * to execute the resizing. If Azure is not used, the function will begin to resize the image.
     *
     * @param metadata metadata object from InspectImage()
     * @param filename the name of the file
     * @param prefix the prefix to use, which will be set to '' if none is provided.
     * @param cleanUp a boolean indicating if the files should be deleted after upload. True by default.
     * @returns the accessPaths for the resized files.
     */
    export const UploadInspectedImage = async (metadata: Upload.InspectionResults, filename: string, prefix = '', cleanUp = true): Promise<Upload.ImageInformation> => {
        const { requestable, source, ...remaining } = metadata;
        // fall back to a generated name derived from the content type when no filename was supplied
        const resolved = filename || `${prefix}upload_${Utils.GenerateGuid()}.${remaining.contentType.split('/')[1].toLowerCase()}`;
        const { images } = Directory;
        const information: Upload.ImageInformation = {
            accessPaths: {
                agnostic: usingAzure()
                    ? {
                          client: BLOBSTORE_URL + `/${resolved}`,
                          server: BLOBSTORE_URL + `/${resolved}`,
                      }
                    : getAccessPaths(images, resolved),
            },
            ...metadata,
        };
        let writtenFiles: { [suffix: string]: string };

        if (usingAzure()) {
            // Azure: delegate resizing to the serverless function, which reports what it wrote
            if (!RESIZE_FUNCTION_URL) {
                throw new Error('Resize function URL not provided.');
            }

            try {
                const response = await axios.post(RESIZE_FUNCTION_URL, {
                    url: requestable,
                    filename: resolved,
                });
                writtenFiles = response.data.writtenFiles;
            } catch (err) {
                console.error(err);
                writtenFiles = {};
            }
        } else {
            // only delete the source afterwards when it lives in our local public tree and cleanup was requested
            const unlinkSrcWhenFinished = isLocal().test(source) && cleanUp;
            try {
                writtenFiles = await outputResizedImages(metadata.source, resolved, unlinkSrcWhenFinished);
            } catch (e) {
                // input is a blob or other, try reading it to create a metadata source file.
                const reqSource = request(metadata.source);
                const readStream: Stream = reqSource instanceof Promise ? await reqSource : reqSource;
                // NOTE(review): readSource is a bare filename, so this temp file lands in the process cwd — confirm intended
                const readSource = `${prefix}upload_${Utils.GenerateGuid()}.${metadata.contentType.split('/')[1].toLowerCase()}`;
                await new Promise<void>((res, rej) => {
                    readStream
                        .pipe(createWriteStream(readSource))
                        .on('close', () => res())
                        .on('error', () => rej());
                });
                writtenFiles = await outputResizedImages(readSource, resolved, unlinkSrcWhenFinished);
                fs.unlink(readSource, err => console.log("Couldn't unlink temporary image file:" + readSource, err));
            }
        }
        // expose each written size variant through its own access-path entry keyed by suffix
        Array.from(Object.keys(writtenFiles)).forEach(suffix => {
            information.accessPaths[suffix] = getAccessPaths(images, writtenFiles[suffix]);
        });

        return information;
    };

    /**
     * Uploads an image specified by @param source to Dash's /public/files/
     * directory, and returns information generated during that upload.
     *
     * @param {string} source either the absolute path of an already uploaded image or
     * the url of a remote image
     * @param {string} filename what to call the image; if omitted, falls back to the
     * name produced during inspection (or prefix_upload_{GUID})
     * @param {string} prefix prepended to the generated image name when @param filename
     * is not specified
     *
     * @returns {ImageUploadInformation | Error}
     * 1) the paths to the uploaded images (plural due to resizing)
     * 2) the exif data embedded in the image, or the error explaining why exif couldn't be parsed
     * 3) the size of the image, in bytes (4432130)
     * 4) the content type of the image, i.e. image/(jpeg | png | ...)
     */
    export const UploadImage = async (source: string, filename?: string, prefix: string = ''): Promise<Upload.ImageInformation | Error> => {
        const inspection = await InspectImage(source);
        if (inspection instanceof Error) {
            return { name: inspection.name, message: inspection.message };
        }
        return UploadInspectedImage(inspection, filename || inspection.filename || '', prefix);
    };

    type md5 = 'md5';
    type falsetype = false;
    /**
     * Archives a youtube video into the videos directory with yt-dlp, publishing
     * progress and error messages through uploadProgress under overwriteId.
     * When the video file already exists on disk, only its duration is recomputed
     * and the existing file is returned.
     * @param videoId the youtube video id to archive
     * @param overwriteId the uploadProgress key the client polls for status
     */
    export function uploadYoutube(videoId: string, overwriteId: string): Promise<Upload.FileResponse> {
        return new Promise<Upload.FileResponse<Upload.FileInformation>>(res => {
            const name = videoId;
            // a leading '-' in the id would be parsed as a command-line flag, so mangle it
            const filepath = name.replace(/^-/, '__') + '.mp4';
            const finalPath = serverPathToFile(Directory.videos, filepath);
            if (existsSync(finalPath)) {
                uploadProgress.set(overwriteId, 'computing duration');
                // eslint-disable-next-line @typescript-eslint/no-explicit-any
                exec(`yt-dlp -o ${finalPath} "https://www.youtube.com/watch?v=${videoId}" --get-duration`, (error: any, stdout: any /* , stderr: any */) => {
                    // --get-duration prints [HH:][MM:]SS; reversed, index 0 = seconds, 1 = minutes, 2 = hours
                    const time = Array.from(stdout.trim().split(':')).reverse();
                    // hours contribute 3600 seconds each (previous code multiplied by 1000 * 60)
                    const duration = (time.length > 2 ? Number(time[2]) * 60 * 60 : 0) + (time.length > 1 ? Number(time[1]) * 60 : 0) + (time.length > 0 ? Number(time[0]) : 0);
                    res(resolveExistingFile(name, filepath, Directory.videos, 'video/mp4', duration, undefined));
                });
            } else {
                uploadProgress.set(overwriteId, 'starting download');
                // cap downloads at 100MB and request mp4 so the client player can handle the result
                const ytdlp = spawn(`yt-dlp`, ['-o', filepath, `https://www.youtube.com/watch?v=${videoId}`, '--max-filesize', '100M', '-f', 'mp4']);

                // eslint-disable-next-line @typescript-eslint/no-explicit-any
                ytdlp.stdout.on('data', (data: any) => uploadProgress.set(overwriteId, data.toString()));

                let errors = '';
                // eslint-disable-next-line @typescript-eslint/no-explicit-any
                ytdlp.stderr.on('data', (data: any) => {
                    uploadProgress.set(overwriteId, 'error:' + data.toString());
                    errors = data.toString();
                });

                // eslint-disable-next-line @typescript-eslint/no-explicit-any
                ytdlp.on('exit', (code: any) => {
                    if (code) {
                        // non-zero exit: surface the collected stderr text as the failure message
                        res({
                            source: {
                                size: 0,
                                filepath: name,
                                originalFilename: name,
                                newFilename: name,
                                mimetype: 'video',
                                hashAlgorithm: 'md5',
                                toJSON: () => ({ newFilename: name, filepath, mimetype: 'video', mtime: new Date(), size: 0, length: 0, originalFilename: name }),
                            },
                            result: { name: 'failed youtube query', message: `Could not archive video.  ${code ? errors : uploadProgress.get(videoId)}` },
                        });
                    } else {
                        uploadProgress.set(overwriteId, 'computing duration');
                        // note: 'yt-dlp -o' (the original had a typo, 'yt-dlp-o', which could never run)
                        exec(`yt-dlp -o ${filepath} "https://www.youtube.com/watch?v=${videoId}" --get-duration`, (/* error: any, stdout: any, stderr: any */) => {
                            // const time = Array.from(stdout.trim().split(':')).reverse();
                            // const duration = (time.length > 2 ? Number(time[2]) * 60 * 60 : 0) + (time.length > 1 ? Number(time[1]) * 60 : 0) + (time.length > 0 ? Number(time[0]) : 0);
                            const data = { size: 0, filepath, name, mimetype: 'video', originalFilename: name, newFilename: name, hashAlgorithm: 'md5' as md5, type: 'video/mp4' };
                            const file = { ...data, toJSON: () => ({ ...data, length: 0, filename: data.filepath.replace(/.*\//, ''), mtime: new Date() }) };
                            MoveParsedFile(file, Directory.videos).then(output => res(output));
                        });
                    }
                });
            }
        });
    }
    // formats whose extension must be explicitly re-appended when the file is moved
    const manualSuffixes = ['.webm'];

    /** Moves an uploaded audio file into the audio directory, appending the extension
     *  as a suffix for formats (currently .webm) that require it. */
    async function UploadAudio(file: File, format: string) {
        return MoveParsedFile(file, Directory.audio, manualSuffixes.includes(format) ? format : undefined);
    }

    /**
     * Uploads a pdf. The stored name is the md5 hash of the file's contents, so a pdf
     * that was uploaded before is detected and its cached extracted text is reused.
     * Otherwise the text is extracted with pdf-parse, written alongside the pdf in the
     * text directory, and the file is moved into the pdfs directory.
     */
    async function UploadPdf(file: File) {
        const fileKey = (await md5File(file.filepath)) + '.pdf';
        const textFilename = `${fileKey.substring(0, fileKey.length - 4)}.txt`;
        if (fExists(fileKey, Directory.pdfs) && fExists(textFilename, Directory.text)) {
            // duplicate upload: discard the temp file and stream back the cached text
            fs.unlink(file.filepath, () => {});
            return new Promise<Upload.FileResponse>(res => {
                const readStream = createReadStream(serverPathToFile(Directory.text, textFilename));
                let rawText = '';
                readStream
                    .on('data', chunk => {
                        rawText += chunk.toString();
                    })
                    .on('end', () => res(resolveExistingFile(file.originalFilename ?? '', fileKey, Directory.pdfs, file.mimetype, undefined, rawText)));
            });
        }
        const dataBuffer = readFileSync(file.filepath);
        // parse rejects on malformed pdfs; capture the error so it can be reported below
        // (previously the Error fell through the truthiness check and crashed on result.text)
        const result: parse.Result | Error = await parse(dataBuffer).catch((e: Error) => e);
        if (result && !(result instanceof Error)) {
            await new Promise<void>((resolve, reject) => {
                const writeStream = createWriteStream(serverPathToFile(Directory.text, textFilename));
                writeStream.write(result.text, error => (error ? reject(error) : resolve()));
            });
            return MoveParsedFile(file, Directory.pdfs, undefined, result.text, undefined, fileKey);
        }
        return { source: file, result: { name: 'failed pdf upload', message: `Could not upload (${file.originalFilename}).${result}` } };
    }

    /** Moves an uploaded csv into the csv directory, passing its full text along as rawText. */
    async function UploadCsv(file: File) {
        // read the whole file as text so callers receive its contents with the response
        const rawText = readFileSync(file.filepath, 'utf8');
        return MoveParsedFile(file, Directory.csv, undefined, rawText);
    }

    /**
     * Dispatches an uploaded file to the matching media handler based on its mimetype
     * category (image/video/application/audio/text), converting compatible video
     * containers and rejecting unsupported formats. The temporary file is deleted
     * whenever the upload is refused.
     */
    export async function upload(file: File /* , overwriteGuid?: string */): Promise<Upload.FileResponse> {
        // const isAzureOn = usingAzure();
        const { mimetype, filepath, originalFilename } = file;
        const types = mimetype?.split('/') ?? [];
        // uploadProgress.set(overwriteGuid ?? name, 'uploading'); // If the client sent a guid it uses to track upload progress, use that guid. Otherwise, use the file's name.

        const category = types[0];
        let format = `.${types[1]}`;
        console.log(green(`Processing upload of file (${originalFilename}) and format (${format}) with upload type (${mimetype}) in category (${category}).`));

        switch (category) {
            case 'image':
                if (imageFormats.includes(format)) {
                    const result = await UploadImage(filepath, basename(filepath));
                    return { source: file, result };
                }
                fs.unlink(filepath, () => {});
                return { source: file, result: { name: 'Unsupported image format', message: `Could not upload unsupported file (${originalFilename}). Please convert to an .jpg` } };
            case 'video': {
                const vidFile = file;
                // mkv containers are remuxed (streams copied, not re-encoded) to mp4 so browsers can play them
                if (format.includes('x-matroska')) {
                    await new Promise(res => {
                        ffmpeg(vidFile.filepath)
                            .videoCodec('copy') // this will copy the data instead of reencode it
                            .save(vidFile.filepath.replace('.mkv', '.mp4'))
                            .on('end', res)
                            .on('error', console.log);
                    });
                    vidFile.filepath = vidFile.filepath.replace('.mkv', '.mp4');
                    format = '.mp4';
                }
                // quicktime files carrying an hevc stream can't be cheaply remuxed, so refuse them
                if (format.includes('quicktime')) {
                    let abort = false;
                    await new Promise<void>(res => {
                        // eslint-disable-next-line @typescript-eslint/no-explicit-any
                        ffmpeg.ffprobe(vidFile.filepath, (err: any, metadata: ffmpeg.FfprobeData) => {
                            if (metadata.streams.some(stream => stream.codec_name === 'hevc')) {
                                abort = true;
                            }
                            res();
                        });
                    });
                    if (abort) {
                        // bcz: instead of aborting, we could convert the file using the code below to an mp4.  Problem is that this takes a long time and will clog up the server.
                        // await new Promise(res =>
                        //     ffmpeg(file.path)
                        //         .videoCodec('libx264') // this will copy the data instead of reencode it
                        //         .audioCodec('mp2')
                        //         .save(vidFile.path.replace('.MOV', '.mp4').replace('.mov', '.mp4'))
                        //         .on('end', res)
                        // );
                        // vidFile.path = vidFile.path.replace('.mov', '.mp4').replace('.MOV', '.mp4');
                        // format = '.mp4';
                        fs.unlink(filepath, () => {});
                        return { source: file, result: { name: 'Unsupported video format', message: `Could not upload unsupported file (${originalFilename}). Please convert to an .mp4` } };
                    }
                }
                if (videoFormats.includes(format) || format.includes('.webm')) {
                    return MoveParsedFile(vidFile, Directory.videos);
                }
                fs.unlink(filepath, () => {});
                return { source: vidFile, result: { name: 'Unsupported video format', message: `Could not upload unsupported file (${originalFilename}). Please convert to an .mp4` } };
            }
            case 'application':
                if (applicationFormats.includes(format)) {
                    // UploadPdf always yields a (possibly failed) response; the previous
                    // `if (val)` truthiness check on the returned Promise was dead code
                    return UploadPdf(file);
                }
                break;
            case 'audio': {
                // some browsers append codec info after a ';' — strip it before matching
                const components = format.split(';');
                if (components.length > 1) {
                    [format] = components;
                }
                if (audioFormats.includes(format)) {
                    return UploadAudio(file, format);
                }
                fs.unlink(filepath, () => {});
                return { source: file, result: { name: 'Unsupported audio format', message: `Could not upload unsupported file (${originalFilename}). Please convert to an .mp3` } };
            }
            case 'text':
                if (types[1] === 'csv') {
                    return UploadCsv(file);
                }
                break;
            default:
        }

        console.log(red(`Ignoring unsupported file (${originalFilename}) with upload type (${mimetype}).`));
        fs.unlink(filepath, () => {});
        return { source: file, result: new Error(`Could not upload unsupported file (${originalFilename}) with upload type (${mimetype}).`) };
    }

    /**
     * Verifies that the public and files root directories exist — exiting with a
     * non-zero code if not, so calling scripts detect the failure (the previous
     * exit(0) signalled success on this fatal error) — then creates any missing
     * media subdirectories.
     */
    export async function buildFileDirectories() {
        // these two roots must be provisioned manually; refuse to start without them
        for (const required of [publicDirectory, filesDirectory]) {
            if (!existsSync(required)) {
                console.error('\nPlease ensure that the following directory exists...\n');
                console.log(required);
                process.exit(1);
            }
        }
        const pending = Object.keys(Directory).map(sub => createIfNotExists(`${filesDirectory}/${sub}`));
        return Promise.all(pending);
    }

    // Dimensions and format type reported by the request-image-size module.
    export interface RequestedImageSize {
        width: number;
        height: number;
        type: string;
    }

    // A single resize target: output width (0 = native width) and its filename suffix.
    export interface ImageResizer {
        width: number;
        suffix: SizeSuffix;
    }
}