about summary refs log tree commit diff
path: root/src/server/DashUploadUtils.ts
diff options
context:
space:
mode:
Diffstat (limited to 'src/server/DashUploadUtils.ts')
-rw-r--r--src/server/DashUploadUtils.ts46
1 file changed, 30 insertions, 16 deletions
diff --git a/src/server/DashUploadUtils.ts b/src/server/DashUploadUtils.ts
index a8e09818e..e2419e60a 100644
--- a/src/server/DashUploadUtils.ts
+++ b/src/server/DashUploadUtils.ts
@@ -26,6 +26,7 @@ import { ffprobe, FfmpegCommand } from 'fluent-ffmpeg';
import * as fs from 'fs';
import * as md5File from 'md5-file';
import * as autorotate from 'jpeg-autorotate';
+const { Duplex } = require('stream'); // Native Node Module
export enum SizeSuffix {
Small = '_s',
@@ -349,7 +350,11 @@ export namespace DashUploadUtils {
if (metadata instanceof Error) {
return { name: metadata.name, message: metadata.message };
}
- return UploadInspectedImage(metadata, filename || metadata.filename, prefix);
+ const outputFile = filename || metadata.filename;
+ if (!outputFile) {
+ return { name: source, message: 'output file not found' };
+ }
+ return UploadInspectedImage(metadata, outputFile, prefix);
};
export async function buildFileDirectories() {
@@ -399,13 +404,14 @@ export namespace DashUploadUtils {
const response = await AzureManager.UploadBase64ImageBlob(resolved, data);
source = `${AzureManager.BASE_STRING}/${resolved}`;
} else {
+ source = `${resolvedServerUrl}${clientPathToFile(Directory.images, resolved)}`;
+ source = serverPathToFile(Directory.images, resolved);
const error = await new Promise<Error | null>(resolve => {
writeFile(serverPathToFile(Directory.images, resolved), data, 'base64', resolve);
});
if (error !== null) {
return error;
}
- source = `${resolvedServerUrl}${clientPathToFile(Directory.images, resolved)}`;
}
}
let resolvedUrl: string;
@@ -514,7 +520,7 @@ export namespace DashUploadUtils {
* @param cleanUp a boolean indicating if the files should be deleted after upload. True by default.
* @returns the accessPaths for the resized files.
*/
- export const UploadInspectedImage = async (metadata: Upload.InspectionResults, filename?: string, prefix = '', cleanUp = true): Promise<Upload.ImageInformation> => {
+ export const UploadInspectedImage = async (metadata: Upload.InspectionResults, filename: string, prefix = '', cleanUp = true): Promise<Upload.ImageInformation> => {
const { requestable, source, ...remaining } = metadata;
const resolved = filename || `${prefix}upload_${Utils.GenerateGuid()}.${remaining.contentType.split('/')[1].toLowerCase()}`;
const { images } = Directory;
@@ -593,35 +599,43 @@ export namespace DashUploadUtils {
force: true,
};
+ async function correctRotation(imgSourcePath: string) {
+ const buffer = fs.readFileSync(imgSourcePath);
+ try {
+ return (await autorotate.rotate(buffer, { quality: 30 })).buffer;
+ } catch (e) {
+ return buffer;
+ }
+ }
+
/**
* outputResizedImages takes in a readable stream and resizes the images according to the sizes defined at the top of this file.
*
* The new images will be saved to the server with the corresponding prefixes.
- * @param streamProvider a Stream of the image to process, taken from the /parsed_files location
+ * @param imgSourcePath file path for image being resized
* @param outputFileName the basename (No suffix) of the outputted file.
* @param outputDirectory the directory to output to, usually Directory.Images
* @returns a map with suffixes as keys and resized filenames as values.
*/
- export async function outputResizedImages(sourcePath: string, outputFileName: string, outputDirectory: string) {
+ export async function outputResizedImages(imgSourcePath: string, outputFileName: string, outputDirectory: string) {
const writtenFiles: { [suffix: string]: string } = {};
const sizes = imageResampleSizes(path.extname(outputFileName));
+
+ const imgBuffer = await correctRotation(imgSourcePath);
+ const imgReadStream = new Duplex();
+ imgReadStream.push(imgBuffer);
+ imgReadStream.push(null);
const outputPath = (suffix: SizeSuffix) => path.resolve(outputDirectory, (writtenFiles[suffix] = InjectSize(outputFileName, suffix)));
await Promise.all(
sizes.filter(({ width }) => !width).map(({ suffix }) =>
- new Promise<void>(res => createReadStream(sourcePath).pipe(createWriteStream(outputPath(suffix))).on('close', res))
+ new Promise<void>(res => imgReadStream.pipe(createWriteStream(outputPath(suffix))).on('close', res))
)); // prettier-ignore
- const fileIn = fs.readFileSync(sourcePath);
- let buffer: any;
- try {
- const { buffer2 } = await autorotate.rotate(fileIn, { quality: 30 });
- buffer = buffer2;
- } catch (e) {}
- return Jimp.read(buffer ?? fileIn)
+ return Jimp.read(imgBuffer)
.then(async (img: any) => {
- await Promise.all( sizes.filter(({ width }) => width) .map(({ width, suffix }) =>
- img = img.resize(width, Jimp.AUTO).write(outputPath(suffix))
- )); // prettier-ignore
+ await Promise.all( sizes.filter(({ width }) => width).map(({ width, suffix }) =>
+ img = img.resize(width, Jimp.AUTO).write(outputPath(suffix))
+ )); // prettier-ignore
return writtenFiles;
})
.catch((e: any) => {