-rw-r--r--   src/client/documents/Documents.ts       |  2
-rw-r--r--   src/server/ApiManagers/AzureManager.ts  | 13
-rw-r--r--   src/server/DashUploadUtils.ts           | 78
3 files changed, 57 insertions, 36 deletions
diff --git a/src/client/documents/Documents.ts b/src/client/documents/Documents.ts
index 43c9d2e7a..2532ef226 100644
--- a/src/client/documents/Documents.ts
+++ b/src/client/documents/Documents.ts
@@ -1736,9 +1736,7 @@ export namespace DocUtils {
return;
}
const full = { ...options, _width: 400, title: name };
- // const pathname = Utils.prepend(result.accessPaths.agnostic.client);
const pathname = result.accessPaths.agnostic.client;
- // const pathname = result.accessPaths.azure.client;
const doc = await DocUtils.DocumentFromType(type, pathname, full, overwriteDoc);
if (doc) {
const proto = Doc.GetProto(doc);
diff --git a/src/server/ApiManagers/AzureManager.ts b/src/server/ApiManagers/AzureManager.ts
index e105f5d80..12bb98ad0 100644
--- a/src/server/ApiManagers/AzureManager.ts
+++ b/src/server/ApiManagers/AzureManager.ts
@@ -1,9 +1,7 @@
import { ContainerClient, BlobServiceClient } from "@azure/storage-blob";
-// import * as dotenv from 'dotenv';
import * as fs from "fs";
import { Readable, Stream } from "stream";
-// dotenv.config();
-const AZURE_STORAGE_CONNECTION_STRING = "DefaultEndpointsProtocol=https;AccountName=dashblobstore;AccountKey=3i+E5XkCz3TJ0m5QOatiEnbRACz9V1qCW72L6ldiYGH1tLdfJWa2eQoRfYmPA68lx1a6YAcfYJfWHadIxQvhGQ==;EndpointSuffix=core.windows.net";
+const AZURE_STORAGE_CONNECTION_STRING = process.env.AZURE_STORAGE_CONNECTION_STRING;
export class AzureManager {
private _containerClient: ContainerClient;
@@ -15,9 +13,8 @@ export class AzureManager {
constructor() {
if (!AZURE_STORAGE_CONNECTION_STRING) {
- throw Error("Azure Storage Connection String Not Found");
+ throw new Error("Azure Storage Connection String Not Found");
}
- // this._blobServiceClient = BlobServiceClient.fromConnectionString(process.env.AZURE_STORAGE_CONNECTION_STRING);
this._blobServiceClient = BlobServiceClient.fromConnectionString(AZURE_STORAGE_CONNECTION_STRING);
this._containerClient = this.BlobServiceClient.getContainerClient(AzureManager.CONTAINER_NAME);
}
@@ -35,7 +32,6 @@ export class AzureManager {
}
public static UploadBlob(filename: string, filepath: string, filetype: string) {
- console.log("Upload Blob File");
const blockBlobClient = this.Instance.ContainerClient.getBlockBlobClient(filename);
const blobOptions = { blobHTTPHeaders: { blobContentType: filetype }};
const stream = fs.createReadStream(filepath);
@@ -43,20 +39,17 @@ export class AzureManager {
}
public static UploadBlobStream(stream: Readable, filename: string, filetype: string) {
- console.log("Upload Blob Stream: %s, %s", filename, filetype);
const blockBlobClient = this.Instance.ContainerClient.getBlockBlobClient(filename);
const blobOptions = { blobHTTPHeaders: { blobContentType: filetype }};
return blockBlobClient.uploadStream(stream, undefined, undefined, blobOptions);
}
public static DeleteBlob(filename: string) {
- console.log("Delete Blob from filename");
const blockBlobClient = this.Instance.ContainerClient.getBlockBlobClient(filename);
return blockBlobClient.deleteIfExists();
}
public static async GetBlobs() {
- console.log("Get Blobs");
const foundBlobs = [];
for await (const blob of this.Instance.ContainerClient.listBlobsFlat()) {
console.log(`${blob.name}`);
@@ -71,4 +64,4 @@ export class AzureManager {
return foundBlobs;
}
-}
\ No newline at end of file
+}
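With the hardcoded key removed, AzureManager now assumes AZURE_STORAGE_CONNECTION_STRING is already present in the server's environment (the commented-out dotenv setup was dropped rather than re-enabled). A minimal usage sketch of the streaming upload path follows; it is not part of this commit, and the blob name and content type are made up for illustration:

    import * as fs from "fs";
    import { AzureManager } from "./ApiManagers/AzureManager";

    // Assumes AZURE_STORAGE_CONNECTION_STRING is exported before the server starts,
    // e.g. in the shell or the process manager that launches Dash.
    async function uploadExample(filepath: string): Promise<void> {
        const stream = fs.createReadStream(filepath);
        // Streams the file into the configured container, tagging it with a content type.
        const response = await AzureManager.UploadBlobStream(stream, "example_upload.png", "image/png");
        console.log(`uploaded, requestId: ${response.requestId}`);
    }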
diff --git a/src/server/DashUploadUtils.ts b/src/server/DashUploadUtils.ts
index 74c4786b6..bff60568b 100644
--- a/src/server/DashUploadUtils.ts
+++ b/src/server/DashUploadUtils.ts
@@ -18,6 +18,7 @@ import { AcceptableMedia, Upload } from './SharedMediaTypes';
import request = require('request-promise');
import formidable = require('formidable');
import { AzureManager } from './ApiManagers/AzureManager';
+import axios from 'axios';
const spawn = require('child_process').spawn;
const { exec } = require('child_process');
const parse = require('pdf-parse');
@@ -66,6 +67,9 @@ export namespace DashUploadUtils {
const size = 'content-length';
const type = 'content-type';
+ const BLOBSTORE_URL = process.env.BLOBSTORE_URL;
+ const RESIZE_FUNCTION_URL = process.env.RESIZE_FUNCTION_URL;
+
const { imageFormats, videoFormats, applicationFormats, audioFormats } = AcceptableMedia; //TODO:glr
export async function concatVideos(filePaths: string[]): Promise<Upload.AccessPathInfo> {
@@ -188,7 +192,6 @@ export namespace DashUploadUtils {
export async function upload(file: File, overwriteGuid?: string): Promise<Upload.FileResponse> {
const isAzureOn = usingAzure();
- console.log("Azure usage: ", isAzureOn);
const { type, path, name } = file;
const types = type?.split('/') ?? [];
uploadProgress.set(overwriteGuid ?? name, 'uploading'); // If the client sent a guid it uses to track upload progress, use that guid. Otherwise, use the file's name.
@@ -200,7 +203,7 @@ export namespace DashUploadUtils {
switch (category) {
case 'image':
if (imageFormats.includes(format)) {
- const result = await UploadImage(path, basename(path), name);
+ const result = await UploadImage(path, basename(path));
return { source: file, result };
}
fs.unlink(path, () => {});
@@ -330,13 +333,12 @@ export namespace DashUploadUtils {
* 3) the size of the image, in bytes (4432130)
* 4) the content type of the image, i.e. image/(jpeg | png | ...)
*/
- export const UploadImage = async (source: string, filename?: string, originalFilename?: string, prefix: string = ''): Promise<Upload.ImageInformation | Error> => {
+ export const UploadImage = async (source: string, filename?: string, prefix: string = ''): Promise<Upload.ImageInformation | Error> => {
const metadata = await InspectImage(source);
if (metadata instanceof Error) {
return { name: metadata.name, message: metadata.message };
}
- console.log(originalFilename);
- return UploadInspectedImage(metadata, filename || metadata.filename, originalFilename, prefix);
+ return UploadInspectedImage(metadata, filename || metadata.filename, prefix);
};
export async function buildFileDirectories() {
@@ -486,25 +488,48 @@ export namespace DashUploadUtils {
};
}
- export const UploadInspectedImage = async (metadata: Upload.InspectionResults, filename?: string, originalFilename?: string, prefix = '', cleanUp = true): Promise<Upload.ImageInformation> => {
+ /**
+ * UploadInspectedImage() takes an image along with its metadata. If Azure is in use, this method calls the Azure function
+ * at RESIZE_FUNCTION_URL to perform the resizing; otherwise the image is resized locally by this server.
+ *
+ * @param metadata metadata object from InspectImage()
+ * @param filename the name of the file
+ * @param prefix the prefix to use, which will be set to '' if none is provided.
+ * @param cleanUp a boolean indicating if the files should be deleted after upload. True by default.
+ * @returns the accessPaths for the resized files.
+ */
+ export const UploadInspectedImage = async (metadata: Upload.InspectionResults, filename?: string, prefix = '', cleanUp = true): Promise<Upload.ImageInformation> => {
const { requestable, source, ...remaining } = metadata;
const resolved = filename || `${prefix}upload_${Utils.GenerateGuid()}.${remaining.contentType.split('/')[1].toLowerCase()}`;
const { images } = Directory;
const information: Upload.ImageInformation = {
accessPaths: {
- // agnostic: getAccessPaths(images, resolved),
- // azure: {
- // client: `https://dashblobstore.blob.core.windows.net/dashmedia/${filename}`,
- // server: `https://dashblobstore.blob.core.windows.net/dashmedia/${filename}`
- // }
agnostic: usingAzure() ? {
- client: `https://dashblobstore.blob.core.windows.net/dashmedia/${filename}`,
- server: `https://dashblobstore.blob.core.windows.net/dashmedia/${filename}`
+ client: BLOBSTORE_URL + `/${filename}`,
+ server: BLOBSTORE_URL + `/${filename}`
} : getAccessPaths(images, resolved)
},
...metadata,
};
- const writtenFiles = await outputResizedImages(() => request(requestable), resolved, pathToDirectory(Directory.images), originalFilename, metadata.contentType);
+ let writtenFiles: { [suffix: string] : string};
+
+ if (usingAzure()) {
+ if (!RESIZE_FUNCTION_URL) {
+ throw new Error("Resize function URL not provided.");
+ }
+
+ try {
+ const response = await axios.post(RESIZE_FUNCTION_URL, {
+ url: requestable
+ });
+ writtenFiles = response.data.writtenFiles;
+ } catch (err) {
+ console.error(err);
+ writtenFiles = {};
+ }
+ } else {
+ writtenFiles = await outputResizedImages(() => request(requestable), resolved, pathToDirectory(Directory.images));
+ }
for (const suffix of Object.keys(writtenFiles)) {
information.accessPaths[suffix] = getAccessPaths(images, writtenFiles[suffix]);
}
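The Azure branch above delegates resizing to an external function: it POSTs { url: requestable } to RESIZE_FUNCTION_URL and reads writtenFiles from the response. A hedged sketch of the response shape this code expects, inferred only from how writtenFiles is consumed; the concrete suffix strings are illustrative, not taken from the Azure function itself:

    // Inferred contract for the resize function's JSON response.
    interface ResizeFunctionResponse {
        writtenFiles: { [suffix: string]: string }; // size suffix -> resized blob filename
    }
    // e.g. { "writtenFiles": { "_o": "upload_abc.png", "_m": "upload_abc_m.png" } }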
@@ -549,31 +574,36 @@ export namespace DashUploadUtils {
force: true,
};
- export async function outputResizedImages(streamProvider: () => Stream | Promise<Stream>, outputFileName: string, outputDirectory: string, originalFilename?: string, contentType?: string) {
- const start = Date.now();
+ /**
+ * outputResizedImages takes a stream provider and writes resized copies of the image at the sizes defined at the top of this file.
+ *
+ * The new images will be saved to the server with the corresponding prefixes.
+ * @param streamProvider a function returning a Stream (or a Promise of a Stream) of the image to process, taken from the /parsed_files location
+ * @param outputFileName the basename (no size suffix) of the output file.
+ * @param outputDirectory the directory to output to, usually Directory.images
+ * @returns a map with suffixes as keys and resized filenames as values.
+ */
+ export async function outputResizedImages(streamProvider: () => Stream | Promise<Stream>, outputFileName: string, outputDirectory: string) {
const writtenFiles: { [suffix: string]: string } = {};
for (const { resizer, suffix } of resizers(path.extname(outputFileName))) {
const outputPath = path.resolve(outputDirectory, (writtenFiles[suffix] = InjectSize(outputFileName, suffix)));
- console.log(`https://dashblobstore.blob.core.windows.net/dashmedia/${InjectSize(originalFilename!, suffix)}`);
- console.log(`https://dashblobstore.blob.core.windows.net/dashmedia/${InjectSize(outputFileName, suffix)}`);
await new Promise<void>(async (resolve, reject) => {
const source = streamProvider();
let readStream: Stream = source instanceof Promise ? await source : source;
if (resizer) {
readStream = readStream.pipe(resizer.withMetadata());
}
- if(contentType && usingAzure()) {
- // AzureManager.UploadBlobStream(readStream as Readable, InjectSize(originalFilename, suffix), contentType);
- AzureManager.UploadBlobStream(readStream as Readable, InjectSize(outputFileName, suffix), contentType);
- }
readStream.pipe(createWriteStream(outputPath)).on('close', resolve).on('error', reject);
});
}
- const end = Date.now();
- console.log(`Time taken: ${end - start}ms`);
return writtenFiles;
}
+ /**
+ * Defines the resizers to use for each size suffix.
+ * @param ext the file extension of the image being resized
+ * @returns an array of ImageResizer entries, each pairing a size suffix with an optional sharp resizer
+ */
function resizers(ext: string): DashUploadUtils.ImageResizer[] {
return [
{ suffix: SizeSuffix.Original },