Diffstat (limited to 'src')
-rw-r--r--   src/client/Network.ts                      97
-rw-r--r--   src/client/documents/Documents.ts           7
-rw-r--r--   src/client/util/DragManager.ts             44
-rw-r--r--   src/client/views/nodes/DocumentView.tsx     4
-rw-r--r--   src/client/views/nodes/LoadingBox.scss      2
-rw-r--r--   src/client/views/nodes/LoadingBox.tsx       4
-rw-r--r--   src/client/views/nodes/VideoBox.tsx        10
-rw-r--r--   src/server/DashUploadUtils.ts             236
8 files changed, 225 insertions, 179 deletions
diff --git a/src/client/Network.ts b/src/client/Network.ts
index c781d4b6b..a222b320f 100644
--- a/src/client/Network.ts
+++ b/src/client/Network.ts
@@ -1,57 +1,54 @@
-import { Utils } from "../Utils";
+import { Utils } from '../Utils';
import requestPromise = require('request-promise');
-import { Upload } from "../server/SharedMediaTypes";
+import { Upload } from '../server/SharedMediaTypes';
export namespace Networking {
+ export async function FetchFromServer(relativeRoute: string) {
+ return (await fetch(relativeRoute)).text();
+ }
- export async function FetchFromServer(relativeRoute: string) {
- return (await fetch(relativeRoute)).text();
- }
+ export async function PostToServer(relativeRoute: string, body?: any) {
+ const options = {
+ uri: Utils.prepend(relativeRoute),
+ method: 'POST',
+ body,
+ json: true,
+ };
+ return requestPromise.post(options);
+ }
- export async function PostToServer(relativeRoute: string, body?: any) {
- const options = {
- uri: Utils.prepend(relativeRoute),
- method: "POST",
- body,
- json: true
- };
- return requestPromise.post(options);
- }
-
- /**
- * Handles uploading basic file types to server and makes the API call to "/uploadFormData" endpoint
- * with the mapping of GUID to file as parameters.
- *
- * @param files the files to be uploaded to the server
- * @returns the response as a json from the server
- */
- export async function UploadFilesToServer<T extends Upload.FileInformation = Upload.FileInformation>(files: File | File[]): Promise<Upload.FileResponse<T>[]> {
- const formData = new FormData();
- if (Array.isArray(files)) {
- if (!files.length) {
- return [];
- }
- files.forEach(file => formData.append(Utils.GenerateGuid(), file));
- } else {
- formData.append(Utils.GenerateGuid(), files);
+ /**
+ * Handles uploading basic file types to server and makes the API call to "/uploadFormData" endpoint
+ * with the mapping of GUID to file as parameters.
+ *
+ * @param files the files to be uploaded to the server
+ * @returns the response as a json from the server
+ */
+ export async function UploadFilesToServer<T extends Upload.FileInformation = Upload.FileInformation>(files: File | File[]): Promise<Upload.FileResponse<T>[]> {
+ const formData = new FormData();
+ if (Array.isArray(files)) {
+ if (!files.length) {
+ return [];
}
- const parameters = {
- method: 'POST',
- body: formData
- };
- const response = await fetch("/uploadFormData", parameters);
- return response.json();
- }
-
- export async function UploadYoutubeToServer<T extends Upload.FileInformation = Upload.FileInformation>(videoId: string): Promise<Upload.FileResponse<T>[]> {
- const parameters = {
- method: 'POST',
- body: JSON.stringify({ videoId }),
- json: true
- };
- const response = await fetch("/uploadYoutubeVideo", parameters);
- return response.json();
- }
-
+ files.forEach(file => formData.append(Utils.GenerateGuid(), file));
+ } else {
+ formData.append(Utils.GenerateGuid(), files);
+ }
+ const parameters = {
+ method: 'POST',
+ body: formData,
+ };
+ const response = await fetch('/uploadFormData', parameters);
+ return response.json();
+ }
-}
\ No newline at end of file
+ export async function UploadYoutubeToServer<T extends Upload.FileInformation = Upload.FileInformation>(videoId: string): Promise<Upload.FileResponse<T>[]> {
+ const parameters = {
+ method: 'POST',
+ body: JSON.stringify({ videoId }),
+ json: true,
+ };
+ const response = await fetch('/uploadYoutubeVideo', parameters);
+ return response.json();
+ }
+}
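For reference, a minimal sketch of driving the reformatted upload helper from client code, assuming only what this diff shows; uploadAndReport and the import path are illustrative, and the .message check anticipates the error shape handled in Documents.ts below.

    import { Networking } from './Network'; // illustrative path

    // Hypothetical wrapper (not part of this commit): upload a batch of files and
    // log either the returned access paths or the per-file error message.
    async function uploadAndReport(files: File[]) {
        if (!files.length) return;
        const responses = await Networking.UploadFilesToServer(files);
        responses.forEach(({ source, result }) => {
            if ((result as any).message) console.warn(`upload of ${source.name} failed: ${(result as any).message}`);
            else console.log(`uploaded ${source.name}`, (result as any).accessPaths);
        });
    }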
diff --git a/src/client/documents/Documents.ts b/src/client/documents/Documents.ts
index b22e16633..8c3b91177 100644
--- a/src/client/documents/Documents.ts
+++ b/src/client/documents/Documents.ts
@@ -1849,7 +1849,12 @@ export namespace DocUtils {
source: { name, type },
result,
} = upfiles.lastElement();
- name && type && processFileupload(generatedDocuments, name, type, result, options, overwriteDoc);
+ if ((result as any).message) {
+ if (overwriteDoc) {
+ overwriteDoc.isLoading = false;
+ overwriteDoc.errorMessage = (result as any).message;
+ }
+ } else name && type && processFileupload(generatedDocuments, name, type, result, options, overwriteDoc);
});
}
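The `(result as any).message` casts above could be expressed as a type guard; a minimal sketch, assuming only the `{ name, message }` error shape that DashUploadUtils returns later in this commit (isUploadError is an illustrative name):

    type UploadError = { name: string; message: string };

    // Narrows an upload result to the error shape returned for unsupported uploads.
    function isUploadError(result: unknown): result is UploadError {
        return !!result && typeof (result as any).message === 'string';
    }

    // e.g.: if (isUploadError(result)) { overwriteDoc && (overwriteDoc.errorMessage = result.message); }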
diff --git a/src/client/util/DragManager.ts b/src/client/util/DragManager.ts
index 6386c87a0..dfd916e92 100644
--- a/src/client/util/DragManager.ts
+++ b/src/client/util/DragManager.ts
@@ -344,8 +344,7 @@ export namespace DragManager {
}
Object.assign(dragDiv.style, { width: '', height: '', overflow: '' });
dragDiv.hidden = false;
- const scaleXs: number[] = [],
- scaleYs: number[] = [],
+ const scalings: number[] = [],
xs: number[] = [],
ys: number[] = [];
@@ -355,8 +354,15 @@ export namespace DragManager {
top: Number.MAX_SAFE_INTEGER,
bottom: Number.MIN_SAFE_INTEGER,
};
+ let rot = 0;
const docsToDrag = dragData instanceof DocumentDragData ? dragData.draggedDocuments : dragData instanceof AnchorAnnoDragData ? [dragData.dragDocument] : [];
const dragElements = eles.map(ele => {
+ if (ele?.parentElement?.parentElement?.parentElement?.className === 'collectionFreeFormDocumentView-container') {
+ ele = ele.parentElement.parentElement.parentElement;
+ const rotStr = ele.style.transform.replace(/.*rotate\(([-0-9.]*)deg\).*/, '$1');
+ if (rotStr) rot = Number(rotStr);
+ }
+ if (rot < 0) rot += 360;
if (!ele.parentNode) dragDiv.appendChild(ele);
const dragElement = ele.parentNode === dragDiv ? ele : (ele.cloneNode(true) as HTMLElement);
const children = Array.from(dragElement.children);
@@ -376,19 +382,29 @@ export namespace DragManager {
}
}
const rect = ele.getBoundingClientRect();
- const scaleX = rect.width / (ele.offsetWidth || rect.width);
- const scaleY = scaleX; //ele.offsetHeight ? rect.height / (ele.offsetHeight || rect.height) : scaleX;
+ const rotWidth = (rot > 45 && rot < 135) || (rot > 215 && rot < 305) ? rect.height : rect.width; //rect.width * Math.cos((rot * Math.PI) / 180) + rect.height * Math.sin((rot * Math.PI) / 180);
+ const scaling = rot ? rotWidth / ele.offsetWidth : rect.width / (ele.offsetWidth || rect.width);
elesCont.left = Math.min(rect.left, elesCont.left);
elesCont.top = Math.min(rect.top, elesCont.top);
elesCont.right = Math.max(rect.right, elesCont.right);
elesCont.bottom = Math.max(rect.bottom, elesCont.bottom);
- xs.push(rect.left + (options?.offsetX || 0));
- ys.push(rect.top + (options?.offsetY || 0));
- scaleXs.push(scaleX);
- scaleYs.push(scaleY);
+ const rotRad = (rot / 180) * Math.PI;
+ xs.push(
+ (rot > 90 && rot <= 270 ? rect.right : rect.left) + //
+ (rot > 270 ? -scaling * (ele.offsetHeight * Math.sin(rotRad)) : 0) +
+ (rot <= 90 || rot > 180 ? scaling * (ele.offsetHeight * Math.sin(rotRad)) : 0) +
+ (options?.offsetX || 0)
+ );
+ ys.push(
+ rect.top + //
+ (rot > 180 ? -scaling * (ele.offsetWidth * Math.sin(rotRad)) : 0) +
+ (rot >= 90 && rot < 270 ? -scaling * (ele.offsetHeight * Math.cos(rotRad)) : 0) +
+ (options?.offsetY || 0)
+ );
+ scalings.push(scaling);
Object.assign(dragElement.style, {
- opacity: '0.7',
+ opacity: '0',
position: 'absolute',
margin: '0',
top: '0',
@@ -399,9 +415,9 @@ export namespace DragManager {
borderRadius: getComputedStyle(ele).borderRadius,
zIndex: globalCssVariables.contextMenuZindex,
transformOrigin: '0 0',
- width: `${rect.width / scaleX}px`,
- height: `${rect.height / scaleY}px`,
- transform: `translate(${xs[0]}px, ${ys[0]}px) scale(${scaleX}, ${scaleY})`,
+ width: rot ? '' : `${rect.width / scaling}px`,
+ height: rot ? '' : `${rect.height / scaling}px`,
+ transform: `translate(${xs[0]}px, ${ys[0]}px) rotate(${rot}deg)`,
});
dragLabel.style.transform = `translate(${xs[0]}px, ${ys[0] - 20}px)`;
@@ -415,6 +431,8 @@ export namespace DragManager {
[dragElement, ...Array.from(dragElement.getElementsByTagName('*'))].forEach(ele => (ele as any).style && ((ele as any).style.pointerEvents = 'none'));
dragDiv.appendChild(dragElement);
+ scalings[scalings.length - 1] = rect.width / dragElement.getBoundingClientRect().width;
+ setTimeout(() => (dragElement.style.opacity = '0.7'));
if (dragElement !== ele) {
const children = [Array.from(ele.children), Array.from(dragElement.children)];
while (children[0].length) {
@@ -542,7 +560,7 @@ export namespace DragManager {
const moveVec = { x: x - lastPt.x, y: y - lastPt.y };
lastPt = { x, y };
- dragElements.map((dragElement, i) => (dragElement.style.transform = `translate(${(xs[i] += moveVec.x)}px, ${(ys[i] += moveVec.y)}px) scale(${scaleXs[i]}, ${scaleYs[i]})`));
+ dragElements.map((dragElement, i) => (dragElement.style.transform = `translate(${(xs[i] += moveVec.x)}px, ${(ys[i] += moveVec.y)}px) rotate(${rot}deg) scale(${scalings[i]})`));
dragLabel.style.transform = `translate(${xs[0]}px, ${ys[0] - 20}px)`;
};
const upHandler = (e: PointerEvent) => {
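The piecewise x/y terms above appear to pick the translate() origin so that, with transform-origin 0 0, the rotated clone lines up with the original element's bounding rect. The same arithmetic as a standalone helper, assuming rot has already been normalized into [0, 360) as in the loop above (dragOrigin is an illustrative name, not part of this commit):

    // Given the element's bounding rect, its unscaled offsetWidth/offsetHeight, a
    // uniform scaling factor, and a rotation in degrees normalized to [0, 360),
    // compute the translate() origin used for the drag clone's transform.
    function dragOrigin(rect: DOMRect, offsetWidth: number, offsetHeight: number, scaling: number, rot: number) {
        const rotRad = (rot / 180) * Math.PI;
        const x =
            (rot > 90 && rot <= 270 ? rect.right : rect.left) +
            (rot > 270 ? -scaling * offsetHeight * Math.sin(rotRad) : 0) +
            (rot <= 90 || rot > 180 ? scaling * offsetHeight * Math.sin(rotRad) : 0);
        const y =
            rect.top +
            (rot > 180 ? -scaling * offsetWidth * Math.sin(rotRad) : 0) +
            (rot >= 90 && rot < 270 ? -scaling * offsetHeight * Math.cos(rotRad) : 0);
        return { x, y };
    }

    // For rot = 0 this reduces to { x: rect.left, y: rect.top }, matching the unrotated case.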
diff --git a/src/client/views/nodes/DocumentView.tsx b/src/client/views/nodes/DocumentView.tsx
index 01fadb48d..113574a64 100644
--- a/src/client/views/nodes/DocumentView.tsx
+++ b/src/client/views/nodes/DocumentView.tsx
@@ -498,8 +498,8 @@ export class DocumentViewInternal extends DocComponent<DocumentViewInternalProps
.ScreenToLocalTransform()
.scale(this.NativeDimScaling)
.transformDirection(x - left, y - top);
- dragData.offset[0] = Math.min(this.rootDoc[WidthSym](), dragData.offset[0]);
- dragData.offset[1] = Math.min(this.rootDoc[HeightSym](), dragData.offset[1]);
+ // dragData.offset[0] = Math.min(this.rootDoc[WidthSym](), dragData.offset[0]); // bcz: this was breaking dragging rotated objects since the offset may be out of bounds with regard to the unrotated document
+ // dragData.offset[1] = Math.min(this.rootDoc[HeightSym](), dragData.offset[1]);
dragData.dropAction = dropAction;
dragData.treeViewDoc = this.props.treeViewDoc;
dragData.removeDocument = this.props.removeDocument;
diff --git a/src/client/views/nodes/LoadingBox.scss b/src/client/views/nodes/LoadingBox.scss
index f6912f547..d63ed2575 100644
--- a/src/client/views/nodes/LoadingBox.scss
+++ b/src/client/views/nodes/LoadingBox.scss
@@ -4,6 +4,8 @@
align-content: center;
justify-content: center;
background-color: #fdfdfd;
+ height: 100%;
+ align-items: center;
}
.textContainer {
diff --git a/src/client/views/nodes/LoadingBox.tsx b/src/client/views/nodes/LoadingBox.tsx
index f3243f6cd..462ad425a 100644
--- a/src/client/views/nodes/LoadingBox.tsx
+++ b/src/client/views/nodes/LoadingBox.tsx
@@ -37,9 +37,9 @@ export class LoadingBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
render() {
return (
- <div className="loadingBoxContainer">
+ <div className="loadingBoxContainer" style={{ background: this.rootDoc.isLoading ? '' : 'red' }}>
<div className="textContainer">
- <p className="headerText">{this.rootDoc.isLoading ? 'Loading:' : 'Error Loading File:'}</p>
+ <p className="headerText">{this.rootDoc.isLoading ? 'Loading:' : StrCast(this.rootDoc.errorMessage, 'Error Loading File:')}</p>
<span className="text">{StrCast(this.rootDoc.title)}</span>
{!this.rootDoc.isLoading ? null : <ReactLoading type={'spinningBubbles'} color={'blue'} height={100} width={100} />}
</div>
diff --git a/src/client/views/nodes/VideoBox.tsx b/src/client/views/nodes/VideoBox.tsx
index 0ff15f93b..6ff11258d 100644
--- a/src/client/views/nodes/VideoBox.tsx
+++ b/src/client/views/nodes/VideoBox.tsx
@@ -396,10 +396,12 @@ export class VideoBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
// sets video info on load
videoLoad = action(() => {
- const aspect = this.player!.videoWidth / this.player!.videoHeight;
- Doc.SetNativeWidth(this.dataDoc, this.player!.videoWidth);
- Doc.SetNativeHeight(this.dataDoc, this.player!.videoHeight);
- this.layoutDoc._height = NumCast(this.layoutDoc._width) / aspect;
+ const aspect = this.player!.videoWidth / (this.player!.videoHeight || 1);
+ if (aspect) {
+ Doc.SetNativeWidth(this.dataDoc, this.player!.videoWidth);
+ Doc.SetNativeHeight(this.dataDoc, this.player!.videoHeight);
+ this.layoutDoc._height = NumCast(this.layoutDoc._width) / aspect;
+ }
if (Number.isFinite(this.player!.duration)) {
this.rawDuration = this.player!.duration;
} else this.rawDuration = NumCast(this.dataDoc[this.fieldKey + '-duration']);
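The added guard avoids writing 0 or NaN native dimensions when the video element has not reported metadata yet; a standalone illustration of the same idea, where the element creation and source path are illustrative only:

    // videoWidth/videoHeight report 0 until 'loadedmetadata' fires, so compute the
    // aspect ratio only once real dimensions are available.
    const video = document.createElement('video');
    video.src = '/files/videos/example.mp4'; // illustrative path
    video.addEventListener('loadedmetadata', () => {
        const aspect = video.videoWidth / (video.videoHeight || 1);
        if (aspect) console.log(`native ${video.videoWidth}x${video.videoHeight}, aspect ${aspect}`);
    });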
diff --git a/src/server/DashUploadUtils.ts b/src/server/DashUploadUtils.ts
index cae35da60..ef7192ecc 100644
--- a/src/server/DashUploadUtils.ts
+++ b/src/server/DashUploadUtils.ts
@@ -4,33 +4,33 @@ import * as exifr from 'exifr';
import { File } from 'formidable';
import { createWriteStream, existsSync, readFileSync, rename, unlinkSync, writeFile } from 'fs';
import * as path from 'path';
-import { basename } from "path";
+import { basename } from 'path';
import * as sharp from 'sharp';
import { Stream } from 'stream';
import { filesDirectory, publicDirectory } from '.';
import { Opt } from '../fields/Doc';
-import { ParsedPDF } from "../server/PdfTypes";
+import { ParsedPDF } from '../server/PdfTypes';
import { Utils } from '../Utils';
import { createIfNotExists } from './ActionUtilities';
import { clientPathToFile, Directory, pathToDirectory, serverPathToFile } from './ApiManagers/UploadManager';
-import { resolvedServerUrl } from "./server_Initialization";
+import { resolvedServerUrl } from './server_Initialization';
import { AcceptableMedia, Upload } from './SharedMediaTypes';
import request = require('request-promise');
import formidable = require('formidable');
import { file } from 'jszip';
import { csvParser } from './DataVizUtils';
-const { exec } = require("child_process");
+const { exec } = require('child_process');
const parse = require('pdf-parse');
-const ffmpeg = require("fluent-ffmpeg");
-const fs = require("fs");
-const requestImageSize = require("../client/util/request-image-size");
+const ffmpeg = require('fluent-ffmpeg');
+const fs = require('fs');
+const requestImageSize = require('../client/util/request-image-size');
export enum SizeSuffix {
- Small = "_s",
- Medium = "_m",
- Large = "_l",
- Original = "_o",
- None = ""
+ Small = '_s',
+ Medium = '_m',
+ Large = '_l',
+ Original = '_o',
+ None = '',
}
export function InjectSize(filename: string, size: SizeSuffix) {
@@ -43,7 +43,6 @@ function isLocal() {
}
export namespace DashUploadUtils {
-
export interface Size {
width: number;
suffix: SizeSuffix;
@@ -59,19 +58,19 @@ export namespace DashUploadUtils {
return AcceptableMedia.imageFormats.includes(path.extname(url).toLowerCase());
}
- const size = "content-length";
- const type = "content-type";
+ const size = 'content-length';
+ const type = 'content-type';
+
+ const { imageFormats, videoFormats, applicationFormats, audioFormats } = AcceptableMedia; //TODO:glr
- const { imageFormats, videoFormats, applicationFormats, audioFormats } = AcceptableMedia; //TODO:glr
-
export async function concatVideos(filePaths: string[]): Promise<Upload.AccessPathInfo> {
// make a list of paths to create the ordered text file for ffmpeg
const inputListName = 'concat.txt';
const textFilePath = path.join(filesDirectory, inputListName);
// make a list of paths to create the ordered text file for ffmpeg
- const filePathsText = filePaths.map(filePath => `file '${filePath}'`).join('\n');
+ const filePathsText = filePaths.map(filePath => `file '${filePath}'`).join('\n');
// write the text file to the file system
- writeFile(textFilePath, filePathsText, (err) => console.log(err));
+ writeFile(textFilePath, filePathsText, err => console.log(err));
// make output file name based on timestamp
const outputFileName = `output-${Utils.GenerateGuid()}.mp4`;
@@ -81,87 +80,110 @@ export namespace DashUploadUtils {
// concatenate the videos
await new Promise((resolve, reject) => {
var merge = ffmpeg();
- merge.input(textFilePath)
- .inputOptions(['-f concat', '-safe 0'])
+ merge
+ .input(textFilePath)
+ .inputOptions(['-f concat', '-safe 0'])
.outputOptions('-c copy')
//.videoCodec("copy")
.save(outputFilePath)
- .on("error", reject)
- .on("end", resolve);
- })
-
- // delete concat.txt from the file system
- unlinkSync(textFilePath);
- // delete the old segment videos from the server
- filePaths.forEach(filePath => unlinkSync(filePath));
-
- // return the path(s) to the output file
- return {
- accessPaths: getAccessPaths(Directory.videos, outputFileName)
- }
+ .on('error', reject)
+ .on('end', resolve);
+ });
+
+ // delete concat.txt from the file system
+ unlinkSync(textFilePath);
+ // delete the old segment videos from the server
+ filePaths.forEach(filePath => unlinkSync(filePath));
+
+ // return the path(s) to the output file
+ return {
+ accessPaths: getAccessPaths(Directory.videos, outputFileName),
+ };
}
export function uploadYoutube(videoId: string): Promise<Upload.FileResponse> {
- console.log("UPLOAD " + videoId);
+ console.log('UPLOAD ' + videoId);
return new Promise<Upload.FileResponse<Upload.FileInformation>>((res, rej) => {
- exec('youtube-dl -o ' + (videoId + ".mp4") + ' https://www.youtube.com/watch?v=' + videoId + ' -f "best[filesize<50M]"',
- (error: any, stdout: any, stderr: any) => {
- if (error) console.log(`error: ${error.message}`);
- else if (stderr) console.log(`stderr: ${stderr}`);
- else {
- console.log(`stdout: ${stdout}`);
- const data = { size: 0, path: videoId + ".mp4", name: videoId, type: "video/mp4" };
- const file = { ...data, toJSON: () => ({ ...data, filename: data.path.replace(/.*\//, ""), mtime: null, length: 0, mime: "", toJson: () => undefined as any }) };
- res(MoveParsedFile(file, Directory.videos));
- }
- });
+ exec('youtube-dl -o ' + (videoId + '.mp4') + ' https://www.youtube.com/watch?v=' + videoId + ' -f "best[filesize<50M]"', (error: any, stdout: any, stderr: any) => {
+ if (error) console.log(`error: ${error.message}`);
+ else if (stderr) console.log(`stderr: ${stderr}`);
+ else {
+ console.log(`stdout: ${stdout}`);
+ const data = { size: 0, path: videoId + '.mp4', name: videoId, type: 'video/mp4' };
+ const file = { ...data, toJSON: () => ({ ...data, filename: data.path.replace(/.*\//, ''), mtime: null, length: 0, mime: '', toJson: () => undefined as any }) };
+ res(MoveParsedFile(file, Directory.videos));
+ }
+ });
});
}
export async function upload(file: File): Promise<Upload.FileResponse> {
const { type, path, name } = file;
- const types = type?.split("/") ?? [];
+ const types = type?.split('/') ?? [];
const category = types[0];
let format = `.${types[1]}`;
console.log(green(`Processing upload of file (${name}) and format (${format}) with upload type (${type}) in category (${category}).`));
-
+
switch (category) {
- case "image":
+ case 'image':
if (imageFormats.includes(format)) {
const result = await UploadImage(path, basename(path));
return { source: file, result };
}
- case "video":
- if (format.includes("x-matroska")) {
- console.log("case video");
- await new Promise(res => ffmpeg(file.path)
- .videoCodec("copy") // this will copy the data instead of reencode it
- .save(file.path.replace(".mkv", ".mp4"))
- .on('end', res));
- file.path = file.path.replace(".mkv", ".mp4");
- format = ".mp4";
+ case 'video':
+ if (format.includes('x-matroska')) {
+ console.log('case video');
+ await new Promise(res =>
+ ffmpeg(file.path)
+ .videoCodec('copy') // this will copy the data instead of reencode it
+ .save(file.path.replace('.mkv', '.mp4'))
+ .on('end', res)
+ );
+ file.path = file.path.replace('.mkv', '.mp4');
+ format = '.mp4';
+ }
+ if (format.includes('quicktime')) {
+ let abort = false;
+ await new Promise<void>(res =>
+ ffmpeg.ffprobe(file.path, (err: any, metadata: any) => {
+ if (metadata.streams.some((stream: any) => stream.codec_name === 'hevc')) {
+ abort = true;
+ }
+ res();
+ })
+ );
+ if (abort) return { source: file, result: { name: 'Unsupported video format', message: `Could not upload unsupported file (${name}). Please convert to an .mp4` } };
+ // bcz: instead of aborting, we could convert the file using the code below to an mp4. Problem is that this takes a long time and will clog up the server.
+ // await new Promise(res =>
+ // ffmpeg(file.path)
+ // .videoCodec('libx264') // this will copy the data instead of reencode it
+ // .audioCodec('mp2')
+ // .save(file.path.replace('.MOV', '.mp4').replace('.mov', '.mp4'))
+ // .on('end', res)
+ // );
+ // file.path = file.path.replace('.mov', '.mp4').replace('.MOV', '.mp4');
+ // format = '.mp4';
}
if (videoFormats.includes(format)) {
return MoveParsedFile(file, Directory.videos);
}
- case "application":
+ case 'application':
if (applicationFormats.includes(format)) {
return UploadPdf(file);
}
- case "audio":
- const components = format.split(";");
+ case 'audio':
+ const components = format.split(';');
if (components.length > 1) {
format = components[0];
}
if (audioFormats.includes(format)) {
return UploadAudio(file, format);
}
- case "text":
- if (types[1] == "csv") {
+ case 'text':
+ if (types[1] == 'csv') {
return UploadCsv(file);
}
-
}
console.log(red(`Ignoring unsupported file (${name}) with upload type (${type}).`));
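The quicktime branch above probes the container and rejects HEVC-encoded uploads rather than transcoding them. The same check as a standalone helper, sketched with fluent-ffmpeg's ffprobe (containsHevc is an illustrative name; unlike the inline version, this sketch also guards against a failed probe before reading metadata.streams):

    const ffmpeg = require('fluent-ffmpeg');

    // Resolves true when any stream in the file is HEVC-encoded, the case the
    // upload handler rejects with an "Unsupported video format" result.
    function containsHevc(filePath: string): Promise<boolean> {
        return new Promise(resolve =>
            ffmpeg.ffprobe(filePath, (err: any, metadata: any) =>
                resolve(!err && !!metadata?.streams?.some((stream: any) => stream.codec_name === 'hevc'))
            )
        );
    }

    // usage in upload(): if (await containsHevc(file.path)) return { source: file, result: { name: 'Unsupported video format', message: `Could not upload unsupported file (${name}). Please convert to an .mp4` } };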
@@ -176,22 +198,21 @@ export namespace DashUploadUtils {
const name = path.basename(sourcePath);
const textFilename = `${name.substring(0, name.length - 4)}.txt`;
const writeStream = createWriteStream(serverPathToFile(Directory.text, textFilename));
- writeStream.write(result.text, error => error ? reject(error) : resolve());
+ writeStream.write(result.text, error => (error ? reject(error) : resolve()));
});
return MoveParsedFile(file, Directory.pdfs, undefined, result.text);
}
async function UploadCsv(file: File) {
- const { path: sourcePath } = file;
- // read the file as a string
+ const { path: sourcePath } = file;
+ // read the file as a string
const data = readFileSync(sourcePath, 'utf8');
// split the string into an array of lines
return MoveParsedFile(file, Directory.csv, undefined, data);
// console.log(csvParser(data));
-
}
- const manualSuffixes = [".webm"];
+ const manualSuffixes = ['.webm'];
async function UploadAudio(file: File, format: string) {
const suffix = manualSuffixes.includes(format) ? format : undefined;
@@ -200,22 +221,22 @@ export namespace DashUploadUtils {
/**
* Uploads an image specified by the @param source to Dash's /public/files/
- * directory, and returns information generated during that upload
- *
+ * directory, and returns information generated during that upload
+ *
* @param {string} source is either the absolute path of an already uploaded image or
* the url of a remote image
* @param {string} filename dictates what to call the image. If not specified,
* the name {@param prefix}_upload_{GUID}
* @param {string} prefix is a string prepended to the generated image name in the
* event that @param filename is not specified
- *
+ *
* @returns {ImageUploadInformation | Error} This method returns
* 1) the paths to the uploaded images (plural due to resizing)
* 2) the exif data embedded in the image, or the error explaining why exif couldn't be parsed
* 3) the size of the image, in bytes (4432130)
* 4) the content type of the image, i.e. image/(jpeg | png | ...)
*/
- export const UploadImage = async (source: string, filename?: string, prefix: string = ""): Promise<Upload.ImageInformation | Error> => {
+ export const UploadImage = async (source: string, filename?: string, prefix: string = ''): Promise<Upload.ImageInformation | Error> => {
const metadata = await InspectImage(source);
if (metadata instanceof Error) {
return metadata;
@@ -225,12 +246,12 @@ export namespace DashUploadUtils {
export async function buildFileDirectories() {
if (!existsSync(publicDirectory)) {
- console.error("\nPlease ensure that the following directory exists...\n");
+ console.error('\nPlease ensure that the following directory exists...\n');
console.log(publicDirectory);
process.exit(0);
}
if (!existsSync(filesDirectory)) {
- console.error("\nPlease ensure that the following directory exists...\n");
+ console.error('\nPlease ensure that the following directory exists...\n');
console.log(filesDirectory);
process.exit(0);
}
@@ -252,7 +273,7 @@ export namespace DashUploadUtils {
/**
* Based on the url's classification as local or remote, gleans
* as much information as possible about the specified image
- *
+ *
* @param source is the path or url to the image in question
*/
export const InspectImage = async (source: string): Promise<Upload.InspectionResults | Error> => {
@@ -265,9 +286,9 @@ export namespace DashUploadUtils {
*/
if ((rawMatches = /^data:image\/([a-z]+);base64,(.*)/.exec(source)) !== null) {
const [ext, data] = rawMatches.slice(1, 3);
- const resolved = filename = `upload_${Utils.GenerateGuid()}.${ext}`;
+ const resolved = (filename = `upload_${Utils.GenerateGuid()}.${ext}`);
const error = await new Promise<Error | null>(resolve => {
- writeFile(serverPathToFile(Directory.images, resolved), data, "base64", resolve);
+ writeFile(serverPathToFile(Directory.images, resolved), data, 'base64', resolve);
});
if (error !== null) {
return error;
@@ -276,12 +297,12 @@ export namespace DashUploadUtils {
}
let resolvedUrl: string;
/**
- *
+ *
* At this point, we want to take whatever url we have and make sure it's requestable.
* Anything that's hosted by some other website already is, but if the url is a local file url
* (locates the file on this server machine), we have to resolve the client side url by cutting out the
* basename subtree (i.e. /images/<some_guid>.<ext>) and put it on the end of the server's url.
- *
+ *
* This can always be localhost, regardless of whether this is on the server or not, since we (the server, not the client)
* will be the ones making the request, and from the perspective of dash-release or dash-web, localhost:<port> refers to the same thing
* as the full dash-release.eastus.cloudapp.azure.com:<port>.
@@ -290,18 +311,18 @@ export namespace DashUploadUtils {
if (matches === null) {
resolvedUrl = source;
} else {
- resolvedUrl = `${resolvedServerUrl}/${matches[1].split("\\").join("/")}`;
+ resolvedUrl = `${resolvedServerUrl}/${matches[1].split('\\').join('/')}`;
}
// See header comments: not all image files have exif data (I believe JPG is the only format that can have it)
const exifData = await parseExifData(resolvedUrl);
const results = {
exifData,
- requestable: resolvedUrl
+ requestable: resolvedUrl,
};
// Use the request library to parse out file level image information in the headers
- const { headers } = (await new Promise<any>((resolve, reject) => {
- request.head(resolvedUrl, (error, res) => error ? reject(error) : resolve(res));
- }).catch(console.error));
+ const { headers } = await new Promise<any>((resolve, reject) => {
+ request.head(resolvedUrl, (error, res) => (error ? reject(error) : resolve(res)));
+ }).catch(console.error);
try {
// Compute the native width and height of the image with an npm module
const { width: nativeWidth, height: nativeHeight } = await requestImageSize(resolvedUrl);
@@ -313,7 +334,7 @@ export namespace DashUploadUtils {
nativeWidth,
nativeHeight,
filename,
- ...results
+ ...results,
};
} catch (e: any) {
console.log(e);
@@ -340,12 +361,14 @@ export namespace DashUploadUtils {
rename(sourcePath, destinationPath, error => {
resolve({
source: file,
- result: error ? error : {
- accessPaths: {
- agnostic: getAccessPaths(destination, name)
- },
- rawText: text
- }
+ result: error
+ ? error
+ : {
+ accessPaths: {
+ agnostic: getAccessPaths(destination, name),
+ },
+ rawText: text,
+ },
});
});
});
@@ -354,19 +377,19 @@ export namespace DashUploadUtils {
export function getAccessPaths(directory: Directory, fileName: string) {
return {
client: clientPathToFile(directory, fileName),
- server: serverPathToFile(directory, fileName)
+ server: serverPathToFile(directory, fileName),
};
}
- export const UploadInspectedImage = async (metadata: Upload.InspectionResults, filename?: string, prefix = "", cleanUp = true): Promise<Upload.ImageInformation> => {
+ export const UploadInspectedImage = async (metadata: Upload.InspectionResults, filename?: string, prefix = '', cleanUp = true): Promise<Upload.ImageInformation> => {
const { requestable, source, ...remaining } = metadata;
- const resolved = filename || `${prefix}upload_${Utils.GenerateGuid()}.${remaining.contentType.split("/")[1].toLowerCase()}`;
+ const resolved = filename || `${prefix}upload_${Utils.GenerateGuid()}.${remaining.contentType.split('/')[1].toLowerCase()}`;
const { images } = Directory;
const information: Upload.ImageInformation = {
accessPaths: {
- agnostic: getAccessPaths(images, resolved)
+ agnostic: getAccessPaths(images, resolved),
},
- ...metadata
+ ...metadata,
};
const writtenFiles = await outputResizedImages(() => request(requestable), resolved, pathToDirectory(Directory.images));
for (const suffix of Object.keys(writtenFiles)) {
@@ -383,9 +406,9 @@ export namespace DashUploadUtils {
const val: any = layer[key];
if (val instanceof Buffer) {
layer[key] = val.toString();
- } else if (Array.isArray(val) && typeof val[0] === "number") {
+ } else if (Array.isArray(val) && typeof val[0] === 'number') {
layer[key] = Buffer.from(val).toString();
- } else if (typeof val === "object") {
+ } else if (typeof val === 'object') {
bufferConverterRec(val);
}
}
@@ -410,20 +433,20 @@ export namespace DashUploadUtils {
const pngOptions = {
compressionLevel: 9,
adaptiveFiltering: true,
- force: true
+ force: true,
};
export async function outputResizedImages(streamProvider: () => Stream | Promise<Stream>, outputFileName: string, outputDirectory: string) {
const writtenFiles: { [suffix: string]: string } = {};
for (const { resizer, suffix } of resizers(path.extname(outputFileName))) {
- const outputPath = path.resolve(outputDirectory, writtenFiles[suffix] = InjectSize(outputFileName, suffix));
+ const outputPath = path.resolve(outputDirectory, (writtenFiles[suffix] = InjectSize(outputFileName, suffix)));
await new Promise<void>(async (resolve, reject) => {
const source = streamProvider();
let readStream: Stream = source instanceof Promise ? await source : source;
if (resizer) {
readStream = readStream.pipe(resizer.withMetadata());
}
- readStream.pipe(createWriteStream(outputPath)).on("close", resolve).on("error", reject);
+ readStream.pipe(createWriteStream(outputPath)).on('close', resolve).on('error', reject);
});
}
return writtenFiles;
@@ -442,15 +465,14 @@ export namespace DashUploadUtils {
initial = initial.webp();
} else if (tiffs.includes(ext)) {
initial = initial.tiff();
- } else if (ext === ".gif") {
+ } else if (ext === '.gif') {
initial = undefined;
}
return {
resizer: initial,
- suffix
+ suffix,
};
- })
+ }),
];
}
-
-}
\ No newline at end of file
+}
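For context on the resizing pipeline above, a sketch of driving outputResizedImages directly, assuming only the signatures visible in this diff; the local paths and resizeLocalImage are illustrative, not part of the codebase:

    import { createReadStream } from 'fs';
    import { DashUploadUtils, InjectSize, SizeSuffix } from './DashUploadUtils';

    async function resizeLocalImage() {
        const written = await DashUploadUtils.outputResizedImages(
            () => createReadStream('/tmp/photo.jpg'), // streamProvider is re-invoked once per size
            'photo.jpg',                              // suffixes from SizeSuffix are injected into this name
            '/tmp/resized'                            // outputDirectory (assumed to exist)
        );
        // written maps each size suffix to its generated filename,
        // e.g. written[SizeSuffix.Small] === InjectSize('photo.jpg', SizeSuffix.Small)
        console.log(written);
    }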