From e6d1dd82f5d472bddaf66f9210142249d6fa09aa Mon Sep 17 00:00:00 2001 From: Naafiyan Ahmed Date: Wed, 6 Jul 2022 20:21:04 -0400 Subject: added endpoint routes for csv and integrated chartjs --- src/server/ApiManagers/DataVizManager.ts | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 100644 src/server/ApiManagers/DataVizManager.ts (limited to 'src/server/ApiManagers') diff --git a/src/server/ApiManagers/DataVizManager.ts b/src/server/ApiManagers/DataVizManager.ts new file mode 100644 index 000000000..0d43130d1 --- /dev/null +++ b/src/server/ApiManagers/DataVizManager.ts @@ -0,0 +1,26 @@ +import { csvParser, csvToString } from "../DataVizUtils"; +import { Method, _success } from "../RouteManager"; +import ApiManager, { Registration } from "./ApiManager"; +import { Directory, serverPathToFile } from "./UploadManager"; +import * as path from 'path'; + +export default class DataVizManager extends ApiManager { + protected initialize(register: Registration): void { + register({ + method: Method.GET, + subscription: "/csvData", + secureHandler: async ({ req, res }) => { + const uri = req.query.uri as string; + + return new Promise(resolve => { + const name = path.basename(uri); + const sPath = serverPathToFile(Directory.csv, name); + const parsedCsv = csvParser(csvToString(sPath)); + _success(res, parsedCsv); + resolve(); + }); + } + }); + } + +} \ No newline at end of file -- cgit v1.2.3-70-g09d2 From 0e55893d0f7f2a0aa5098df73d0ece5a7f1a4ddf Mon Sep 17 00:00:00 2001 From: bobzel Date: Wed, 15 Mar 2023 22:33:22 -0400 Subject: fixed up Clone() and export/import collection to work with links, presentations, and contexts better. --- .eslintrc.json | 4 ++ package-lock.json | 78 +++++++++++++--------- package.json | 2 +- .../collectionFreeForm/CollectionFreeFormView.tsx | 6 +- src/client/views/nodes/DocumentView.tsx | 1 + src/client/views/nodes/trails/PresBox.tsx | 7 +- src/decycler/decycler.d.ts | 2 + src/decycler/decycler.js | 51 ++++++++++++++ src/fields/Doc.ts | 75 ++++++++++++--------- src/server/ApiManagers/UploadManager.ts | 24 +++---- tsconfig.json | 24 ++----- 11 files changed, 170 insertions(+), 104 deletions(-) create mode 100644 src/decycler/decycler.d.ts create mode 100644 src/decycler/decycler.js (limited to 'src/server/ApiManagers') diff --git a/.eslintrc.json b/.eslintrc.json index b9f8e1b7a..43bb53566 100644 --- a/.eslintrc.json +++ b/.eslintrc.json @@ -10,5 +10,9 @@ "object-shorthand": "off", "class-methods-use-this": "off", "single-quote": "off" + }, + "parserOptions": { + "ecmaVersion": 11, + "sourceType": "module" } } diff --git a/package-lock.json b/package-lock.json index 4695adf40..cc51ad9e0 100644 --- a/package-lock.json +++ b/package-lock.json @@ -590,15 +590,30 @@ "resolved": "https://registry.npmjs.org/@emotion/weak-memoize/-/weak-memoize-0.2.5.tgz", "integrity": "sha512-6U71C2Wp7r5XtFtQzYrW5iKFT67OixrSxjI4MptCHzdSVlgabczzqLe0ZSgnub/5Kp4hSbpDB1tMytZY9pwxxA==" }, + "@eslint-community/eslint-utils": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.2.0.tgz", + "integrity": "sha512-gB8T4H4DEfX2IV9zGDJPOBgP1e/DbfCPDTtEqUMckpvzS1OYtva8JdFYBqMwYk7xAQ429WGF/UPqn8uQ//h2vQ==", + "dev": true, + "requires": { + "eslint-visitor-keys": "^3.3.0" + } + }, + "@eslint-community/regexpp": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.4.0.tgz", + "integrity": 
"sha512-A9983Q0LnDGdLPjxyXQ00sbV+K+O+ko2Dr+CZigbHWtX9pNfxlaBkMR8X1CztI73zuEyEBXTVjx7CE+/VSwDiQ==", + "dev": true + }, "@eslint/eslintrc": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-1.4.1.tgz", - "integrity": "sha512-XXrH9Uarn0stsyldqDYq8r++mROmWRI1xKMXa640Bb//SY1+ECYX6VzT6Lcx5frD0V30XieqJ0oX9I2Xj5aoMA==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.0.1.tgz", + "integrity": "sha512-eFRmABvW2E5Ho6f5fHLqgena46rOj7r7OKHYfLElqcBfGFHHpjBhivyi5+jOEQuSpdc/1phIZJlbC2te+tZNIw==", "dev": true, "requires": { "ajv": "^6.12.4", "debug": "^4.3.2", - "espree": "^9.4.0", + "espree": "^9.5.0", "globals": "^13.19.0", "ignore": "^5.2.0", "import-fresh": "^3.2.1", @@ -660,6 +675,12 @@ } } }, + "@eslint/js": { + "version": "8.36.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.36.0.tgz", + "integrity": "sha512-lxJ9R5ygVm8ZWgYdUweoq5ownDlJ4upvoWmO4eLxBYHdMo+vZ/Rx0EN6MbKWDJOSUGrqJy2Gt+Dyv/VKml0fjg==", + "dev": true + }, "@ffmpeg/core": { "version": "0.10.0", "resolved": "https://registry.npmjs.org/@ffmpeg/core/-/core-0.10.0.tgz", @@ -6683,12 +6704,15 @@ } }, "eslint": { - "version": "8.34.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.34.0.tgz", - "integrity": "sha512-1Z8iFsucw+7kSqXNZVslXS8Ioa4u2KM7GPwuKtkTFAqZ/cHMcEaR+1+Br0wLlot49cNxIiZk5wp8EAbPcYZxTg==", + "version": "8.36.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.36.0.tgz", + "integrity": "sha512-Y956lmS7vDqomxlaaQAHVmeb4tNMp2FWIvU/RnU5BD3IKMD/MJPr76xdyr68P8tV1iNMvN2mRK0yy3c+UjL+bw==", "dev": true, "requires": { - "@eslint/eslintrc": "^1.4.1", + "@eslint-community/eslint-utils": "^4.2.0", + "@eslint-community/regexpp": "^4.4.0", + "@eslint/eslintrc": "^2.0.1", + "@eslint/js": "8.36.0", "@humanwhocodes/config-array": "^0.11.8", "@humanwhocodes/module-importer": "^1.0.1", "@nodelib/fs.walk": "^1.2.8", @@ -6699,10 +6723,9 @@ "doctrine": "^3.0.0", "escape-string-regexp": "^4.0.0", "eslint-scope": "^7.1.1", - "eslint-utils": "^3.0.0", "eslint-visitor-keys": "^3.3.0", - "espree": "^9.4.0", - "esquery": "^1.4.0", + "espree": "^9.5.0", + "esquery": "^1.4.2", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", "file-entry-cache": "^6.0.1", @@ -6723,7 +6746,6 @@ "minimatch": "^3.1.2", "natural-compare": "^1.4.0", "optionator": "^0.9.1", - "regexpp": "^3.2.0", "strip-ansi": "^6.0.1", "strip-json-comments": "^3.1.0", "text-table": "^0.2.0" @@ -6811,6 +6833,15 @@ "estraverse": "^5.2.0" } }, + "esquery": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.5.0.tgz", + "integrity": "sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg==", + "dev": true, + "requires": { + "estraverse": "^5.1.0" + } + }, "estraverse": { "version": "5.3.0", "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", @@ -7852,23 +7883,6 @@ "estraverse": "^4.1.1" } }, - "eslint-utils": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-3.0.0.tgz", - "integrity": "sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA==", - "dev": true, - "requires": { - "eslint-visitor-keys": "^2.0.0" - }, - "dependencies": { - "eslint-visitor-keys": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz", - "integrity": "sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==", - "dev": true 
- } - } - }, "eslint-visitor-keys": { "version": "3.3.0", "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz", @@ -7876,9 +7890,9 @@ "dev": true }, "espree": { - "version": "9.4.1", - "resolved": "https://registry.npmjs.org/espree/-/espree-9.4.1.tgz", - "integrity": "sha512-XwctdmTO6SIvCzd9810yyNzIrOrqNYV9Koizx4C/mRhf9uq0o4yHoCEU/670pOxOL/MSraektvSAji79kX90Vg==", + "version": "9.5.0", + "resolved": "https://registry.npmjs.org/espree/-/espree-9.5.0.tgz", + "integrity": "sha512-JPbJGhKc47++oo4JkEoTe2wjy4fmMwvFpgJT9cQzmfXKp22Dr6Hf1tdCteLz1h0P3t+mGvWZ+4Uankvh8+c6zw==", "dev": true, "requires": { "acorn": "^8.8.0", diff --git a/package.json b/package.json index 00ce356f9..2c4c41917 100644 --- a/package.json +++ b/package.json @@ -98,7 +98,7 @@ "cross-env": "^5.2.1", "css-loader": "^2.1.1", "dotenv": "^8.6.0", - "eslint": "^8.18.0", + "eslint": "^8.36.0", "eslint-config-airbnb": "^19.0.4", "eslint-config-node": "^4.1.0", "eslint-config-prettier": "^8.5.0", diff --git a/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx b/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx index 15d8144fc..f0c140ef1 100644 --- a/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx +++ b/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx @@ -1796,11 +1796,7 @@ export class CollectionFreeFormView extends CollectionSubView Doc.Zip(this.props.Document) }); - moreItems.push({ description: 'Import exported collection', icon: 'upload', event: ({ x, y }) => this.importDocument(e.clientX, e.clientY) }); - } + moreItems.push({ description: 'Import exported collection', icon: 'upload', event: ({ x, y }) => this.importDocument(e.clientX, e.clientY) }); !mores && ContextMenu.Instance.addItem({ description: 'More...', subitems: moreItems, icon: 'eye' }); }; diff --git a/src/client/views/nodes/DocumentView.tsx b/src/client/views/nodes/DocumentView.tsx index 02af30d0c..b7a760c1e 100644 --- a/src/client/views/nodes/DocumentView.tsx +++ b/src/client/views/nodes/DocumentView.tsx @@ -1002,6 +1002,7 @@ export class DocumentViewInternal extends DocComponent Utils.CopyText(Doc.globalServerPath(this.props.Document)), icon: 'fingerprint' }); } + moreItems.push({ description: 'Export collection', icon: 'download', event: async () => Doc.Zip(this.props.Document) }); } if (this.props.removeDocument && !Doc.IsSystem(this.rootDoc) && Doc.ActiveDashboard !== this.props.Document) { diff --git a/src/client/views/nodes/trails/PresBox.tsx b/src/client/views/nodes/trails/PresBox.tsx index be40b3592..e79e7472a 100644 --- a/src/client/views/nodes/trails/PresBox.tsx +++ b/src/client/views/nodes/trails/PresBox.tsx @@ -570,9 +570,9 @@ export class PresBox extends ViewBoxBaseComponent() { } } else { if (bestTarget._panX !== activeItem.presPanX || bestTarget._panY !== activeItem.presPanY || bestTarget._viewScale !== activeItem.presViewScale) { - bestTarget._panX = activeItem.presPanX; - bestTarget._panY = activeItem.presPanY; - bestTarget._viewScale = activeItem.presViewScale; + bestTarget._panX = activeItem.presPanX ?? bestTarget._panX; + bestTarget._panY = activeItem.presPanY ?? bestTarget._panY; + bestTarget._viewScale = activeItem.presViewScale ?? bestTarget._viewScale; changed = true; } } @@ -717,6 +717,7 @@ export class PresBox extends ViewBoxBaseComponent() { zoomTime: activeItem.presMovement === PresMovement.Jump ? 0 : Math.min(Math.max(effect ? 750 : 500, (effect ? 
0.2 : 1) * presTime), presTime), effect: activeItem, noSelect: true, + openLocation: OpenWhere.addLeft, anchorDoc: activeItem, easeFunc: StrCast(activeItem.presEaseFunc, 'ease') as any, zoomTextSelections: BoolCast(activeItem.presZoomText), diff --git a/src/decycler/decycler.d.ts b/src/decycler/decycler.d.ts new file mode 100644 index 000000000..84620f79c --- /dev/null +++ b/src/decycler/decycler.d.ts @@ -0,0 +1,2 @@ +export declare const decycle: Function; +export declare const retrocycle: Function; diff --git a/src/decycler/decycler.js b/src/decycler/decycler.js new file mode 100644 index 000000000..7fb8a45c7 --- /dev/null +++ b/src/decycler/decycler.js @@ -0,0 +1,51 @@ +/// +/// this is a modified copy of the npm project: https://www.npmjs.com/package/json-decycle +/// the original code is used as replacer when stringifying JSON objects that have cycles. +/// However, we want an additional replacer that stringifies Dash Fields and Docs in a custom way. +/// So this modified code allows for a custom replacer to be added to this object that will run before this replacer +/// + +const g = e => typeof e === 'object' && e != null && !(e instanceof Boolean) && !(e instanceof Date) && !(e instanceof Number) && !(e instanceof RegExp) && !(e instanceof String); +const b = e => String('#') + e.map(t => String(t).replace(/~/g, '~0').replace(/\//g, '~1')).join('/'); +// eslint-disable-next-line node/no-unsupported-features/es-syntax +export function decycle(replacer) { + const e = new WeakMap(); + return function (n, rr) { + const r = replacer(n, rr); + if (n !== '$ref' && g(r)) { + if (e.has(r)) return { $ref: b(e.get(r)) }; + e.set(r, [...(e.get(this) === undefined ? [] : e.get(this)), n]); + } + return r; + }; +} +// eslint-disable-next-line node/no-unsupported-features/es-syntax +export function retrocycle() { + const e = new WeakMap(); + const t = new WeakMap(); + const n = new Set(); + function r(o) { + const c = o.$ref.slice(1).split('/'); + let s; + let a = this; + // eslint-disable-next-line no-plusplus + for (let p = 0; p < c.length; p++) { + s = c[p].replace(/~1/g, '/').replace(/~0/g, '~'); + a = a[s]; + } + const f = e.get(o); + f[t.get(o)] = a; + } + return function (c, s) { + if (c === '$ref') n.add(this); + else if (g(s)) { + const f = c === '' && Object.keys(this).length === 1; + if (f) n.forEach(r, this); + else { + e.set(s, this); + t.set(s, c); + } + } + return s; + }; +} diff --git a/src/fields/Doc.ts b/src/fields/Doc.ts index de94ed5db..168e29dd5 100644 --- a/src/fields/Doc.ts +++ b/src/fields/Doc.ts @@ -10,6 +10,7 @@ import { scriptingGlobal, ScriptingGlobals } from '../client/util/ScriptingGloba import { SelectionManager } from '../client/util/SelectionManager'; import { afterDocDeserialize, autoObject, Deserializable, SerializationHelper } from '../client/util/SerializationHelper'; import { UndoManager } from '../client/util/UndoManager'; +import { decycle } from '../decycler/decycler'; import { DashColor, incrementTitleCopy, intersectRect, Utils } from '../Utils'; import { DateField } from './DateField'; import { Copy, HandleUpdate, Id, OnUpdate, Parent, Self, SelfProxy, ToScriptString, ToString, Update } from './FieldSymbols'; @@ -25,7 +26,6 @@ import { Cast, DocCast, FieldValue, NumCast, StrCast, ToConstructor } from './Ty import { AudioField, ImageField, MapField, PdfField, VideoField, WebField } from './URLField'; import { deleteProperty, GetEffectiveAcl, getField, getter, makeEditable, makeReadOnly, normalizeEmail, setter, SharingPermissions, updateFunction } from 
'./util'; import JSZip = require('jszip'); - export namespace Field { export function toKeyValueString(doc: Doc, key: string): string { const onDelegate = Object.keys(doc).includes(key); @@ -701,22 +701,12 @@ export namespace Doc { return bestAlias ?? Doc.MakeAlias(doc); } - export async function makeClone( - doc: Doc, - cloneMap: Map, - linkMap: Map, - rtfs: { copy: Doc; key: string; field: RichTextField }[], - exclusions: string[], - topLevelExclusions: string[], - dontCreate: boolean, - asBranch: boolean - ): Promise { + export async function makeClone(doc: Doc, cloneMap: Map, linkMap: Map, rtfs: { copy: Doc; key: string; field: RichTextField }[], exclusions: string[], dontCreate: boolean, asBranch: boolean): Promise { if (Doc.IsBaseProto(doc)) return doc; if (cloneMap.get(doc[Id])) return cloneMap.get(doc[Id])!; - const copy = dontCreate ? (asBranch ? Cast(doc.branchMaster, Doc, null) || doc : doc) : new Doc(undefined, true); + const copy = dontCreate ? (asBranch ? Cast(doc.branchMaster, Doc, null) ?? doc : doc) : new Doc(undefined, true); cloneMap.set(doc[Id], copy); - const fieldExclusions = doc.type === DocumentType.MARKER ? exclusions.filter(ex => ex !== 'annotationOn') : exclusions; - const filter = [...fieldExclusions, ...topLevelExclusions, ...Cast(doc.cloneFieldFilter, listSpec('string'), [])]; + const filter = [...exclusions, ...Cast(doc.cloneFieldFilter, listSpec('string'), [])]; await Promise.all( Object.keys(doc).map(async key => { if (filter.includes(key)) return; @@ -727,10 +717,10 @@ export namespace Doc { const list = await Cast(doc[key], listSpec(Doc)); const docs = list && (await DocListCastAsync(list))?.filter(d => d instanceof Doc); if (docs !== undefined && docs.length) { - const clones = await Promise.all(docs.map(async d => Doc.makeClone(d, cloneMap, linkMap, rtfs, exclusions, [], dontCreate, asBranch))); + const clones = await Promise.all(docs.map(async d => Doc.makeClone(d, cloneMap, linkMap, rtfs, exclusions, dontCreate, asBranch))); !dontCreate && assignKey(new List(clones)); } else if (doc[key] instanceof Doc) { - assignKey(key.includes('layout[') ? undefined : key.startsWith('layout') ? (doc[key] as Doc) : await Doc.makeClone(doc[key] as Doc, cloneMap, linkMap, rtfs, exclusions, [], dontCreate, asBranch)); // reference documents except copy documents that are expanded template fields + assignKey(key.includes('layout[') ? undefined : key.startsWith('layout') ? 
(doc[key] as Doc) : await Doc.makeClone(doc[key] as Doc, cloneMap, linkMap, rtfs, exclusions, dontCreate, asBranch)); // reference documents except copy documents that are expanded template fields } else { !dontCreate && assignKey(ObjectField.MakeCopy(field)); if (field instanceof RichTextField) { @@ -740,13 +730,12 @@ export namespace Doc { } } }; - if (key === 'proto') { - if (doc[key] instanceof Doc) { - assignKey(await Doc.makeClone(doc[key] as Doc, cloneMap, linkMap, rtfs, exclusions, [], dontCreate, asBranch)); - } - } else if (key === 'anchor1' || key === 'anchor2') { - if (doc[key] instanceof Doc) { - assignKey(await Doc.makeClone(doc[key] as Doc, cloneMap, linkMap, rtfs, exclusions, [], true, asBranch)); + const docAtKey = doc[key]; + if (docAtKey instanceof Doc) { + if (!Doc.IsSystem(docAtKey) && (key === 'annotationOn' || (key === 'proto' && cloneMap.has(doc[Id])) || ((key === 'anchor1' || key === 'anchor2') && doc.author === Doc.CurrentUserEmail))) { + assignKey(await Doc.makeClone(docAtKey, cloneMap, linkMap, rtfs, exclusions, dontCreate, asBranch)); + } else { + assignKey(docAtKey); } } else { if (field instanceof RefField) { @@ -765,8 +754,8 @@ export namespace Doc { }) ); for (const link of Array.from(doc[DirectLinksSym])) { - const linkClone = await Doc.makeClone(link, cloneMap, linkMap, rtfs, exclusions, [], dontCreate, asBranch); - linkMap.set(link, linkClone); + const linkClone = await Doc.makeClone(link, cloneMap, linkMap, rtfs, exclusions, dontCreate, asBranch); + linkMap.set(link[Id], linkClone); } if (!dontCreate) { Doc.SetInPlace(copy, 'title', (asBranch ? 'BRANCH: ' : 'CLONE: ') + doc.title, true); @@ -779,11 +768,29 @@ export namespace Doc { Doc.AddFileOrphan(copy); return copy; } + export function repairClone(doc: Doc, cloned: Doc[], visited: Set) { + if (visited.has(doc)) return; + visited.add(doc); + Object.keys(doc).map(key => { + const docAtKey = DocCast(doc[key]); + if (docAtKey && !Doc.IsSystem(docAtKey)) { + if (!cloned.includes(docAtKey)) { + doc[key] = undefined; + } else { + repairClone(docAtKey, cloned, visited); + } + } + }); + } export async function MakeClone(doc: Doc, dontCreate: boolean = false, asBranch = false, cloneMap: Map = new Map()) { - const linkMap = new Map(); + const linkMap = new Map(); const rtfMap: { copy: Doc; key: string; field: RichTextField }[] = []; - const copy = await Doc.makeClone(doc, cloneMap, linkMap, rtfMap, ['cloneOf', 'branches', 'branchOf'], ['context'], dontCreate, asBranch); - Array.from(linkMap.entries()).map((links: Doc[]) => LinkManager.Instance.addLink(links[1], true)); + const copy = await Doc.makeClone(doc, cloneMap, linkMap, rtfMap, ['cloneOf', 'branches', 'branchOf'], dontCreate, asBranch); + const repaired = new Set(); + const linkedDocs = Array.from(linkMap.values()); + const clonedDocs = [...Array.from(cloneMap.values()), ...linkedDocs]; + clonedDocs.map(clone => Doc.repairClone(clone, Array.from(cloneMap.values()), repaired)); + linkedDocs.map((link: Doc) => LinkManager.Instance.addLink(link, true)); rtfMap.map(({ copy, key, field }) => { const replacer = (match: any, attr: string, id: string, offset: any, string: any) => { const mapped = cloneMap.get(id); @@ -797,7 +804,7 @@ export namespace Doc { const re = new RegExp(regex, 'g'); copy[key] = new RichTextField(field.Data.replace(/("textId":|"audioId":|"anchorId":)"([^"]+)"/g, replacer).replace(re, replacer2), field.Text); }); - return { clone: copy, map: cloneMap }; + return { clone: copy, map: cloneMap, linkMap }; } export async function Zip(doc: Doc) 
{ @@ -806,9 +813,10 @@ export namespace Doc { // a.href = url; // a.download = `DocExport-${this.props.Document[Id]}.zip`; // a.click(); - const { clone, map } = await Doc.MakeClone(doc, true); + const { clone, map, linkMap } = await Doc.MakeClone(doc, true); + clone.LINKS = new List(Array.from(linkMap.values())); function replacer(key: any, value: any) { - if (['branchOf', 'cloneOf', 'context', 'cursors'].includes(key)) return undefined; + if (['branchOf', 'cloneOf', 'cursors'].includes(key)) return undefined; else if (value instanceof Doc) { if (key !== 'field' && Number.isNaN(Number(key))) { const __fields = value[FieldsSym](); @@ -833,7 +841,7 @@ export namespace Doc { const docs: { [id: string]: any } = {}; Array.from(map.entries()).forEach(f => (docs[f[0]] = f[1])); - const docString = JSON.stringify({ id: doc[Id], docs }, replacer); + const docString = JSON.stringify({ id: doc[Id], docs }, decycle(replacer)); const zip = new JSZip(); @@ -1520,7 +1528,8 @@ export namespace Doc { const response = await fetch(upload, { method: 'POST', body: formData }); const json = await response.json(); if (json !== 'error') { - const doc = await DocServer.GetRefField(json); + const doc = DocCast(await DocServer.GetRefField(json)); + (await DocListCastAsync(doc?.LINKS))?.forEach(link => LinkManager.Instance.addLink(link)); return doc; } } diff --git a/src/server/ApiManagers/UploadManager.ts b/src/server/ApiManagers/UploadManager.ts index fe4c475c9..9bacbd5c8 100644 --- a/src/server/ApiManagers/UploadManager.ts +++ b/src/server/ApiManagers/UploadManager.ts @@ -1,19 +1,19 @@ -import ApiManager, { Registration } from './ApiManager'; -import { Method, _success } from '../RouteManager'; import * as formidable from 'formidable'; -import v4 = require('uuid/v4'); -const AdmZip = require('adm-zip'); -import { extname, basename, dirname } from 'path'; import { createReadStream, createWriteStream, unlink, writeFile } from 'fs'; -import { publicDirectory, filesDirectory } from '..'; -import { Database } from '../database'; -import { DashUploadUtils, InjectSize, SizeSuffix } from '../DashUploadUtils'; +import { basename, dirname, extname, normalize } from 'path'; import * as sharp from 'sharp'; -import { AcceptableMedia, Upload } from '../SharedMediaTypes'; -import { normalize } from 'path'; +import { filesDirectory, publicDirectory } from '..'; +import { retrocycle } from '../../decycler/decycler'; +import { DashUploadUtils, InjectSize, SizeSuffix } from '../DashUploadUtils'; +import { Database } from '../database'; +import { Method, _success } from '../RouteManager'; import RouteSubscriber from '../RouteSubscriber'; -const imageDataUri = require('image-data-uri'); +import { AcceptableMedia, Upload } from '../SharedMediaTypes'; +import ApiManager, { Registration } from './ApiManager'; import { SolrManager } from './SearchManager'; +import v4 = require('uuid/v4'); +const AdmZip = require('adm-zip'); +const imageDataUri = require('image-data-uri'); const fs = require('fs'); export enum Directory { @@ -252,7 +252,7 @@ export default class UploadManager extends ApiManager { }); const json = zip.getEntry('doc.json'); try { - const data = JSON.parse(json.getData().toString('utf8')); + const data = JSON.parse(json.getData().toString('utf8'), retrocycle()); const datadocs = data.docs; id = getId(data.id); const docs = Object.keys(datadocs).map(key => datadocs[key]); diff --git a/tsconfig.json b/tsconfig.json index 993ab13b9..bff9255db 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -2,34 +2,22 @@ "compilerOptions": { 
"target": "es5", "downlevelIteration": true, - // "module": "system", "removeComments": true, "experimentalDecorators": true, "allowSyntheticDefaultImports": true, + "moduleDetection": "auto", "strict": true, "jsx": "react", "allowJs": true, "sourceMap": true, "outDir": "dist", - "lib": [ - "dom", - "es2015" - ], - "typeRoots": [ - "node_modules/@types", - "./src/typings" - ], - "types": [ - "youtube", - "node" - ] + "lib": ["dom", "es2015"], + "typeRoots": ["node_modules/@types", "./src/typings"], + "types": ["youtube", "node"] }, // "exclude": [ // "node_modules", // "static" // ], - "typeRoots": [ - "./node_modules/@types", - "./src/typings" - ] -} \ No newline at end of file + "typeRoots": ["./node_modules/@types", "./src/typings"] +} -- cgit v1.2.3-70-g09d2 From d2bca182a311e95515bbff8fb378b29918fe99d7 Mon Sep 17 00:00:00 2001 From: geireann Date: Thu, 16 Mar 2023 12:00:59 -0400 Subject: fixed export/import collectoin --- src/fields/Doc.ts | 34 +++++++++++++++++++-------------- src/server/ApiManagers/UploadManager.ts | 4 +++- 2 files changed, 23 insertions(+), 15 deletions(-) (limited to 'src/server/ApiManagers') diff --git a/src/fields/Doc.ts b/src/fields/Doc.ts index 168e29dd5..6543679ad 100644 --- a/src/fields/Doc.ts +++ b/src/fields/Doc.ts @@ -732,7 +732,7 @@ export namespace Doc { }; const docAtKey = doc[key]; if (docAtKey instanceof Doc) { - if (!Doc.IsSystem(docAtKey) && (key === 'annotationOn' || (key === 'proto' && cloneMap.has(doc[Id])) || ((key === 'anchor1' || key === 'anchor2') && doc.author === Doc.CurrentUserEmail))) { + if (!Doc.IsSystem(docAtKey) && (key === 'annotationOn' || key === 'proto'|| ((key === 'anchor1' || key === 'anchor2') && doc.author === Doc.CurrentUserEmail))) { assignKey(await Doc.makeClone(docAtKey, cloneMap, linkMap, rtfs, exclusions, dontCreate, asBranch)); } else { assignKey(docAtKey); @@ -757,8 +757,8 @@ export namespace Doc { const linkClone = await Doc.makeClone(link, cloneMap, linkMap, rtfs, exclusions, dontCreate, asBranch); linkMap.set(link[Id], linkClone); } - if (!dontCreate) { Doc.SetInPlace(copy, 'title', (asBranch ? 'BRANCH: ' : 'CLONE: ') + doc.title, true); + if (!dontCreate) { asBranch ? 
(copy.branchOf = doc) : (copy.cloneOf = doc); if (!Doc.IsPrototype(copy)) { Doc.AddDocToList(doc, 'branches', Doc.GetProto(copy)); @@ -768,16 +768,19 @@ export namespace Doc { Doc.AddFileOrphan(copy); return copy; } - export function repairClone(doc: Doc, cloned: Doc[], visited: Set) { - if (visited.has(doc)) return; - visited.add(doc); - Object.keys(doc).map(key => { - const docAtKey = DocCast(doc[key]); + export function repairClone(clone: Doc, cloneMap: Map, visited: Set) { + if (visited.has(clone)) return; + visited.add(clone); + Object.keys(clone).filter(key => key !== "cloneOf").map(key => { + const docAtKey = DocCast(clone[key]); if (docAtKey && !Doc.IsSystem(docAtKey)) { - if (!cloned.includes(docAtKey)) { - doc[key] = undefined; + if (!Array.from(cloneMap.values()).includes(docAtKey)) { + if (cloneMap.has(docAtKey[Id])) { + clone[key] = cloneMap.get(docAtKey[Id]); + } + else clone[key] = undefined; } else { - repairClone(docAtKey, cloned, visited); + repairClone(docAtKey, cloneMap, visited); } } }); @@ -789,7 +792,7 @@ export namespace Doc { const repaired = new Set(); const linkedDocs = Array.from(linkMap.values()); const clonedDocs = [...Array.from(cloneMap.values()), ...linkedDocs]; - clonedDocs.map(clone => Doc.repairClone(clone, Array.from(cloneMap.values()), repaired)); + clonedDocs.map(clone => Doc.repairClone(clone, cloneMap, repaired)); linkedDocs.map((link: Doc) => LinkManager.Instance.addLink(link, true)); rtfMap.map(({ copy, key, field }) => { const replacer = (match: any, attr: string, id: string, offset: any, string: any) => { @@ -813,7 +816,7 @@ export namespace Doc { // a.href = url; // a.download = `DocExport-${this.props.Document[Id]}.zip`; // a.click(); - const { clone, map, linkMap } = await Doc.MakeClone(doc, true); + const { clone, map, linkMap } = await Doc.MakeClone(doc, false); clone.LINKS = new List(Array.from(linkMap.values())); function replacer(key: any, value: any) { if (['branchOf', 'cloneOf', 'cursors'].includes(key)) return undefined; @@ -841,7 +844,7 @@ export namespace Doc { const docs: { [id: string]: any } = {}; Array.from(map.entries()).forEach(f => (docs[f[0]] = f[1])); - const docString = JSON.stringify({ id: doc[Id], docs }, decycle(replacer)); + const docString = JSON.stringify({ id: clone[Id], docs }, decycle(replacer)); const zip = new JSZip(); @@ -1527,8 +1530,11 @@ export namespace Doc { formData.append('remap', 'true'); const response = await fetch(upload, { method: 'POST', body: formData }); const json = await response.json(); + console.log(json) if (json !== 'error') { - const doc = DocCast(await DocServer.GetRefField(json)); + await DocServer.GetRefFields(json.docids as string[]); + const doc = DocCast(await DocServer.GetRefField(json.id)); + console.log("Doc = ", doc, doc?.title); (await DocListCastAsync(doc?.LINKS))?.forEach(link => LinkManager.Instance.addLink(link)); return doc; } diff --git a/src/server/ApiManagers/UploadManager.ts b/src/server/ApiManagers/UploadManager.ts index 9bacbd5c8..5da3dfd3f 100644 --- a/src/server/ApiManagers/UploadManager.ts +++ b/src/server/ApiManagers/UploadManager.ts @@ -228,6 +228,7 @@ export default class UploadManager extends ApiManager { form.parse(req, async (_err, fields, files) => { remap = fields.remap !== 'false'; let id: string = ''; + let docids: string[] = []; try { for (const name in files) { const f = files[name]; @@ -257,6 +258,7 @@ export default class UploadManager extends ApiManager { id = getId(data.id); const docs = Object.keys(datadocs).map(key => datadocs[key]); 
docs.forEach(mapFn); + docids = docs.map(doc => doc.id) await Promise.all( docs.map( (doc: any) => @@ -279,7 +281,7 @@ export default class UploadManager extends ApiManager { unlink(path_2, () => {}); } SolrManager.update(); - res.send(JSON.stringify(id || 'error')); + res.send(JSON.stringify({id, docids} || 'error')); } catch (e) { console.log(e); } -- cgit v1.2.3-70-g09d2 From de0df48cba8e89256a3208fbadfd5afaaa9e22d3 Mon Sep 17 00:00:00 2001 From: bobzel Date: Thu, 16 Mar 2023 21:10:23 -0400 Subject: added importing and exporting of assets (images, pdfs, etc) to collection importer/exporter --- src/JSZipUtils.js | 142 ++++++++++++++++++++++++++++++++ src/fields/Doc.ts | 82 ++++++++++++------ src/server/ApiManagers/UploadManager.ts | 28 ++++--- 3 files changed, 216 insertions(+), 36 deletions(-) create mode 100644 src/JSZipUtils.js (limited to 'src/server/ApiManagers') diff --git a/src/JSZipUtils.js b/src/JSZipUtils.js new file mode 100644 index 000000000..5ce1bd471 --- /dev/null +++ b/src/JSZipUtils.js @@ -0,0 +1,142 @@ +var JSZipUtils = {}; +// just use the responseText with xhr1, response with xhr2. +// The transformation doesn't throw away high-order byte (with responseText) +// because JSZip handles that case. If not used with JSZip, you may need to +// do it, see https://developer.mozilla.org/En/Using_XMLHttpRequest#Handling_binary_data +JSZipUtils._getBinaryFromXHR = function (xhr) { + // for xhr.responseText, the 0xFF mask is applied by JSZip + return xhr.response || xhr.responseText; +}; + +// taken from jQuery +function createStandardXHR() { + try { + return new window.XMLHttpRequest(); + } catch (e) {} +} + +function createActiveXHR() { + try { + return new window.ActiveXObject('Microsoft.XMLHTTP'); + } catch (e) {} +} + +// Create the request object +var createXHR = + typeof window !== 'undefined' && window.ActiveXObject + ? /* Microsoft failed to properly + * implement the XMLHttpRequest in IE7 (can't request local files), + * so we use the ActiveXObject when it is available + * Additionally XMLHttpRequest can be disabled in IE7/IE8 so + * we need a fallback. + */ + function () { + return createStandardXHR() || createActiveXHR(); + } + : // For all other browsers, use the standard XMLHttpRequest object + createStandardXHR; + +/** + * @param {string} path The path to the resource to GET. + * @param {function|{callback: function, progress: function}} options + * @return {Promise|undefined} If no callback is passed then a promise is returned + */ +JSZipUtils.getBinaryContent = function (path, options) { + var promise, resolve, reject; + var callback; + + if (!options) { + options = {}; + } + + // backward compatible callback + if (typeof options === 'function') { + callback = options; + options = {}; + } else if (typeof options.callback === 'function') { + // callback inside options object + callback = options.callback; + } + + if (!callback && typeof Promise !== 'undefined') { + promise = new Promise(function (_resolve, _reject) { + resolve = _resolve; + reject = _reject; + }); + } else { + resolve = function (data) { + callback(null, data); + }; + reject = function (err) { + callback(err, null); + }; + } + + /* + * Here is the tricky part : getting the data. + * In firefox/chrome/opera/... setting the mimeType to 'text/plain; charset=x-user-defined' + * is enough, the result is in the standard xhr.responseText. 
+ * cf https://developer.mozilla.org/En/XMLHttpRequest/Using_XMLHttpRequest#Receiving_binary_data_in_older_browsers + * In IE <= 9, we must use (the IE only) attribute responseBody + * (for binary data, its content is different from responseText). + * In IE 10, the 'charset=x-user-defined' trick doesn't work, only the + * responseType will work : + * http://msdn.microsoft.com/en-us/library/ie/hh673569%28v=vs.85%29.aspx#Binary_Object_upload_and_download + * + * I'd like to use jQuery to avoid this XHR madness, but it doesn't support + * the responseType attribute : http://bugs.jquery.com/ticket/11461 + */ + try { + var xhr = createXHR(); + + xhr.open('GET', path, true); + + // recent browsers + if ('responseType' in xhr) { + xhr.responseType = 'arraybuffer'; + } + + // older browser + if (xhr.overrideMimeType) { + xhr.overrideMimeType('text/plain; charset=x-user-defined'); + } + + xhr.onreadystatechange = function (event) { + // use `xhr` and not `this`... thanks IE + if (xhr.readyState === 4) { + if (xhr.status === 200 || xhr.status === 0) { + try { + resolve(JSZipUtils._getBinaryFromXHR(xhr)); + } catch (err) { + reject(new Error(err)); + } + } else { + reject(new Error('Ajax error for ' + path + ' : ' + this.status + ' ' + this.statusText)); + } + } + }; + + if (options.progress) { + xhr.onprogress = function (e) { + options.progress({ + path: path, + originalEvent: e, + percent: (e.loaded / e.total) * 100, + loaded: e.loaded, + total: e.total, + }); + }; + } + + xhr.send(); + } catch (e) { + reject(new Error(e), null); + } + + // returns a promise or undefined depending on whether a callback was + // provided + return promise; +}; + +// export +module.exports = JSZipUtils; diff --git a/src/fields/Doc.ts b/src/fields/Doc.ts index 6543679ad..deda8aa1f 100644 --- a/src/fields/Doc.ts +++ b/src/fields/Doc.ts @@ -26,6 +26,7 @@ import { Cast, DocCast, FieldValue, NumCast, StrCast, ToConstructor } from './Ty import { AudioField, ImageField, MapField, PdfField, VideoField, WebField } from './URLField'; import { deleteProperty, GetEffectiveAcl, getField, getter, makeEditable, makeReadOnly, normalizeEmail, setter, SharingPermissions, updateFunction } from './util'; import JSZip = require('jszip'); +import * as JSZipUtils from '../JSZipUtils'; export namespace Field { export function toKeyValueString(doc: Doc, key: string): string { const onDelegate = Object.keys(doc).includes(key); @@ -732,7 +733,7 @@ export namespace Doc { }; const docAtKey = doc[key]; if (docAtKey instanceof Doc) { - if (!Doc.IsSystem(docAtKey) && (key === 'annotationOn' || key === 'proto'|| ((key === 'anchor1' || key === 'anchor2') && doc.author === Doc.CurrentUserEmail))) { + if (!Doc.IsSystem(docAtKey) && (key === 'annotationOn' || key === 'proto' || ((key === 'anchor1' || key === 'anchor2') && doc.author === Doc.CurrentUserEmail))) { assignKey(await Doc.makeClone(docAtKey, cloneMap, linkMap, rtfs, exclusions, dontCreate, asBranch)); } else { assignKey(docAtKey); @@ -757,7 +758,7 @@ export namespace Doc { const linkClone = await Doc.makeClone(link, cloneMap, linkMap, rtfs, exclusions, dontCreate, asBranch); linkMap.set(link[Id], linkClone); } - Doc.SetInPlace(copy, 'title', (asBranch ? 'BRANCH: ' : 'CLONE: ') + doc.title, true); + Doc.SetInPlace(copy, 'title', (asBranch ? 'BRANCH: ' : 'CLONE: ') + doc.title, true); if (!dontCreate) { asBranch ? 
(copy.branchOf = doc) : (copy.cloneOf = doc); if (!Doc.IsPrototype(copy)) { @@ -768,22 +769,23 @@ export namespace Doc { Doc.AddFileOrphan(copy); return copy; } - export function repairClone(clone: Doc, cloneMap: Map, visited: Set) { + export function repairClone(clone: Doc, cloneMap: Map, visited: Set) { if (visited.has(clone)) return; visited.add(clone); - Object.keys(clone).filter(key => key !== "cloneOf").map(key => { - const docAtKey = DocCast(clone[key]); - if (docAtKey && !Doc.IsSystem(docAtKey)) { - if (!Array.from(cloneMap.values()).includes(docAtKey)) { - if (cloneMap.has(docAtKey[Id])) { - clone[key] = cloneMap.get(docAtKey[Id]); + Object.keys(clone) + .filter(key => key !== 'cloneOf') + .map(key => { + const docAtKey = DocCast(clone[key]); + if (docAtKey && !Doc.IsSystem(docAtKey)) { + if (!Array.from(cloneMap.values()).includes(docAtKey)) { + if (cloneMap.has(docAtKey[Id])) { + clone[key] = cloneMap.get(docAtKey[Id]); + } else clone[key] = undefined; + } else { + repairClone(docAtKey, cloneMap, visited); } - else clone[key] = undefined; - } else { - repairClone(docAtKey, cloneMap, visited); } - } - }); + }); } export async function MakeClone(doc: Doc, dontCreate: boolean = false, asBranch = false, cloneMap: Map = new Map()) { const linkMap = new Map(); @@ -818,8 +820,9 @@ export namespace Doc { // a.click(); const { clone, map, linkMap } = await Doc.MakeClone(doc, false); clone.LINKS = new List(Array.from(linkMap.values())); + const proms = [] as string[]; function replacer(key: any, value: any) { - if (['branchOf', 'cloneOf', 'cursors'].includes(key)) return undefined; + if (key && ['branchOf', 'cloneOf', 'cursors'].includes(key)) return undefined; else if (value instanceof Doc) { if (key !== 'field' && Number.isNaN(Number(key))) { const __fields = value[FieldsSym](); @@ -829,9 +832,14 @@ export namespace Doc { } } else if (value instanceof ScriptField) return { script: value.script, __type: 'script' }; else if (value instanceof RichTextField) return { Data: value.Data, Text: value.Text, __type: 'RichTextField' }; - else if (value instanceof ImageField) return { url: value.url.href, __type: 'image' }; - else if (value instanceof PdfField) return { url: value.url.href, __type: 'pdf' }; - else if (value instanceof AudioField) return { url: value.url.href, __type: 'audio' }; + else if (value instanceof ImageField) { + const extension = value.url.href.replace(/.*\./, ''); + proms.push(value.url.href.replace('.' + extension, '_o.' 
+ extension)); + return { url: value.url.href, __type: 'image' }; + } else if (value instanceof PdfField) { + proms.push(value.url.href); + return { url: value.url.href, __type: 'pdf' }; + } else if (value instanceof AudioField) return { url: value.url.href, __type: 'audio' }; else if (value instanceof VideoField) return { url: value.url.href, __type: 'video' }; else if (value instanceof WebField) return { url: value.url.href, __type: 'web' }; else if (value instanceof MapField) return { url: value.url.href, __type: 'map' }; @@ -846,6 +854,32 @@ export namespace Doc { Array.from(map.entries()).forEach(f => (docs[f[0]] = f[1])); const docString = JSON.stringify({ id: clone[Id], docs }, decycle(replacer)); + let generateZIP = (proms: string[]) => { + var zip = new JSZip(); + var count = 0; + var zipFilename = 'dashExport.zip'; + + proms + .filter(url => url.startsWith(window.location.origin)) + .forEach((url, i) => { + var filename = proms[i].replace(window.location.origin + '/', '').replace(/\//g, '%%%'); + // loading a file and add it in a zip file + JSZipUtils.getBinaryContent(url, function (err: any, data: any) { + if (err) { + throw err; // or handle the error + } + zip.file(filename, data, { binary: true }); + count++; + if (count == proms.length) { + zip.file('doc.json', docString); + zip.generateAsync({ type: 'blob' }).then(function (content) { + saveAs(content, zipFilename); + }); + } + }); + }); + }; + generateZIP(proms); const zip = new JSZip(); zip.file('doc.json', docString); @@ -857,10 +891,10 @@ export namespace Doc { // img.file("smile.gif", imgData, {base64: true}); // Generate the zip file asynchronously - zip.generateAsync({ type: 'blob' }).then((content: any) => { - // Force down of the Zip file - saveAs(content, doc.title + '.zip'); // glr: Possibly change the name of the document to match the title? - }); + // zip.generateAsync({ type: 'blob' }).then((content: any) => { + // // Force down of the Zip file + // saveAs(content, doc.title + '.zip'); // glr: Possibly change the name of the document to match the title? + // }); } // // Determines whether the layout needs to be expanded (as a template). @@ -1530,12 +1564,12 @@ export namespace Doc { formData.append('remap', 'true'); const response = await fetch(upload, { method: 'POST', body: formData }); const json = await response.json(); - console.log(json) + console.log(json); if (json !== 'error') { await DocServer.GetRefFields(json.docids as string[]); const doc = DocCast(await DocServer.GetRefField(json.id)); - console.log("Doc = ", doc, doc?.title); (await DocListCastAsync(doc?.LINKS))?.forEach(link => LinkManager.Instance.addLink(link)); + doc.LINKS = undefined; return doc; } } diff --git a/src/server/ApiManagers/UploadManager.ts b/src/server/ApiManagers/UploadManager.ts index 5da3dfd3f..6e28268a9 100644 --- a/src/server/ApiManagers/UploadManager.ts +++ b/src/server/ApiManagers/UploadManager.ts @@ -235,18 +235,22 @@ export default class UploadManager extends ApiManager { const path_2 = Array.isArray(f) ? 
'' : f.path; const zip = new AdmZip(path_2); zip.getEntries().forEach((entry: any) => { - if (!entry.entryName.startsWith('files/')) return; - let directory = dirname(entry.entryName) + '/'; - const extension = extname(entry.entryName); - const base = basename(entry.entryName).split('.')[0]; + let entryName = entry.entryName.replace(/%%%/g, '/'); + if (!entryName.startsWith('files/')) { + return; + } + const extension = extname(entryName); + const pathname = publicDirectory + '/' + entry.entryName; + const targetname = publicDirectory + '/' + entryName; try { zip.extractEntryTo(entry.entryName, publicDirectory, true, false); - directory = '/' + directory; - - createReadStream(publicDirectory + directory + base + extension).pipe(createWriteStream(publicDirectory + directory + base + '_o' + extension)); - createReadStream(publicDirectory + directory + base + extension).pipe(createWriteStream(publicDirectory + directory + base + '_s' + extension)); - createReadStream(publicDirectory + directory + base + extension).pipe(createWriteStream(publicDirectory + directory + base + '_m' + extension)); - createReadStream(publicDirectory + directory + base + extension).pipe(createWriteStream(publicDirectory + directory + base + '_l' + extension)); + createReadStream(pathname).pipe(createWriteStream(targetname)); + if (extension !== '.pdf') { + createReadStream(pathname).pipe(createWriteStream(targetname.replace('_o' + extension, '_s' + extension))); + createReadStream(pathname).pipe(createWriteStream(targetname.replace('_o' + extension, '_m' + extension))); + createReadStream(pathname).pipe(createWriteStream(targetname.replace('_o' + extension, '_l' + extension))); + } + unlink(pathname, () => {}); } catch (e) { console.log(e); } @@ -258,7 +262,7 @@ export default class UploadManager extends ApiManager { id = getId(data.id); const docs = Object.keys(datadocs).map(key => datadocs[key]); docs.forEach(mapFn); - docids = docs.map(doc => doc.id) + docids = docs.map(doc => doc.id); await Promise.all( docs.map( (doc: any) => @@ -281,7 +285,7 @@ export default class UploadManager extends ApiManager { unlink(path_2, () => {}); } SolrManager.update(); - res.send(JSON.stringify({id, docids} || 'error')); + res.send(JSON.stringify({ id, docids } || 'error')); } catch (e) { console.log(e); } -- cgit v1.2.3-70-g09d2
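
The /csvData route registered by DataVizManager in the first patch can be exercised from the client roughly as sketched below. This is a minimal sketch, not code from the patches: the helper name fetchCsvData is illustrative, and the payload shape depends on what csvParser returns and on how _success serializes it.

async function fetchCsvData(csvUri: string): Promise<unknown> {
    // The route resolves path.basename(uri) inside the server's csv upload directory,
    // so the uri only needs to end with the name of a previously uploaded CSV file.
    const res = await fetch(`/csvData?uri=${encodeURIComponent(csvUri)}`);
    if (!res.ok) throw new Error(`csvData request failed: ${res.status}`);
    return res.json(); // assumes _success responds with JSON; adjust if it sends plain text
}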
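
The decycler added in the second patch wraps a caller-supplied replacer so JSON.stringify can serialize cyclic Doc graphs, and retrocycle() restores the cycles on parse; Doc.Zip and UploadManager use it exactly this way. Below is a self-contained round-trip sketch under stated assumptions: the Node type and the secret-redacting replacer are illustrative, and the import path is assumed to be relative to src/.

import { decycle, retrocycle } from './decycler/decycler'; // path assumed

interface Node { id: string; secret?: string; parent?: Node; children: Node[] }

const root: Node = { id: 'root', secret: 'do-not-export', children: [] };
const child: Node = { id: 'child', parent: root, children: [] };
root.children.push(child); // cycle: root -> children[0] -> parent -> root

// The domain replacer runs first (here it only drops `secret` and must not copy objects,
// or cycle detection would break); the decycle wrapper then rewrites objects it has
// already visited as { "$ref": "#/..." } JSON-pointer stubs.
const replacer = (key: string, value: unknown) => (key === 'secret' ? undefined : value);

const json = JSON.stringify(root, decycle(replacer)); // no "circular structure" TypeError
const restored = JSON.parse(json, retrocycle()) as Node;
console.log(restored.children[0].parent === restored); // true: object identity is restored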
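
The asset export in the third patch flattens each file URL into a single zip entry name by stripping the origin and encoding path separators as '%%%'; UploadManager reverses that on import before extracting under publicDirectory. A sketch of that escaping pair, pulled out of the patch for clarity (the origin and example path are illustrative):

const toZipEntryName = (href: string, origin: string) =>
    href.replace(origin + '/', '').replace(/\//g, '%%%');
const fromZipEntryName = (entryName: string) => entryName.replace(/%%%/g, '/');

// toZipEntryName('http://localhost:1050/files/images/abc_o.png', 'http://localhost:1050')
//   -> 'files%%%images%%%abc_o.png'
// fromZipEntryName('files%%%images%%%abc_o.png') -> 'files/images/abc_o.png'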