author    bobzel <zzzman@gmail.com>    2023-04-18 10:05:01 -0400
committer bobzel <zzzman@gmail.com>    2023-04-18 10:05:01 -0400
commit    f83e5d34794ef675d4627ecef2ed7042b17b1b06
tree      33eda0dc9dea7b7a8a71487d5cc32514a51aeb4b
parent    57983d95ceeb364e3e0a282daea13035114ddb3f
cleaning up zip/unzip of files
Diffstat (limited to 'src/fields/Doc.ts')
-rw-r--r--  src/fields/Doc.ts  128
1 file changed, 54 insertions, 74 deletions
diff --git a/src/fields/Doc.ts b/src/fields/Doc.ts
index 6c808c145..c5af45262 100644
--- a/src/fields/Doc.ts
+++ b/src/fields/Doc.ts
@@ -705,7 +705,7 @@ export namespace Doc {
if (cloneMap.get(doc[Id])) return cloneMap.get(doc[Id])!;
const copy = new Doc(undefined, true);
cloneMap.set(doc[Id], copy);
- const filter = [...exclusions, ...Cast(doc.cloneFieldFilter, listSpec('string'), [])];
+ const filter = [...exclusions, ...StrListCast(doc.cloneFieldFilter)];
await Promise.all(
Object.keys(doc).map(async key => {
if (filter.includes(key)) return;
@@ -725,15 +725,13 @@ export namespace Doc {
const docidsearch = new RegExp('(' + DocsInTextFieldIds.map(exp => '(' + exp + ')').join('|') + ')":"([a-z-A-Z0-9_]*)"', 'g');
const rawdocids = field.Data.match(docidsearch);
const docids = rawdocids?.map((str: string) =>
- DocsInTextFieldIds.reduce((output, exp) => {
- return output.replace(new RegExp(`${exp}":`, 'g'), '');
- }, str)
+ DocsInTextFieldIds.reduce((output, exp) => output.replace(new RegExp(`${exp}":`, 'g'), ''), str)
.replace(/"/g, '')
.trim()
);
const results = docids && (await DocServer.GetRefFields(docids));
const docs = results && Array.from(Object.keys(results)).map(key => DocCast(results[key]));
- docs && docs.map(doc => Doc.makeClone(doc, cloneMap, linkMap, rtfs, exclusions, cloneLinks));
+ docs?.map(doc => doc && Doc.makeClone(doc, cloneMap, linkMap, rtfs, exclusions, cloneLinks));
rtfs.push({ copy, key, field });
}
}
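Note: a minimal sketch of what the docid extraction in the hunk above produces, assuming DocsInTextFieldIds held a single hypothetical key 'docid' (the real key list lives elsewhere in the codebase) and a RichTextField whose Data embeds one document reference; names and data are illustrative only:

    const DocsInTextFieldIds = ['docid'];                         // hypothetical value, for illustration
    const data = '{"attrs":{"docid":"abc-123"}}';                 // hypothetical RichTextField Data
    const docidsearch = new RegExp('(' + DocsInTextFieldIds.map(exp => '(' + exp + ')').join('|') + ')":"([a-z-A-Z0-9_]*)"', 'g');
    const rawdocids = data.match(docidsearch);                    // ['docid":"abc-123"']
    const docids = rawdocids?.map(str =>
        DocsInTextFieldIds.reduce((output, exp) => output.replace(new RegExp(`${exp}":`, 'g'), ''), str)
            .replace(/"/g, '')
            .trim()
    );                                                            // ['abc-123']

The extracted ids are then resolved via DocServer.GetRefFields and each resulting Doc is cloned recursively, as in the hunk above.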
@@ -741,7 +739,7 @@ export namespace Doc {
};
const docAtKey = doc[key];
if (docAtKey instanceof Doc) {
- if (!Doc.IsSystem(docAtKey) && (key.startsWith('layout') || key === 'annotationOn' || key === 'proto' || ((key === 'anchor1' || key === 'anchor2') && doc.author === Doc.CurrentUserEmail))) {
+ if (!Doc.IsSystem(docAtKey) && (key.startsWith('layout') || ['context', 'annotationOn', 'proto'].includes(key) || ((key === 'anchor1' || key === 'anchor2') && doc.author === Doc.CurrentUserEmail))) {
assignKey(await Doc.makeClone(docAtKey, cloneMap, linkMap, rtfs, exclusions, cloneLinks));
} else {
assignKey(docAtKey);
@@ -749,8 +747,7 @@ export namespace Doc {
} else if (field instanceof RefField) {
assignKey(field);
} else if (cfield instanceof ComputedField) {
- assignKey(cfield[Copy]());
- // ComputedField.MakeFunction(cfield.script.originalScript));
+ assignKey(cfield[Copy]()); // ComputedField.MakeFunction(cfield.script.originalScript));
} else if (field instanceof ObjectField) {
await copyObjectField(field);
} else if (field instanceof Promise) {
@@ -804,8 +801,6 @@ export namespace Doc {
const copy = await Doc.makeClone(doc, cloneMap, linkMap, rtfMap, ['cloneOf'], cloneLinks);
const repaired = new Set<Doc>();
const linkedDocs = Array.from(linkMap.values());
- const clonedDocs = [...Array.from(cloneMap.values()), ...linkedDocs];
- clonedDocs.map(clone => Doc.repairClone(clone, cloneMap, repaired));
linkedDocs.map((link: Doc) => LinkManager.Instance.addLink(link, true));
rtfMap.map(({ copy, key, field }) => {
const replacer = (match: any, attr: string, id: string, offset: any, string: any) => {
@@ -816,20 +811,16 @@ export namespace Doc {
const mapped = cloneMap.get(id);
return href + (mapped ? mapped[Id] : id);
};
- const regex = `(${Doc.localServerPath()})([^"]*)`;
- const re = new RegExp(regex, 'g');
+ const re = new RegExp(`(${Doc.localServerPath()})([^"]*)`, 'g');
const docidsearch = new RegExp('(' + DocsInTextFieldIds.map(exp => `"${exp}":`).join('|') + ')"([^"]+)"', 'g');
copy[key] = new RichTextField(field.Data.replace(docidsearch, replacer).replace(re, replacer2), field.Text);
});
+ const clonedDocs = [...Array.from(cloneMap.values()), ...linkedDocs];
+ clonedDocs.map(clone => Doc.repairClone(clone, cloneMap, repaired));
return { clone: copy, map: cloneMap, linkMap };
}
- export async function Zip(doc: Doc) {
- // const a = document.createElement("a");
- // const url = Utils.prepend(`/downloadId/${this.props.Document[Id]}`);
- // a.href = url;
- // a.download = `DocExport-${this.props.Document[Id]}.zip`;
- // a.click();
+ export async function Zip(doc: Doc, zipFilename = 'dashExport.zip') {
const { clone, map, linkMap } = await Doc.MakeClone(doc);
clone.LINKS = new List<Doc>(Array.from(linkMap.values()));
const proms = [] as string[];
@@ -837,76 +828,61 @@ export namespace Doc {
if (key && ['branchOf', 'cloneOf', 'cursors'].includes(key)) return undefined;
else if (value instanceof Doc) {
if (key !== 'field' && Number.isNaN(Number(key))) {
- const __fields = value[FieldsSym]();
- return { id: value[Id], __type: 'Doc', fields: __fields };
- } else {
- return { fieldId: value[Id], __type: 'proxy' };
+ return { id: value[Id], __type: 'Doc', fields: value[FieldsSym]() };
}
- } else if (value instanceof ScriptField) return { script: value.script, __type: 'script' };
- else if (value instanceof RichTextField) return { Data: value.Data, Text: value.Text, __type: 'RichTextField' };
- else if (value instanceof ImageField) {
+ return { fieldId: value[Id], __type: 'proxy' };
+ } else if (value instanceof ImageField) {
const extension = value.url.href.replace(/.*\./, '');
proms.push(value.url.href.replace('.' + extension, '_o.' + extension));
return { url: value.url.href, __type: 'image' };
} else if (value instanceof PdfField) {
proms.push(value.url.href);
return { url: value.url.href, __type: 'pdf' };
- } else if (value instanceof AudioField) return { url: value.url.href, __type: 'audio' };
- else if (value instanceof VideoField) return { url: value.url.href, __type: 'video' };
+ } else if (value instanceof AudioField) {
+ proms.push(value.url.href);
+ return { url: value.url.href, __type: 'audio' };
+ } else if (value instanceof VideoField) {
+ proms.push(value.url.href);
+ return { url: value.url.href, __type: 'video' };
+ } else if (value instanceof ScriptField) return { script: value.script, __type: 'script' };
+ else if (value instanceof RichTextField) return { Data: value.Data, Text: value.Text, __type: 'RichTextField' };
else if (value instanceof WebField) return { url: value.url.href, __type: 'web' };
else if (value instanceof MapField) return { url: value.url.href, __type: 'map' };
else if (value instanceof DateField) return { date: value.toString(), __type: 'date' };
else if (value instanceof ProxyField) return { fieldId: value.fieldId, __type: 'proxy' };
else if (value instanceof Array && key !== 'fields') return { fields: value, __type: 'list' };
else if (value instanceof ComputedField) return { script: value.script, __type: 'computed' };
- else return value;
+ return value;
}
const docs: { [id: string]: any } = {};
Array.from(map.entries()).forEach(f => (docs[f[0]] = f[1]));
- const docString = JSON.stringify({ id: clone[Id], docs }, decycle(replacer));
-
- let generateZIP = (proms: string[]) => {
- var zip = new JSZip();
- var count = 0;
- var zipFilename = 'dashExport.zip';
-
- proms
- .filter(url => url.startsWith(window.location.origin))
- .forEach((url, i) => {
- var filename = proms[i].replace(window.location.origin + '/', '').replace(/\//g, '%%%');
- // loading a file and add it in a zip file
- JSZipUtils.getBinaryContent(url, function (err: any, data: any) {
- if (err) {
- throw err; // or handle the error
- }
- zip.file(filename, data, { binary: true });
- count++;
- if (count == proms.length) {
- zip.file('doc.json', docString);
- zip.generateAsync({ type: 'blob' }).then(function (content) {
- saveAs(content, zipFilename);
- });
- }
- });
- });
- };
- generateZIP(proms);
- const zip = new JSZip();
-
- zip.file('doc.json', docString);
-
- // // Generate a directory within the Zip file structure
- // var img = zip.folder("images");
-
- // // Add a file to the directory, in this case an image with data URI as contents
- // img.file("smile.gif", imgData, {base64: true});
+ const jsonDocs = JSON.stringify({ id: clone[Id], docs }, decycle(replacer));
- // Generate the zip file asynchronously
- zip.generateAsync({ type: 'blob' }).then((content: any) => {
- // Force down of the Zip file
- saveAs(content, doc.title + '.zip'); // glr: Possibly change the name of the document to match the title?
- });
+ const zip = new JSZip();
+ var count = 0;
+ proms
+ .filter(url => url.startsWith(window.location.origin))
+ .forEach((url, i) => {
+ // loading a file and add it in a zip file
+ JSZipUtils.getBinaryContent(url, (err: any, data: any) => {
+ if (err) throw err; // or handle the error
+ // // Generate a directory within the Zip file structure
+ // const assets = zip.folder("assets");
+ // assets.file(filename, data, {binary: true});
+ const assetPathOnServer = proms[i].replace(window.location.origin + '/', '').replace(/\//g, '%%%');
+ zip.file(assetPathOnServer, data, { binary: true });
+ if (++count == proms.length) {
+ zip.file('doc.json', jsonDocs);
+ zip.generateAsync({ type: 'blob' }).then(content => saveAs(content, zipFilename));
+ // const a = document.createElement("a");
+ // const url = Utils.prepend(`/downloadId/${this.props.Document[Id]}`);
+ // a.href = url;
+ // a.download = `DocExport-${this.props.Document[Id]}.zip`;
+ // a.click();
+ }
+ });
+ });
}
const _pendingMap: Map<string, boolean> = new Map();
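Note: a minimal usage sketch of the reworked export path, assuming a Doc instance named selectedDoc (hypothetical); the zipFilename parameter and the archive contents follow from the Zip implementation above:

    // Bundle the cloned document graph plus any referenced image/pdf/audio/video assets into one zip.
    // doc.json holds the serialized docs map: { id: <clone id>, docs: { <docId>: { id, __type: 'Doc', fields: {...} } } },
    // and asset entries are keyed by their server path with '/' replaced by '%%%' (see zip.file above).
    await Doc.Zip(selectedDoc, `DocExport-${selectedDoc[Id]}.zip`);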
@@ -963,7 +939,7 @@ export namespace Doc {
// otherwise, it just returns the childDoc
export function GetLayoutDataDocPair(containerDoc: Doc, containerDataDoc: Opt<Doc>, childDoc: Doc) {
if (!childDoc || childDoc instanceof Promise || !Doc.GetProto(childDoc)) {
- console.log('No, no, no!');
+ console.log('Warning: GetLayoutDataDocPair childDoc not defined');
return { layout: childDoc, data: childDoc };
}
const resolvedDataDoc = Doc.AreProtosEqual(containerDataDoc, containerDoc) || (!childDoc.isTemplateDoc && !childDoc.isTemplateForField) ? undefined : containerDataDoc;
@@ -1553,15 +1529,19 @@ export namespace Doc {
}
}
- export async function importDocument(file: File) {
+ ///
+ // imports a previously exported zip file which contains a set of documents and their assets (eg, images, videos)
+ // the 'remap' parameter determines whether the ids of the documents loaded should be kept as they were, or remapped to new ids
+ // If they are not remapped, loading the file will overwrite any existing documents with those ids
+ //
+ export async function importDocument(file: File, remap = false) {
const upload = Utils.prepend('/uploadDoc');
const formData = new FormData();
if (file) {
formData.append('file', file);
- formData.append('remap', 'true');
+ formData.append('remap', remap.toString());
const response = await fetch(upload, { method: 'POST', body: formData });
const json = await response.json();
- console.log(json);
if (json !== 'error') {
await DocServer.GetRefFields(json.docids as string[]);
const doc = DocCast(await DocServer.GetRefField(json.id));
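Note: a matching import-side sketch, assuming the zip comes from a hypothetical file picker (importFromPicker and fileInput are illustrative, not part of the patch):

    // Re-import a previously exported zip; with remap=true the uploaded docs receive fresh ids,
    // so documents already on the server with the original ids are not overwritten.
    async function importFromPicker(fileInput: HTMLInputElement) {
        const picked = fileInput.files?.[0];
        return picked && Doc.importDocument(picked, /* remap */ true);
    }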