path: root/src/fields/Doc.ts
Diffstat (limited to 'src/fields/Doc.ts')
-rw-r--r--   src/fields/Doc.ts   264
1 file changed, 122 insertions(+), 142 deletions(-)
diff --git a/src/fields/Doc.ts b/src/fields/Doc.ts
index 68279372a..7cc7e0d2a 100644
--- a/src/fields/Doc.ts
+++ b/src/fields/Doc.ts
@@ -14,8 +14,8 @@ import { decycle } from '../decycler/decycler';
import { DashColor, incrementTitleCopy, intersectRect, Utils } from '../Utils';
import { DateField } from './DateField';
import { Copy, HandleUpdate, Id, OnUpdate, Parent, Self, SelfProxy, ToScriptString, ToString, Update } from './FieldSymbols';
-import { InkTool } from './InkField';
-import { List } from './List';
+import { InkField, InkTool } from './InkField';
+import { List, ListFieldName } from './List';
import { ObjectField } from './ObjectField';
import { PrefetchProxy, ProxyField } from './Proxy';
import { FieldId, RefField } from './RefField';
@@ -23,15 +23,19 @@ import { RichTextField } from './RichTextField';
import { listSpec } from './Schema';
import { ComputedField, ScriptField } from './ScriptField';
import { Cast, DocCast, FieldValue, NumCast, StrCast, ToConstructor } from './Types';
-import { AudioField, ImageField, MapField, PdfField, VideoField, WebField } from './URLField';
+import { AudioField, CsvField, ImageField, PdfField, VideoField, WebField } from './URLField';
import { deleteProperty, GetEffectiveAcl, getField, getter, makeEditable, makeReadOnly, normalizeEmail, setter, SharingPermissions, updateFunction } from './util';
import JSZip = require('jszip');
import * as JSZipUtils from '../JSZipUtils';
export namespace Field {
export function toKeyValueString(doc: Doc, key: string): string {
- const onDelegate = Object.keys(doc).includes(key);
+ const onDelegate = Object.keys(doc).includes(key.replace(/^_/, ''));
const field = ComputedField.WithoutComputed(() => FieldValue(doc[key]));
- return !Field.IsField(field) ? '' : (onDelegate ? '=' : '') + (field instanceof ComputedField ? `:=${field.script.originalScript}` : Field.toScriptString(field));
+ return !Field.IsField(field)
+ ? key.startsWith('_')
+ ? '='
+ : ''
+ : (onDelegate ? '=' : '') + (field instanceof ComputedField ? `:=${field.script.originalScript}` : field instanceof ScriptField ? `$=${field.script.originalScript}` : Field.toScriptString(field));
}
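(For reference, a rough sketch of the strings the rewritten toKeyValueString produces; the field names and values below are illustrative, not from this commit:)
// '='  prefix -> value stored on the delegate (or an underscore-prefixed key with no field)
// ':=' prefix -> ComputedField, printed as its original script
// '$=' prefix -> ScriptField, printed as its original script (new in this diff)
// e.g. Field.toKeyValueString(doc, '_width') might yield '=150'
//      Field.toKeyValueString(doc, 'area')   might yield ':=this.width * this.height'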
export function toScriptString(field: Field): string {
switch (typeof field) {
@@ -96,6 +100,7 @@ export const HighlightSym = Symbol('Highlight');
export const DataSym = Symbol('Data');
export const LayoutSym = Symbol('Layout');
export const FieldsSym = Symbol('Fields');
+export const CssSym = Symbol('Css');
export const AclSym = Symbol('Acl');
export const DirectLinksSym = Symbol('DirectLinks');
export const AclUnset = Symbol('AclUnset');
@@ -337,6 +342,7 @@ export class Doc extends RefField {
@observable private ___fieldKeys: any = {};
/// all of the raw acl's that have been set on this document. Use GetEffectiveAcl to determine the actual ACL of the doc for editing
@observable public [AclSym]: { [key: string]: symbol } = {};
+ @observable public [CssSym]: number = 0; // incrementer denoting a change to CSS layout
@observable public [DirectLinksSym]: Set<Doc> = new Set();
@observable public [AnimationSym]: Opt<Doc>;
@observable public [HighlightSym]: boolean = false;
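(A minimal sketch of how the new CssSym counter could be consumed; the reaction below is illustrative and assumes MobX, which this class already uses for its @observable fields:)
import { reaction, runInAction } from 'mobx';
// hypothetical consumer: re-run style computation whenever the counter is bumped
const disposeCssWatch = reaction(
    () => someDoc[CssSym],                    // 'someDoc' is an illustrative Doc instance
    () => { /* recompute CSS-dependent layout for views of someDoc */ }
);
runInAction(() => someDoc[CssSym]++);         // signal a CSS layout change to observers
disposeCssWatch();                            // dispose when the observing view unmounts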
@@ -694,12 +700,12 @@ export namespace Doc {
// this lists out all the tag ids that can be in a RichTextField that might contain document ids.
// if a document is cloned, we need to make sure to clone all of these referenced documents as well;
export const DocsInTextFieldIds = ['audioId', 'textId', 'anchorId', 'docId'];
- export async function makeClone(doc: Doc, cloneMap: Map<string, Doc>, linkMap: Map<string, Doc>, rtfs: { copy: Doc; key: string; field: RichTextField }[], exclusions: string[], cloneLinks: boolean): Promise<Doc> {
+ export async function makeClone(doc: Doc, cloneMap: Map<string, Doc>, linkMap: Map<string, Doc>, rtfs: { copy: Doc; key: string; field: RichTextField }[], exclusions: string[], pruneDocs: Doc[], cloneLinks: boolean): Promise<Doc> {
if (Doc.IsBaseProto(doc)) return doc;
if (cloneMap.get(doc[Id])) return cloneMap.get(doc[Id])!;
const copy = new Doc(undefined, true);
cloneMap.set(doc[Id], copy);
- const filter = [...exclusions, ...Cast(doc.cloneFieldFilter, listSpec('string'), [])];
+ const filter = [...exclusions, ...StrListCast(doc.cloneFieldFilter)];
await Promise.all(
Object.keys(doc).map(async key => {
if (filter.includes(key)) return;
@@ -710,7 +716,7 @@ export namespace Doc {
const list = await Cast(doc[key], listSpec(Doc));
const docs = list && (await DocListCastAsync(list))?.filter(d => d instanceof Doc);
if (docs !== undefined && docs.length) {
- const clones = await Promise.all(docs.map(async d => Doc.makeClone(d, cloneMap, linkMap, rtfs, exclusions, cloneLinks)));
+ const clones = await Promise.all(docs.map(async d => Doc.makeClone(d, cloneMap, linkMap, rtfs, exclusions, pruneDocs, cloneLinks)));
assignKey(new List<Doc>(clones));
} else {
assignKey(ObjectField.MakeCopy(field));
@@ -719,15 +725,13 @@ export namespace Doc {
const docidsearch = new RegExp('(' + DocsInTextFieldIds.map(exp => '(' + exp + ')').join('|') + ')":"([a-z-A-Z0-9_]*)"', 'g');
const rawdocids = field.Data.match(docidsearch);
const docids = rawdocids?.map((str: string) =>
- DocsInTextFieldIds.reduce((output, exp) => {
- return output.replace(new RegExp(`${exp}":`, 'g'), '');
- }, str)
+ DocsInTextFieldIds.reduce((output, exp) => output.replace(new RegExp(`${exp}":`, 'g'), ''), str)
.replace(/"/g, '')
.trim()
);
const results = docids && (await DocServer.GetRefFields(docids));
const docs = results && Array.from(Object.keys(results)).map(key => DocCast(results[key]));
- docs && docs.map(doc => Doc.makeClone(doc, cloneMap, linkMap, rtfs, exclusions, cloneLinks));
+ docs?.map(doc => doc && Doc.makeClone(doc, cloneMap, linkMap, rtfs, exclusions, pruneDocs, cloneLinks));
rtfs.push({ copy, key, field });
}
}
@@ -735,16 +739,17 @@ export namespace Doc {
};
const docAtKey = doc[key];
if (docAtKey instanceof Doc) {
- if (!Doc.IsSystem(docAtKey) && (key.startsWith('layout') || key === 'annotationOn' || key === 'proto' || ((key === 'anchor1' || key === 'anchor2') && doc.author === Doc.CurrentUserEmail))) {
- assignKey(await Doc.makeClone(docAtKey, cloneMap, linkMap, rtfs, exclusions, cloneLinks));
+ if (pruneDocs.includes(docAtKey)) {
+ // prune doc and do nothing
+ } else if (!Doc.IsSystem(docAtKey) && (key.startsWith('layout') || ['context', 'annotationOn', 'proto'].includes(key) || ((key === 'anchor1' || key === 'anchor2') && doc.author === Doc.CurrentUserEmail))) {
+ assignKey(await Doc.makeClone(docAtKey, cloneMap, linkMap, rtfs, exclusions, pruneDocs, cloneLinks));
} else {
assignKey(docAtKey);
}
} else if (field instanceof RefField) {
assignKey(field);
} else if (cfield instanceof ComputedField) {
- assignKey(cfield[Copy]());
- // ComputedField.MakeFunction(cfield.script.originalScript));
+ assignKey(cfield[Copy]()); // ComputedField.MakeFunction(cfield.script.originalScript));
} else if (field instanceof ObjectField) {
await copyObjectField(field);
} else if (field instanceof Promise) {
@@ -759,7 +764,7 @@ export namespace Doc {
cloneLinks ||
((cloneMap.has(DocCast(link.anchor1)?.[Id]) || cloneMap.has(DocCast(DocCast(link.anchor1)?.annotationOn)?.[Id])) && (cloneMap.has(DocCast(link.anchor2)?.[Id]) || cloneMap.has(DocCast(DocCast(link.anchor2)?.annotationOn)?.[Id])))
) {
- linkMap.set(link[Id], await Doc.makeClone(link, cloneMap, linkMap, rtfs, exclusions, cloneLinks));
+ linkMap.set(link[Id], await Doc.makeClone(link, cloneMap, linkMap, rtfs, exclusions, pruneDocs, cloneLinks));
}
});
Doc.SetInPlace(copy, 'title', 'CLONE: ' + doc.title, true);
@@ -795,11 +800,9 @@ export namespace Doc {
export async function MakeClone(doc: Doc, cloneLinks = true, cloneMap: Map<string, Doc> = new Map()) {
const linkMap = new Map<string, Doc>();
const rtfMap: { copy: Doc; key: string; field: RichTextField }[] = [];
- const copy = await Doc.makeClone(doc, cloneMap, linkMap, rtfMap, ['cloneOf'], cloneLinks);
+ const copy = await Doc.makeClone(doc, cloneMap, linkMap, rtfMap, ['cloneOf'], doc.context ? [DocCast(doc.context)] : [], cloneLinks);
const repaired = new Set<Doc>();
const linkedDocs = Array.from(linkMap.values());
- const clonedDocs = [...Array.from(cloneMap.values()), ...linkedDocs];
- clonedDocs.map(clone => Doc.repairClone(clone, cloneMap, repaired));
linkedDocs.map((link: Doc) => LinkManager.Instance.addLink(link, true));
rtfMap.map(({ copy, key, field }) => {
const replacer = (match: any, attr: string, id: string, offset: any, string: any) => {
@@ -810,104 +813,80 @@ export namespace Doc {
const mapped = cloneMap.get(id);
return href + (mapped ? mapped[Id] : id);
};
- const regex = `(${Doc.localServerPath()})([^"]*)`;
- const re = new RegExp(regex, 'g');
+ const re = new RegExp(`(${Doc.localServerPath()})([^"]*)`, 'g');
const docidsearch = new RegExp('(' + DocsInTextFieldIds.map(exp => `"${exp}":`).join('|') + ')"([^"]+)"', 'g');
copy[key] = new RichTextField(field.Data.replace(docidsearch, replacer).replace(re, replacer2), field.Text);
});
+ const clonedDocs = [...Array.from(cloneMap.values()), ...linkedDocs];
+ clonedDocs.map(clone => Doc.repairClone(clone, cloneMap, repaired));
return { clone: copy, map: cloneMap, linkMap };
}
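(A usage sketch of the revised clone entry point; 'myDoc' is illustrative. MakeClone now seeds pruneDocs with the doc's context so the containing collection is referenced rather than deep-cloned, and repairClone now runs after the RichTextField ids have been remapped:)
const { clone, map, linkMap } = await Doc.MakeClone(myDoc);
console.log(`cloned ${map.size} docs and ${linkMap.size} links as`, clone[Id]);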
- export async function Zip(doc: Doc) {
- // const a = document.createElement("a");
- // const url = Utils.prepend(`/downloadId/${this.props.Document[Id]}`);
- // a.href = url;
- // a.download = `DocExport-${this.props.Document[Id]}.zip`;
- // a.click();
+ export async function Zip(doc: Doc, zipFilename = 'dashExport.zip') {
const { clone, map, linkMap } = await Doc.MakeClone(doc);
- clone.LINKS = new List<Doc>(Array.from(linkMap.values()));
- const proms = [] as string[];
+ const proms = new Set<string>();
function replacer(key: any, value: any) {
if (key && ['branchOf', 'cloneOf', 'cursors'].includes(key)) return undefined;
- else if (value instanceof Doc) {
- if (key !== 'field' && Number.isNaN(Number(key))) {
- const __fields = value[FieldsSym]();
- return { id: value[Id], __type: 'Doc', fields: __fields };
- } else {
- return { fieldId: value[Id], __type: 'proxy' };
- }
- } else if (value instanceof ScriptField) return { script: value.script, __type: 'script' };
- else if (value instanceof RichTextField) return { Data: value.Data, Text: value.Text, __type: 'RichTextField' };
- else if (value instanceof ImageField) {
+ if (value instanceof ImageField) {
const extension = value.url.href.replace(/.*\./, '');
- proms.push(value.url.href.replace('.' + extension, '_o.' + extension));
- return { url: value.url.href, __type: 'image' };
- } else if (value instanceof PdfField) {
- proms.push(value.url.href);
- return { url: value.url.href, __type: 'pdf' };
- } else if (value instanceof AudioField) return { url: value.url.href, __type: 'audio' };
- else if (value instanceof VideoField) return { url: value.url.href, __type: 'video' };
- else if (value instanceof WebField) return { url: value.url.href, __type: 'web' };
- else if (value instanceof MapField) return { url: value.url.href, __type: 'map' };
- else if (value instanceof DateField) return { date: value.toString(), __type: 'date' };
- else if (value instanceof ProxyField) return { fieldId: value.fieldId, __type: 'proxy' };
- else if (value instanceof Array && key !== 'fields') return { fields: value, __type: 'list' };
- else if (value instanceof ComputedField) return { script: value.script, __type: 'computed' };
- else return value;
+ proms.add(value.url.href.replace('.' + extension, '_o.' + extension));
+ return SerializationHelper.Serialize(value);
+ }
+ if (value instanceof PdfField || value instanceof AudioField || value instanceof VideoField) {
+ proms.add(value.url.href);
+ return SerializationHelper.Serialize(value);
+ }
+ if (
+ value instanceof Doc ||
+ value instanceof ScriptField ||
+ value instanceof RichTextField ||
+ value instanceof InkField ||
+ value instanceof CsvField ||
+ value instanceof WebField ||
+ value instanceof DateField ||
+ value instanceof ProxyField ||
+ value instanceof ComputedField
+ ) {
+ return SerializationHelper.Serialize(value);
+ }
+ if (value instanceof Array && key !== ListFieldName && key !== InkField.InkDataFieldName) return { fields: value, __type: 'list' };
+ return value;
}
const docs: { [id: string]: any } = {};
+ const links: { [id: string]: any } = {};
Array.from(map.entries()).forEach(f => (docs[f[0]] = f[1]));
- const docString = JSON.stringify({ id: clone[Id], docs }, decycle(replacer));
-
- let generateZIP = (proms: string[]) => {
- var zip = new JSZip();
- var count = 0;
- var zipFilename = 'dashExport.zip';
-
- proms
- .filter(url => url.startsWith(window.location.origin))
- .forEach((url, i) => {
- var filename = proms[i].replace(window.location.origin + '/', '').replace(/\//g, '%%%');
- // loading a file and add it in a zip file
- JSZipUtils.getBinaryContent(url, function (err: any, data: any) {
- if (err) {
- throw err; // or handle the error
- }
- zip.file(filename, data, { binary: true });
- count++;
- if (count == proms.length) {
- zip.file('doc.json', docString);
- zip.generateAsync({ type: 'blob' }).then(function (content) {
- saveAs(content, zipFilename);
- });
- }
- });
- });
- };
- generateZIP(proms);
- const zip = new JSZip();
-
- zip.file('doc.json', docString);
-
- // // Generate a directory within the Zip file structure
- // var img = zip.folder("images");
-
- // // Add a file to the directory, in this case an image with data URI as contents
- // img.file("smile.gif", imgData, {base64: true});
+ Array.from(linkMap.entries()).forEach(l => (links[l[0]] = l[1]));
+ const jsonDocs = JSON.stringify({ id: clone[Id], docs, links }, decycle(replacer));
- // Generate the zip file asynchronously
- zip.generateAsync({ type: 'blob' }).then((content: any) => {
- // Force down of the Zip file
- saveAs(content, doc.title + '.zip'); // glr: Possibly change the name of the document to match the title?
- });
- }
- //
- // Determines whether the layout needs to be expanded (as a template).
- // template expansion is rquired when the layout is a template doc/field and there's a datadoc which isn't equal to the layout template
- //
- export function WillExpandTemplateLayout(layoutDoc: Doc, dataDoc?: Doc) {
- return (layoutDoc.isTemplateForField || layoutDoc.isTemplateDoc) && dataDoc && layoutDoc !== dataDoc;
+ const zip = new JSZip();
+ var count = 0;
+ const promArr = Array.from(proms).filter(url => url.startsWith(window.location.origin));
+ if (!promArr.length) {
+ zip.file('docs.json', jsonDocs);
+ zip.generateAsync({ type: 'blob' }).then(content => saveAs(content, zipFilename));
+ } else
+ promArr.forEach((url, i) => {
+ // load the referenced asset and add it to the zip file
+ JSZipUtils.getBinaryContent(url, (err: any, data: any) => {
+ if (err) throw err; // or handle the error
+ // // Generate a directory within the Zip file structure
+ // const assets = zip.folder("assets");
+ // assets.file(filename, data, {binary: true});
+ const assetPathOnServer = promArr[i].replace(window.location.origin + '/', '').replace(/\//g, '%%%');
+ zip.file(assetPathOnServer, data, { binary: true });
+ console.log(' => ' + url);
+ if (++count === promArr.length) {
+ zip.file('docs.json', jsonDocs);
+ zip.generateAsync({ type: 'blob' }).then(content => saveAs(content, zipFilename));
+ // const a = document.createElement("a");
+ // const url = Utils.prepend(`/downloadId/${this.props.Document[Id]}`);
+ // a.href = url;
+ // a.download = `DocExport-${this.props.Document[Id]}.zip`;
+ // a.click();
+ }
+ });
+ });
}
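(A usage sketch of the reworked Zip export; the filename argument is optional and defaults to 'dashExport.zip'. Assets are fetched with JSZipUtils.getBinaryContent and the archive is generated only after the last asset callback fires, per the count check above:)
// exports myDoc (illustrative), its clone graph, and any image/pdf/audio/video assets
await Doc.Zip(myDoc, `${StrCast(myDoc.title, 'dashExport')}.zip`);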
const _pendingMap: Map<string, boolean> = new Map();
@@ -915,44 +894,34 @@ export namespace Doc {
// Returns an expanded template layout for a target data document if there is a template relationship
// between the two. If so, the layoutDoc is expanded into a new document that inherits the properties
// of the original layout while allowing for individual layout properties to be overridden in the expanded layout.
- // templateArgs should be equivalent to the layout key that generates the template since that's where the template parameters are stored in ()'s at the end of the key.
- // NOTE: the template will have references to "@params" -- the template arguments will be assigned to the '@params' field
- // so that when the @params key is accessed, it will be rewritten as the key that is stored in the 'params' field and
- // the derefence will then occur on the rootDocument (the original document).
- // in the future, field references could be written as @<someparam> and then arguments would be passed in the layout key as:
- // layout_mytemplate(somparam=somearg).
- // then any references to @someparam would be rewritten as accesses to 'somearg' on the rootDocument
- export function expandTemplateLayout(templateLayoutDoc: Doc, targetDoc?: Doc, templateArgs?: string) {
- const args = templateArgs?.match(/\(([a-zA-Z0-9._\-]*)\)/)?.[1].replace('()', '') || StrCast(templateLayoutDoc.PARAMS);
- if ((!args && !WillExpandTemplateLayout(templateLayoutDoc, targetDoc)) || !targetDoc) return templateLayoutDoc;
-
- const templateField = StrCast(templateLayoutDoc.isTemplateForField); // the field that the template renders
+ export function expandTemplateLayout(templateLayoutDoc: Doc, targetDoc?: Doc) {
+ // nothing to do if the layout isn't a template or we don't have a target that's different than the template
+ if (!targetDoc || templateLayoutDoc === targetDoc || (!templateLayoutDoc.isTemplateForField && !templateLayoutDoc.isTemplateDoc)) {
+ return templateLayoutDoc;
+ }
+
+ const templateField = StrCast(templateLayoutDoc.isTemplateForField, Doc.LayoutFieldKey(templateLayoutDoc)); // the field that the template renders
// First it checks if an expanded layout already exists -- if so it will be stored on the dataDoc
// using the template layout doc's id as the field key.
// If it doesn't find the expanded layout, then it makes a delegate of the template layout and
// saves it on the data doc indexed by the template layout's id.
//
- const params = args.split('=').length > 1 ? args.split('=')[0] : 'PARAMS';
- const layoutFielddKey = Doc.LayoutFieldKey(templateLayoutDoc);
- const expandedLayoutFieldKey = (templateField || layoutFielddKey) + '-layout[' + templateLayoutDoc[Id] + (args ? `(${args})` : '') + ']';
+ const expandedLayoutFieldKey = templateField + '-layout[' + templateLayoutDoc[Id] + ']';
let expandedTemplateLayout = targetDoc?.[expandedLayoutFieldKey];
if (templateLayoutDoc.resolvedDataDoc instanceof Promise) {
expandedTemplateLayout = undefined;
_pendingMap.set(targetDoc[Id] + expandedLayoutFieldKey, true);
- } else if (expandedTemplateLayout === undefined && !_pendingMap.get(targetDoc[Id] + expandedLayoutFieldKey + args)) {
- if (templateLayoutDoc.resolvedDataDoc === (targetDoc.rootDocument || Doc.GetProto(targetDoc)) && templateLayoutDoc.PARAMS === StrCast(targetDoc.PARAMS)) {
+ } else if (expandedTemplateLayout === undefined && !_pendingMap.get(targetDoc[Id] + expandedLayoutFieldKey)) {
+ if (templateLayoutDoc.resolvedDataDoc === (targetDoc.rootDocument || Doc.GetProto(targetDoc))) {
expandedTemplateLayout = templateLayoutDoc; // reuse an existing template layout if its for the same document with the same params
} else {
templateLayoutDoc.resolvedDataDoc && (templateLayoutDoc = Cast(templateLayoutDoc.proto, Doc, null) || templateLayoutDoc); // if the template has already been applied (ie, a nested template), then use the template's prototype
if (!targetDoc[expandedLayoutFieldKey]) {
- _pendingMap.set(targetDoc[Id] + expandedLayoutFieldKey + args, true);
+ _pendingMap.set(targetDoc[Id] + expandedLayoutFieldKey, true);
setTimeout(
action(() => {
const newLayoutDoc = Doc.MakeDelegate(templateLayoutDoc, undefined, '[' + templateLayoutDoc.title + ']');
- // the template's arguments are stored in params which is derefenced to find
- // the actual field key where the parameterized template data is stored.
- newLayoutDoc[params] = args !== '...' ? args : ''; // ... signifies the layout has sub template(s) -- so we have to expand the layout for them so that they can get the correct 'rootDocument' field, but we don't need to reassign their params. it would be better if the 'rootDocument' field could be passed dynamically to avoid have to create instances
newLayoutDoc.rootDocument = targetDoc;
const dataDoc = Doc.GetProto(targetDoc);
newLayoutDoc.resolvedDataDoc = dataDoc;
@@ -961,7 +930,7 @@ export namespace Doc {
}
targetDoc[expandedLayoutFieldKey] = newLayoutDoc;
- _pendingMap.delete(targetDoc[Id] + expandedLayoutFieldKey + args);
+ _pendingMap.delete(targetDoc[Id] + expandedLayoutFieldKey);
})
);
}
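(A sketch of the caching scheme implemented above, now that the parameterized '(args)' suffix has been dropped from the key; the doc variables are illustrative:)
// the expanded layout is cached on the target under a key derived from the template's id
const templateField = StrCast(templateLayoutDoc.isTemplateForField, Doc.LayoutFieldKey(templateLayoutDoc));
const expandedLayoutFieldKey = templateField + '-layout[' + templateLayoutDoc[Id] + ']';
const cachedExpansion = targetDoc[expandedLayoutFieldKey]; // reused instead of making a new delegate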
@@ -974,11 +943,11 @@ export namespace Doc {
// otherwise, it just returns the childDoc
export function GetLayoutDataDocPair(containerDoc: Doc, containerDataDoc: Opt<Doc>, childDoc: Doc) {
if (!childDoc || childDoc instanceof Promise || !Doc.GetProto(childDoc)) {
- console.log('No, no, no!');
+ console.log('Warning: GetLayoutDataDocPair childDoc not defined');
return { layout: childDoc, data: childDoc };
}
- const resolvedDataDoc = Doc.AreProtosEqual(containerDataDoc, containerDoc) || (!childDoc.isTemplateDoc && !childDoc.isTemplateForField && !childDoc.PARAMS) ? undefined : containerDataDoc;
- return { layout: Doc.expandTemplateLayout(childDoc, resolvedDataDoc, '(' + StrCast(containerDoc.PARAMS) + ')'), data: resolvedDataDoc };
+ const resolvedDataDoc = Doc.AreProtosEqual(containerDataDoc, containerDoc) || (!childDoc.isTemplateDoc && !childDoc.isTemplateForField) ? undefined : containerDataDoc;
+ return { layout: Doc.expandTemplateLayout(childDoc, resolvedDataDoc), data: resolvedDataDoc };
}
export function Overwrite(doc: Doc, overwrite: Doc, copyProto: boolean = false): Doc {
@@ -1209,7 +1178,7 @@ export namespace Doc {
return doc[StrCast(doc.layoutKey, 'layout')];
}
export function LayoutFieldKey(doc: Doc): string {
- return StrCast(Doc.Layout(doc).layout).split("'")[1];
+ return StrCast(Doc.Layout(doc).layout).split("'")[1]; // bcz: TODO check on this. Used to always reference 'layout'; now it uses the layout specified by the current layoutKey
}
export function NativeAspect(doc: Doc, dataDoc?: Doc, useDim?: boolean) {
return Doc.NativeWidth(doc, dataDoc, useDim) / (Doc.NativeHeight(doc, dataDoc, useDim) || 1);
@@ -1218,9 +1187,10 @@ export namespace Doc {
return !doc ? 0 : NumCast(doc._nativeWidth, NumCast((dataDoc || doc)[Doc.LayoutFieldKey(doc) + '-nativeWidth'], useWidth ? doc[WidthSym]() : 0));
}
export function NativeHeight(doc?: Doc, dataDoc?: Doc, useHeight?: boolean) {
- const dheight = doc ? NumCast((dataDoc || doc)[Doc.LayoutFieldKey(doc) + '-nativeHeight'], useHeight ? doc[HeightSym]() : 0) : 0;
- const nheight = doc ? (Doc.NativeWidth(doc, dataDoc, useHeight) * doc[HeightSym]()) / doc[WidthSym]() : 0;
- return !doc ? 0 : NumCast(doc._nativeHeight, nheight || dheight);
+ if (!doc) return 0;
+ const nheight = (Doc.NativeWidth(doc, dataDoc, useHeight) * doc[HeightSym]()) / doc[WidthSym]();
+ const dheight = NumCast((dataDoc || doc)[Doc.LayoutFieldKey(doc) + '-nativeHeight'], useHeight ? doc[HeightSym]() : 0);
+ return NumCast(doc._nativeHeight, nheight || dheight);
}
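(A worked example of the reordered fallback above, with illustrative numbers: _nativeHeight wins if set; otherwise the height derived from the native width and the doc's current aspect ratio; otherwise the '<layoutFieldKey>-nativeHeight' field:)
// e.g. NativeWidth = 400, doc width = 200, doc height = 100
// nheight = 400 * 100 / 200 = 200, so NativeHeight returns 200 unless _nativeHeight is set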
export function SetNativeWidth(doc: Doc, width: number | undefined, fieldKey?: string) {
doc[(fieldKey ?? Doc.LayoutFieldKey(doc)) + '-nativeWidth'] = width;
@@ -1332,6 +1302,7 @@ export namespace Doc {
}
export function LinkEndpoint(linkDoc: Doc, anchorDoc: Doc) {
+ if (linkDoc.anchor2 === anchorDoc || (linkDoc.anchor2 as Doc).annotationOn) return '2';
return Doc.AreProtosEqual(anchorDoc, (linkDoc.anchor1 as Doc).annotationOn as Doc) || Doc.AreProtosEqual(anchorDoc, linkDoc.anchor1 as Doc) ? '1' : '2';
}
@@ -1390,7 +1361,7 @@ export namespace Doc {
});
}
export function UnBrushAllDocs() {
- brushManager.BrushedDoc.clear();
+ runInAction(() => brushManager.BrushedDoc.clear());
}
export function getDocTemplate(doc?: Doc) {
@@ -1415,7 +1386,7 @@ export namespace Doc {
if (typeof value === 'string') {
value = value.replace(`,${Utils.noRecursionHack}`, '');
}
- const fieldVal = key === '#' ? (StrCast(doc.tags).includes(':#' + value + ':') ? StrCast(doc.tags) : undefined) : doc[key];
+ const fieldVal = doc[key];
if (Cast(fieldVal, listSpec('string'), []).length) {
const vals = Cast(fieldVal, listSpec('string'), []);
const docs = vals.some(v => (v as any) instanceof Doc);
@@ -1423,7 +1394,7 @@ export namespace Doc {
return vals.some(v => v.includes(value)); // bcz: arghh: Todo: comparison should be parameterized as exact, or substring
}
const fieldStr = Field.toString(fieldVal as Field);
- return fieldStr.includes(value); // bcz: arghh: Todo: comparison should be parameterized as exact, or substring
+ return fieldStr.includes(value) || (value === String.fromCharCode(127) + '--undefined--' && fieldVal === undefined); // bcz: arghh: Todo: comparison should be parameterized as exact, or substring
}
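(A sketch of the sentinel that the new clause above matches; the constant name is hypothetical:)
// filters can now explicitly match documents whose field is unset
const UNDEFINED_SENTINEL = String.fromCharCode(127) + '--undefined--';
// passing UNDEFINED_SENTINEL as 'value' matches when doc[key] === undefined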
export function deiconifyView(doc: Doc) {
@@ -1564,20 +1535,26 @@ export namespace Doc {
}
}
- export async function importDocument(file: File) {
+ ///
+ // Imports a previously exported zip file containing a set of documents and their assets (e.g., images, videos).
+ // The 'remap' parameter determines whether the ids of the loaded documents are kept as they were or remapped to new ids.
+ // If they are not remapped, loading the file will overwrite any existing documents with those ids.
+ //
+ export async function importDocument(file: File, remap = false) {
const upload = Utils.prepend('/uploadDoc');
const formData = new FormData();
if (file) {
formData.append('file', file);
- formData.append('remap', 'true');
+ formData.append('remap', remap.toString());
const response = await fetch(upload, { method: 'POST', body: formData });
const json = await response.json();
- console.log(json);
if (json !== 'error') {
- await DocServer.GetRefFields(json.docids as string[]);
+ const docs = await DocServer.GetRefFields(json.docids as string[]);
const doc = DocCast(await DocServer.GetRefField(json.id));
- (await DocListCastAsync(doc?.LINKS))?.forEach(link => LinkManager.Instance.addLink(link));
- doc.LINKS = undefined;
+ const links = await DocServer.GetRefFields(json.linkids as string[]);
+ Array.from(Object.keys(links))
+ .map(key => links[key])
+ .forEach(link => link instanceof Doc && LinkManager.Instance.addLink(link));
return doc;
}
}
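(A round-trip sketch combining the export and import paths touched in this commit; 'zipFile' is an illustrative File handle, e.g. from a file-input element:)
await Doc.Zip(myDoc);                                     // produces docs.json + assets in dashExport.zip
const imported = await Doc.importDocument(zipFile, true); // remap=true assigns fresh ids on upload
imported && console.log('imported', imported.title);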
@@ -1725,8 +1702,11 @@ export namespace Doc {
}
}
+export function IdToDoc(id: string) {
+ return DocCast(DocServer.GetCachedRefField(id));
+}
ScriptingGlobals.add(function idToDoc(id: string): any {
- return DocServer.GetCachedRefField(id);
+ return IdToDoc(id);
});
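(Usage sketch of the new exported helper and its scripting counterpart; the id string is illustrative, and the lookup only resolves documents already cached on the client:)
const doc = IdToDoc('someDocId');          // cached Doc, or nothing if it has not been fetched yet
// from a Dash script:  idToDoc("someDocId")?.title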
ScriptingGlobals.add(function renameAlias(doc: any) {
return StrCast(Doc.GetProto(doc).title).replace(/\([0-9]*\)/, '') + `(${doc.aliasNumber})`;