Diffstat (limited to 'src/fields/Doc.ts')
-rw-r--r--  src/fields/Doc.ts  241
1 file changed, 138 insertions, 103 deletions
diff --git a/src/fields/Doc.ts b/src/fields/Doc.ts
index 168e29dd5..662792ff0 100644
--- a/src/fields/Doc.ts
+++ b/src/fields/Doc.ts
@@ -26,11 +26,12 @@ import { Cast, DocCast, FieldValue, NumCast, StrCast, ToConstructor } from './Ty
import { AudioField, ImageField, MapField, PdfField, VideoField, WebField } from './URLField';
import { deleteProperty, GetEffectiveAcl, getField, getter, makeEditable, makeReadOnly, normalizeEmail, setter, SharingPermissions, updateFunction } from './util';
import JSZip = require('jszip');
+import * as JSZipUtils from '../JSZipUtils';
export namespace Field {
export function toKeyValueString(doc: Doc, key: string): string {
const onDelegate = Object.keys(doc).includes(key);
const field = ComputedField.WithoutComputed(() => FieldValue(doc[key]));
- return !Field.IsField(field) ? '' : (onDelegate ? '=' : '') + (field instanceof ComputedField ? `:=${field.script.originalScript}` : Field.toScriptString(field));
+ return !Field.IsField(field) ? '' : (onDelegate ? '=' : '') + (field instanceof ComputedField ? `:=${field.script.originalScript}` : field instanceof ScriptField ? `$=${field.script.originalScript}` : Field.toScriptString(field));
}
export function toScriptString(field: Field): string {
switch (typeof field) {
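For reference, a minimal sketch of the serializations toKeyValueString now produces (the doc and its fields are hypothetical): a leading '=' marks a field stored on the delegate itself, ':=' prefixes a ComputedField's source, and the new '$=' prefix marks a ScriptField's source.

    // hypothetical doc: 'x' lives on the prototype, 'y' on the delegate,
    // 'area' is a ComputedField and 'onClick' is a ScriptField
    Field.toKeyValueString(doc, 'x');       // -> "100"              (plain value inherited from the prototype)
    Field.toKeyValueString(doc, 'y');       // -> "=150"             (plain value stored on the delegate)
    Field.toKeyValueString(doc, 'area');    // -> ":=this.x * this.y" (ComputedField's originalScript)
    Field.toKeyValueString(doc, 'onClick'); // -> "$=" + the script's originalScript (the case added in this change)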
@@ -517,20 +518,13 @@ export namespace Doc {
}
export async function SetInPlace(doc: Doc, key: string, value: Field | undefined, defaultProto: boolean) {
if (key.startsWith('_')) key = key.substring(1);
- const hasProto = doc.proto instanceof Doc;
+ const hasProto = Doc.GetProto(doc) !== doc ? Doc.GetProto(doc) : undefined;
const onDeleg = Object.getOwnPropertyNames(doc).indexOf(key) !== -1;
const onProto = hasProto && Object.getOwnPropertyNames(doc.proto).indexOf(key) !== -1;
if (onDeleg || !hasProto || (!onProto && !defaultProto)) {
doc[key] = value;
} else doc.proto![key] = value;
}
- export async function SetOnPrototype(doc: Doc, key: string, value: Field) {
- const proto = Object.getOwnPropertyNames(doc).indexOf('isPrototype') === -1 ? doc.proto : doc;
-
- if (proto) {
- proto[key] = value;
- }
- }
export function GetAllPrototypes(doc: Doc): Doc[] {
const protos: Doc[] = [];
let d: Opt<Doc> = doc;
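As a rough summary of the write-target rules SetInPlace applies after this change (a sketch, not runnable outside Dash; the field name below is hypothetical):

    // the write goes to the delegate itself when:
    //   - the key is already an own property of the delegate, or
    //   - the doc has no separate prototype (Doc.GetProto(doc) === doc), or
    //   - the key is not on the prototype and defaultProto is false;
    // otherwise the write falls through to doc.proto.
    Doc.SetInPlace(childDoc, '_width', 250, false); // the leading '_' is stripped, so this targets 'width'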
@@ -701,30 +695,43 @@ export namespace Doc {
return bestAlias ?? Doc.MakeAlias(doc);
}
- export async function makeClone(doc: Doc, cloneMap: Map<string, Doc>, linkMap: Map<string, Doc>, rtfs: { copy: Doc; key: string; field: RichTextField }[], exclusions: string[], dontCreate: boolean, asBranch: boolean): Promise<Doc> {
+ // lists all the tag ids within a RichTextField's data that may reference document ids.
+ // when a document is cloned, any documents referenced through these ids must be cloned as well.
+ export const DocsInTextFieldIds = ['audioId', 'textId', 'anchorId', 'docId'];
+ export async function makeClone(doc: Doc, cloneMap: Map<string, Doc>, linkMap: Map<string, Doc>, rtfs: { copy: Doc; key: string; field: RichTextField }[], exclusions: string[], cloneLinks: boolean): Promise<Doc> {
if (Doc.IsBaseProto(doc)) return doc;
if (cloneMap.get(doc[Id])) return cloneMap.get(doc[Id])!;
- const copy = dontCreate ? (asBranch ? Cast(doc.branchMaster, Doc, null) ?? doc : doc) : new Doc(undefined, true);
+ const copy = new Doc(undefined, true);
cloneMap.set(doc[Id], copy);
const filter = [...exclusions, ...Cast(doc.cloneFieldFilter, listSpec('string'), [])];
await Promise.all(
Object.keys(doc).map(async key => {
if (filter.includes(key)) return;
- const assignKey = (val: any) => !dontCreate && (copy[key] = val);
+ const assignKey = (val: any) => (copy[key] = val);
const cfield = ComputedField.WithoutComputed(() => FieldValue(doc[key]));
const field = ProxyField.WithoutProxy(() => doc[key]);
const copyObjectField = async (field: ObjectField) => {
const list = await Cast(doc[key], listSpec(Doc));
const docs = list && (await DocListCastAsync(list))?.filter(d => d instanceof Doc);
if (docs !== undefined && docs.length) {
- const clones = await Promise.all(docs.map(async d => Doc.makeClone(d, cloneMap, linkMap, rtfs, exclusions, dontCreate, asBranch)));
- !dontCreate && assignKey(new List<Doc>(clones));
- } else if (doc[key] instanceof Doc) {
- assignKey(key.includes('layout[') ? undefined : key.startsWith('layout') ? (doc[key] as Doc) : await Doc.makeClone(doc[key] as Doc, cloneMap, linkMap, rtfs, exclusions, dontCreate, asBranch)); // reference documents except copy documents that are expanded template fields
+ const clones = await Promise.all(docs.map(async d => Doc.makeClone(d, cloneMap, linkMap, rtfs, exclusions, cloneLinks)));
+ assignKey(new List<Doc>(clones));
} else {
- !dontCreate && assignKey(ObjectField.MakeCopy(field));
+ assignKey(ObjectField.MakeCopy(field));
if (field instanceof RichTextField) {
- if (field.Data.includes('"audioId":') || field.Data.includes('"textId":') || field.Data.includes('"anchorId":')) {
+ if (DocsInTextFieldIds.some(id => field.Data.includes(`"${id}":`))) {
+ const docidsearch = new RegExp('(' + DocsInTextFieldIds.map(exp => '(' + exp + ')').join('|') + ')":"([a-z-A-Z0-9_]*)"', 'g');
+ const rawdocids = field.Data.match(docidsearch);
+ const docids = rawdocids?.map((str: string) =>
+ DocsInTextFieldIds.reduce((output, exp) => {
+ return output.replace(new RegExp(`${exp}":`, 'g'), '');
+ }, str)
+ .replace(/"/g, '')
+ .trim()
+ );
+ const results = docids && (await DocServer.GetRefFields(docids));
+ const docs = results && Array.from(Object.keys(results)).map(key => DocCast(results[key]));
+ docs && (await Promise.all(docs.map(d => Doc.makeClone(d, cloneMap, linkMap, rtfs, exclusions, cloneLinks))));
rtfs.push({ copy, key, field });
}
}
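The id extraction above builds its RegExp from DocsInTextFieldIds and then strips the tag names back out of each match; an equivalent sketch using a single capture group (inside an async context; fieldData stands in for the RichTextField's Data string and is hypothetical here):

    // collect every document id referenced by one of the DocsInTextFieldIds tags
    const idPattern = new RegExp(`"(?:${Doc.DocsInTextFieldIds.join('|')})":"([a-zA-Z0-9_-]*)"`, 'g');
    const docids = Array.from(fieldData.matchAll(idPattern), match => match[1]);
    // prefetch the referenced documents so the clone pass above can resolve them
    const results = await DocServer.GetRefFields(docids);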
@@ -732,64 +739,71 @@ export namespace Doc {
};
const docAtKey = doc[key];
if (docAtKey instanceof Doc) {
- if (!Doc.IsSystem(docAtKey) && (key === 'annotationOn' || (key === 'proto' && cloneMap.has(doc[Id])) || ((key === 'anchor1' || key === 'anchor2') && doc.author === Doc.CurrentUserEmail))) {
- assignKey(await Doc.makeClone(docAtKey, cloneMap, linkMap, rtfs, exclusions, dontCreate, asBranch));
+ if (!Doc.IsSystem(docAtKey) && (key.startsWith('layout') || key === 'annotationOn' || key === 'proto' || ((key === 'anchor1' || key === 'anchor2') && doc.author === Doc.CurrentUserEmail))) {
+ assignKey(await Doc.makeClone(docAtKey, cloneMap, linkMap, rtfs, exclusions, cloneLinks));
} else {
assignKey(docAtKey);
}
+ } else if (field instanceof RefField) {
+ assignKey(field);
+ } else if (cfield instanceof ComputedField) {
+ assignKey(cfield[Copy]());
+ // ComputedField.MakeFunction(cfield.script.originalScript));
+ } else if (field instanceof ObjectField) {
+ await copyObjectField(field);
+ } else if (field instanceof Promise) {
+ debugger; //This shouldn't happen...
} else {
- if (field instanceof RefField) {
- assignKey(field);
- } else if (cfield instanceof ComputedField) {
- !dontCreate && assignKey(cfield[Copy]());
- // ComputedField.MakeFunction(cfield.script.originalScript));
- } else if (field instanceof ObjectField) {
- await copyObjectField(field);
- } else if (field instanceof Promise) {
- debugger; //This shouldn't happen...
- } else {
- assignKey(field);
- }
+ assignKey(field);
}
})
);
- for (const link of Array.from(doc[DirectLinksSym])) {
- const linkClone = await Doc.makeClone(link, cloneMap, linkMap, rtfs, exclusions, dontCreate, asBranch);
- linkMap.set(link[Id], linkClone);
- }
- if (!dontCreate) {
- Doc.SetInPlace(copy, 'title', (asBranch ? 'BRANCH: ' : 'CLONE: ') + doc.title, true);
- asBranch ? (copy.branchOf = doc) : (copy.cloneOf = doc);
- if (!Doc.IsPrototype(copy)) {
- Doc.AddDocToList(doc, 'branches', Doc.GetProto(copy));
+ await Promise.all(Array.from(doc[DirectLinksSym]).map(async link => {
+ if (
+ cloneLinks ||
+ ((cloneMap.has(DocCast(link.anchor1)?.[Id]) || cloneMap.has(DocCast(DocCast(link.anchor1)?.annotationOn)?.[Id])) && (cloneMap.has(DocCast(link.anchor2)?.[Id]) || cloneMap.has(DocCast(DocCast(link.anchor2)?.annotationOn)?.[Id])))
+ ) {
+ linkMap.set(link[Id], await Doc.makeClone(link, cloneMap, linkMap, rtfs, exclusions, cloneLinks));
}
- cloneMap.set(doc[Id], copy);
- }
+ }));
+ Doc.SetInPlace(copy, 'title', 'CLONE: ' + doc.title, true);
+ copy.cloneOf = doc;
+ cloneMap.set(doc[Id], copy);
+
Doc.AddFileOrphan(copy);
return copy;
}
- export function repairClone(doc: Doc, cloned: Doc[], visited: Set<Doc>) {
- if (visited.has(doc)) return;
- visited.add(doc);
- Object.keys(doc).map(key => {
- const docAtKey = DocCast(doc[key]);
- if (docAtKey && !Doc.IsSystem(docAtKey)) {
- if (!cloned.includes(docAtKey)) {
- doc[key] = undefined;
- } else {
- repairClone(docAtKey, cloned, visited);
+ export function repairClone(clone: Doc, cloneMap: Map<string, Doc>, visited: Set<Doc>) {
+ if (visited.has(clone)) return;
+ visited.add(clone);
+ Object.keys(clone)
+ .filter(key => key !== 'cloneOf')
+ .map(key => {
+ const docAtKey = DocCast(clone[key]);
+ if (docAtKey && !Doc.IsSystem(docAtKey)) {
+ if (!Array.from(cloneMap.values()).includes(docAtKey)) {
+ if (cloneMap.has(docAtKey[Id])) {
+ clone[key] = cloneMap.get(docAtKey[Id]);
+ } else clone[key] = undefined;
+ } else {
+ repairClone(docAtKey, cloneMap, visited);
+ }
}
- }
- });
+ });
+ }
+ export function MakeClones(docs: Doc[], cloneLinks: boolean) {
+ const cloneMap = new Map<string, Doc>();
+ return docs.map(doc => Doc.MakeClone(doc, cloneLinks, cloneMap));
}
- export async function MakeClone(doc: Doc, dontCreate: boolean = false, asBranch = false, cloneMap: Map<string, Doc> = new Map()) {
+
+ export async function MakeClone(doc: Doc, cloneLinks = true, cloneMap: Map<string, Doc> = new Map()) {
const linkMap = new Map<string, Doc>();
const rtfMap: { copy: Doc; key: string; field: RichTextField }[] = [];
- const copy = await Doc.makeClone(doc, cloneMap, linkMap, rtfMap, ['cloneOf', 'branches', 'branchOf'], dontCreate, asBranch);
+ const copy = await Doc.makeClone(doc, cloneMap, linkMap, rtfMap, ['cloneOf'], cloneLinks);
const repaired = new Set<Doc>();
const linkedDocs = Array.from(linkMap.values());
const clonedDocs = [...Array.from(cloneMap.values()), ...linkedDocs];
- clonedDocs.map(clone => Doc.repairClone(clone, Array.from(cloneMap.values()), repaired));
+ clonedDocs.map(clone => Doc.repairClone(clone, cloneMap, repaired));
linkedDocs.map((link: Doc) => LinkManager.Instance.addLink(link, true));
rtfMap.map(({ copy, key, field }) => {
const replacer = (match: any, attr: string, id: string, offset: any, string: any) => {
@@ -802,7 +816,8 @@ export namespace Doc {
};
const regex = `(${Doc.localServerPath()})([^"]*)`;
const re = new RegExp(regex, 'g');
- copy[key] = new RichTextField(field.Data.replace(/("textId":|"audioId":|"anchorId":)"([^"]+)"/g, replacer).replace(re, replacer2), field.Text);
+ const docidsearch = new RegExp('(' + DocsInTextFieldIds.map(exp => `"${exp}":`).join('|') + ')"([^"]+)"', 'g');
+ copy[key] = new RichTextField(field.Data.replace(docidsearch, replacer).replace(re, replacer2), field.Text);
});
return { clone: copy, map: cloneMap, linkMap };
}
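A short usage sketch of the reworked cloning entry points (the document variables are hypothetical). With the branch path removed, links are cloned only when both of their anchors, or the documents those anchors annotate, are themselves part of the clone, unless cloneLinks is left at its default of true:

    // clone one document; returns the copy plus the id -> clone maps
    const { clone, map, linkMap } = await Doc.MakeClone(someDoc);
    // clone.title is "CLONE: " + someDoc.title and clone.cloneOf points back at someDoc

    // clone several documents sharing one cloneMap so cross-references stay consistent
    const clones = await Promise.all(Doc.MakeClones([docA, docB], /* cloneLinks */ false));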
@@ -813,10 +828,11 @@ export namespace Doc {
// a.href = url;
// a.download = `DocExport-${this.props.Document[Id]}.zip`;
// a.click();
- const { clone, map, linkMap } = await Doc.MakeClone(doc, true);
+ const { clone, map, linkMap } = await Doc.MakeClone(doc);
clone.LINKS = new List<Doc>(Array.from(linkMap.values()));
+ const proms = [] as string[];
function replacer(key: any, value: any) {
- if (['branchOf', 'cloneOf', 'cursors'].includes(key)) return undefined;
+ if (key && ['branchOf', 'cloneOf', 'cursors'].includes(key)) return undefined;
else if (value instanceof Doc) {
if (key !== 'field' && Number.isNaN(Number(key))) {
const __fields = value[FieldsSym]();
@@ -826,9 +842,14 @@ export namespace Doc {
}
} else if (value instanceof ScriptField) return { script: value.script, __type: 'script' };
else if (value instanceof RichTextField) return { Data: value.Data, Text: value.Text, __type: 'RichTextField' };
- else if (value instanceof ImageField) return { url: value.url.href, __type: 'image' };
- else if (value instanceof PdfField) return { url: value.url.href, __type: 'pdf' };
- else if (value instanceof AudioField) return { url: value.url.href, __type: 'audio' };
+ else if (value instanceof ImageField) {
+ const extension = value.url.href.replace(/.*\./, '');
+ proms.push(value.url.href.replace('.' + extension, '_o.' + extension));
+ return { url: value.url.href, __type: 'image' };
+ } else if (value instanceof PdfField) {
+ proms.push(value.url.href);
+ return { url: value.url.href, __type: 'pdf' };
+ } else if (value instanceof AudioField) return { url: value.url.href, __type: 'audio' };
else if (value instanceof VideoField) return { url: value.url.href, __type: 'video' };
else if (value instanceof WebField) return { url: value.url.href, __type: 'web' };
else if (value instanceof MapField) return { url: value.url.href, __type: 'map' };
@@ -841,8 +862,34 @@ export namespace Doc {
const docs: { [id: string]: any } = {};
Array.from(map.entries()).forEach(f => (docs[f[0]] = f[1]));
- const docString = JSON.stringify({ id: doc[Id], docs }, decycle(replacer));
-
+ const docString = JSON.stringify({ id: clone[Id], docs }, decycle(replacer));
+
+ const generateZIP = (proms: string[]) => {
+ const zip = new JSZip();
+ const zipFilename = 'dashExport.zip';
+ const localUrls = proms.filter(url => url.startsWith(window.location.origin));
+ let count = 0;
+
+ localUrls.forEach(url => {
+ const filename = url.replace(window.location.origin + '/', '').replace(/\//g, '%%%');
+ // load each referenced file and add it to the zip
+ JSZipUtils.getBinaryContent(url, function (err: any, data: any) {
+ if (err) {
+ throw err; // or handle the error
+ }
+ zip.file(filename, data, { binary: true });
+ count++;
+ // only after the last file arrives is doc.json added and the archive emitted
+ if (count === localUrls.length) {
+ zip.file('doc.json', docString);
+ zip.generateAsync({ type: 'blob' }).then(function (content) {
+ saveAs(content, zipFilename);
+ });
+ }
+ });
+ });
+ };
+ generateZIP(proms);
const zip = new JSZip();
zip.file('doc.json', docString);
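The JSZipUtils callback loop above only emits the archive once the last getBinaryContent callback has fired; a Promise-based sketch of the same export (assuming fetch is available in the browser and the URLs are same-origin, as the filter above already requires) could look like this:

    // fetch every referenced file, add it to the zip, then save a single blob
    async function generateZipFromUrls(urls: string[], docString: string) {
        const zip = new JSZip();
        const local = urls.filter(url => url.startsWith(window.location.origin));
        await Promise.all(
            local.map(async url => {
                const data = await (await fetch(url)).arrayBuffer();
                const filename = url.replace(window.location.origin + '/', '').replace(/\//g, '%%%');
                zip.file(filename, data, { binary: true });
            })
        );
        zip.file('doc.json', docString);
        saveAs(await zip.generateAsync({ type: 'blob' }), 'dashExport.zip');
    }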
@@ -859,57 +906,40 @@ export namespace Doc {
saveAs(content, doc.title + '.zip'); // glr: Possibly change the name of the document to match the title?
});
}
- //
- // Determines whether the layout needs to be expanded (as a template).
- // template expansion is rquired when the layout is a template doc/field and there's a datadoc which isn't equal to the layout template
- //
- export function WillExpandTemplateLayout(layoutDoc: Doc, dataDoc?: Doc) {
- return (layoutDoc.isTemplateForField || layoutDoc.isTemplateDoc) && dataDoc && layoutDoc !== dataDoc;
- }
const _pendingMap: Map<string, boolean> = new Map();
//
// Returns an expanded template layout for a target data document if there is a template relationship
// between the two. If so, the layoutDoc is expanded into a new document that inherits the properties
// of the original layout while allowing for individual layout properties to be overridden in the expanded layout.
- // templateArgs should be equivalent to the layout key that generates the template since that's where the template parameters are stored in ()'s at the end of the key.
- // NOTE: the template will have references to "@params" -- the template arguments will be assigned to the '@params' field
- // so that when the @params key is accessed, it will be rewritten as the key that is stored in the 'params' field and
- // the derefence will then occur on the rootDocument (the original document).
- // in the future, field references could be written as @<someparam> and then arguments would be passed in the layout key as:
- // layout_mytemplate(somparam=somearg).
- // then any references to @someparam would be rewritten as accesses to 'somearg' on the rootDocument
- export function expandTemplateLayout(templateLayoutDoc: Doc, targetDoc?: Doc, templateArgs?: string) {
- const args = templateArgs?.match(/\(([a-zA-Z0-9._\-]*)\)/)?.[1].replace('()', '') || StrCast(templateLayoutDoc.PARAMS);
- if ((!args && !WillExpandTemplateLayout(templateLayoutDoc, targetDoc)) || !targetDoc) return templateLayoutDoc;
-
- const templateField = StrCast(templateLayoutDoc.isTemplateForField); // the field that the template renders
+ export function expandTemplateLayout(templateLayoutDoc: Doc, targetDoc?: Doc) {
+ // nothing to do if the layout isn't a template, or if there is no target document distinct from the template layout
+ if (!targetDoc || templateLayoutDoc === targetDoc || (!templateLayoutDoc.isTemplateForField && !templateLayoutDoc.isTemplateDoc)) {
+ return templateLayoutDoc;
+ }
+
+ const templateField = StrCast(templateLayoutDoc.isTemplateForField, Doc.LayoutFieldKey(templateLayoutDoc)); // the field that the template renders
// First it checks if an expanded layout already exists -- if so it will be stored on the dataDoc
// using the template layout doc's id as the field key.
// If it doesn't find the expanded layout, then it makes a delegate of the template layout and
// saves it on the data doc indexed by the template layout's id.
//
- const params = args.split('=').length > 1 ? args.split('=')[0] : 'PARAMS';
- const layoutFielddKey = Doc.LayoutFieldKey(templateLayoutDoc);
- const expandedLayoutFieldKey = (templateField || layoutFielddKey) + '-layout[' + templateLayoutDoc[Id] + (args ? `(${args})` : '') + ']';
+ const expandedLayoutFieldKey = templateField + '-layout[' + templateLayoutDoc[Id] + ']';
let expandedTemplateLayout = targetDoc?.[expandedLayoutFieldKey];
if (templateLayoutDoc.resolvedDataDoc instanceof Promise) {
expandedTemplateLayout = undefined;
_pendingMap.set(targetDoc[Id] + expandedLayoutFieldKey, true);
- } else if (expandedTemplateLayout === undefined && !_pendingMap.get(targetDoc[Id] + expandedLayoutFieldKey + args)) {
- if (templateLayoutDoc.resolvedDataDoc === (targetDoc.rootDocument || Doc.GetProto(targetDoc)) && templateLayoutDoc.PARAMS === StrCast(targetDoc.PARAMS)) {
+ } else if (expandedTemplateLayout === undefined && !_pendingMap.get(targetDoc[Id] + expandedLayoutFieldKey)) {
+ if (templateLayoutDoc.resolvedDataDoc === (targetDoc.rootDocument || Doc.GetProto(targetDoc))) {
expandedTemplateLayout = templateLayoutDoc; // reuse an existing template layout if its for the same document with the same params
} else {
templateLayoutDoc.resolvedDataDoc && (templateLayoutDoc = Cast(templateLayoutDoc.proto, Doc, null) || templateLayoutDoc); // if the template has already been applied (ie, a nested template), then use the template's prototype
if (!targetDoc[expandedLayoutFieldKey]) {
- _pendingMap.set(targetDoc[Id] + expandedLayoutFieldKey + args, true);
+ _pendingMap.set(targetDoc[Id] + expandedLayoutFieldKey, true);
setTimeout(
action(() => {
const newLayoutDoc = Doc.MakeDelegate(templateLayoutDoc, undefined, '[' + templateLayoutDoc.title + ']');
- // the template's arguments are stored in params which is derefenced to find
- // the actual field key where the parameterized template data is stored.
- newLayoutDoc[params] = args !== '...' ? args : ''; // ... signifies the layout has sub template(s) -- so we have to expand the layout for them so that they can get the correct 'rootDocument' field, but we don't need to reassign their params. it would be better if the 'rootDocument' field could be passed dynamically to avoid have to create instances
newLayoutDoc.rootDocument = targetDoc;
const dataDoc = Doc.GetProto(targetDoc);
newLayoutDoc.resolvedDataDoc = dataDoc;
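As a rough illustration of the simplified caching scheme above (the field name and id are hypothetical): a template that renders the 'text' field and has id 'abc123' is expanded at most once per target document and cached under a key derived from both:

    const expandedLayoutFieldKey = 'text' + '-layout[' + 'abc123' + ']'; // "text-layout[abc123]"
    // first access: targetDoc["text-layout[abc123]"] is undefined, so a delegate of the
    // template layout is created, given rootDocument/resolvedDataDoc, and stored there;
    // subsequent accesses reuse that cached expansion (guarded by _pendingMap while it resolves).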
@@ -918,7 +948,7 @@ export namespace Doc {
}
targetDoc[expandedLayoutFieldKey] = newLayoutDoc;
- _pendingMap.delete(targetDoc[Id] + expandedLayoutFieldKey + args);
+ _pendingMap.delete(targetDoc[Id] + expandedLayoutFieldKey);
})
);
}
@@ -934,8 +964,8 @@ export namespace Doc {
console.log('No, no, no!');
return { layout: childDoc, data: childDoc };
}
- const resolvedDataDoc = Doc.AreProtosEqual(containerDataDoc, containerDoc) || (!childDoc.isTemplateDoc && !childDoc.isTemplateForField && !childDoc.PARAMS) ? undefined : containerDataDoc;
- return { layout: Doc.expandTemplateLayout(childDoc, resolvedDataDoc, '(' + StrCast(containerDoc.PARAMS) + ')'), data: resolvedDataDoc };
+ const resolvedDataDoc = Doc.AreProtosEqual(containerDataDoc, containerDoc) || (!childDoc.isTemplateDoc && !childDoc.isTemplateForField) ? undefined : containerDataDoc;
+ return { layout: Doc.expandTemplateLayout(childDoc, resolvedDataDoc), data: resolvedDataDoc };
}
export function Overwrite(doc: Doc, overwrite: Doc, copyProto: boolean = false): Doc {
@@ -1166,7 +1196,7 @@ export namespace Doc {
return doc[StrCast(doc.layoutKey, 'layout')];
}
export function LayoutFieldKey(doc: Doc): string {
- return StrCast(Doc.Layout(doc).layout).split("'")[1];
+ return StrCast(Doc.Layout(doc)[StrCast(doc.layoutKey, 'layout')]).split("'")[1]; // bcz: TODO check on this. Used to always reference 'layout'; now it uses the layout specified by the current layoutKey
}
export function NativeAspect(doc: Doc, dataDoc?: Doc, useDim?: boolean) {
return Doc.NativeWidth(doc, dataDoc, useDim) / (Doc.NativeHeight(doc, dataDoc, useDim) || 1);
@@ -1175,9 +1205,10 @@ export namespace Doc {
return !doc ? 0 : NumCast(doc._nativeWidth, NumCast((dataDoc || doc)[Doc.LayoutFieldKey(doc) + '-nativeWidth'], useWidth ? doc[WidthSym]() : 0));
}
export function NativeHeight(doc?: Doc, dataDoc?: Doc, useHeight?: boolean) {
- const dheight = doc ? NumCast((dataDoc || doc)[Doc.LayoutFieldKey(doc) + '-nativeHeight'], useHeight ? doc[HeightSym]() : 0) : 0;
- const nheight = doc ? (Doc.NativeWidth(doc, dataDoc, useHeight) * doc[HeightSym]()) / doc[WidthSym]() : 0;
- return !doc ? 0 : NumCast(doc._nativeHeight, nheight || dheight);
+ if (!doc) return 0;
+ const nheight = (Doc.NativeWidth(doc, dataDoc, useHeight) * doc[HeightSym]()) / doc[WidthSym]();
+ const dheight = NumCast((dataDoc || doc)[Doc.LayoutFieldKey(doc) + '-nativeHeight'], useHeight ? doc[HeightSym]() : 0);
+ return NumCast(doc._nativeHeight, nheight || dheight);
}
export function SetNativeWidth(doc: Doc, width: number | undefined, fieldKey?: string) {
doc[(fieldKey ?? Doc.LayoutFieldKey(doc)) + '-nativeWidth'] = width;
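A small worked example of the NativeHeight computation above (numbers are hypothetical): the aspect-derived value is used unless an explicit _nativeHeight override exists, with the '<layout field>-nativeHeight' field as the final fallback.

    // hypothetical doc rendered at 300 x 200 with a stored native width of 600:
    // nheight = NativeWidth * height / width = 600 * 200 / 300 = 400
    // dheight = (dataDoc || doc)["<layoutFieldKey>-nativeHeight"] (0 if unset)
    // returned value: doc._nativeHeight if set, otherwise nheight || dheight -> 400 here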
@@ -1287,6 +1318,7 @@ export namespace Doc {
}
export function LinkEndpoint(linkDoc: Doc, anchorDoc: Doc) {
+ if (linkDoc.anchor2 === anchorDoc || (linkDoc.anchor2 as Doc).annotationOn) return '2';
return Doc.AreProtosEqual(anchorDoc, (linkDoc.anchor1 as Doc).annotationOn as Doc) || Doc.AreProtosEqual(anchorDoc, linkDoc.anchor1 as Doc) ? '1' : '2';
}
@@ -1345,7 +1377,7 @@ export namespace Doc {
});
}
export function UnBrushAllDocs() {
- brushManager.BrushedDoc.clear();
+ runInAction(() => brushManager.BrushedDoc.clear());
}
export function getDocTemplate(doc?: Doc) {
@@ -1527,9 +1559,12 @@ export namespace Doc {
formData.append('remap', 'true');
const response = await fetch(upload, { method: 'POST', body: formData });
const json = await response.json();
+ console.log(json);
if (json !== 'error') {
- const doc = DocCast(await DocServer.GetRefField(json));
+ await DocServer.GetRefFields(json.docids as string[]);
+ const doc = DocCast(await DocServer.GetRefField(json.id));
(await DocListCastAsync(doc?.LINKS))?.forEach(link => LinkManager.Instance.addLink(link));
+ doc && (doc.LINKS = undefined);
return doc;
}
}