author    bobzel <zzzman@gmail.com>  2023-04-17 09:37:16 -0400
committer bobzel <zzzman@gmail.com>  2023-04-17 09:37:16 -0400
commit    6a9e80de419af14bece7a48e55edc1543d69f20f (patch)
tree      71ae1b819bc4f7fdb699ae90c035eb86275c5006 /src/fields/Doc.ts
parent    0a38e3f91f4f85f07fdbb7575ceb678032dcdfe9 (diff)
parent    8127616d06b4db2b29de0b13068810fd19e77b5e (diff)
Merge branch 'master' into james-server-stats
Diffstat (limited to 'src/fields/Doc.ts')
-rw-r--r--  src/fields/Doc.ts  331
1 file changed, 184 insertions, 147 deletions
diff --git a/src/fields/Doc.ts b/src/fields/Doc.ts
index df49c32f0..662792ff0 100644
--- a/src/fields/Doc.ts
+++ b/src/fields/Doc.ts
@@ -10,6 +10,7 @@ import { scriptingGlobal, ScriptingGlobals } from '../client/util/ScriptingGloba
import { SelectionManager } from '../client/util/SelectionManager';
import { afterDocDeserialize, autoObject, Deserializable, SerializationHelper } from '../client/util/SerializationHelper';
import { UndoManager } from '../client/util/UndoManager';
+import { decycle } from '../decycler/decycler';
import { DashColor, incrementTitleCopy, intersectRect, Utils } from '../Utils';
import { DateField } from './DateField';
import { Copy, HandleUpdate, Id, OnUpdate, Parent, Self, SelfProxy, ToScriptString, ToString, Update } from './FieldSymbols';
@@ -25,28 +26,27 @@ import { Cast, DocCast, FieldValue, NumCast, StrCast, ToConstructor } from './Ty
import { AudioField, ImageField, MapField, PdfField, VideoField, WebField } from './URLField';
import { deleteProperty, GetEffectiveAcl, getField, getter, makeEditable, makeReadOnly, normalizeEmail, setter, SharingPermissions, updateFunction } from './util';
import JSZip = require('jszip');
-
+import * as JSZipUtils from '../JSZipUtils';
export namespace Field {
export function toKeyValueString(doc: Doc, key: string): string {
const onDelegate = Object.keys(doc).includes(key);
const field = ComputedField.WithoutComputed(() => FieldValue(doc[key]));
- return !Field.IsField(field) ? '' : (onDelegate ? '=' : '') + (field instanceof ComputedField ? `:=${field.script.originalScript}` : Field.toScriptString(field));
+ return !Field.IsField(field) ? '' : (onDelegate ? '=' : '') + (field instanceof ComputedField ? `:=${field.script.originalScript}` : field instanceof ScriptField ? `$=${field.script.originalScript}` : Field.toScriptString(field));
}
export function toScriptString(field: Field): string {
- if (typeof field === 'string') {
- if (field.startsWith('{"')) return `'${field}'`; // bcz: hack ... want to quote the string the right way. if there are nested "'s, then use ' instead of ". In this case, test for the start of a JSON string of the format {"property": ... } and use outer 's instead of "s
- return `"${field}"`;
+ switch (typeof field) {
+ case 'string':
+ if (field.startsWith('{"')) return `'${field}'`; // bcz: hack ... want to quote the string the right way. if there are nested "'s, then use ' instead of ". In this case, test for the start of a JSON string of the format {"property": ... } and use outer 's instead of "s
+ return `"${field}"`;
+ case 'number':
+ case 'boolean':
+ return String(field);
}
- if (typeof field === 'number' || typeof field === 'boolean') return String(field);
- if (field === undefined || field === null) return 'null';
- return field[ToScriptString]();
+ return field?.[ToScriptString]?.() ?? 'null';
}
export function toString(field: Field): string {
- if (typeof field === 'string') return field;
- if (typeof field === 'number' || typeof field === 'boolean') return String(field);
- if (field instanceof ObjectField) return field[ToString]();
- if (field instanceof RefField) return field[ToString]();
- return '';
+ if (typeof field === 'string' || typeof field === 'number' || typeof field === 'boolean') return String(field);
+ return field?.[ToString]?.() || '';
}
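
For reference, a minimal sketch of how the consolidated conversions above behave; the sample values are illustrative and not taken from the codebase:

    Field.toScriptString('hello');            // => '"hello"'
    Field.toScriptString('{"a":1}');          // => the string wrapped in outer single quotes (the JSON-string hack)
    Field.toScriptString(42);                 // => '42'
    Field.toScriptString(undefined as any);   // => 'null' (optional chaining falls through to the ?? default)
    Field.toString(true);                     // => 'true'
    Field.toString(undefined as any);         // => ''     (the || '' fallback)
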
export function IsField(field: any): field is Field;
export function IsField(field: any, includeUndefined: true): field is Field | undefined;
@@ -79,17 +79,14 @@ export async function DocCastAsync(field: FieldResult): Promise<Opt<Doc>> {
return Cast(field, Doc);
}
-export function NumListCast(field: FieldResult) {
- return Cast(field, listSpec('number'), []);
-}
-export function StrListCast(field: FieldResult) {
- return Cast(field, listSpec('string'), []);
+export function NumListCast(field: FieldResult, defaultVal: number[] = []) {
+ return Cast(field, listSpec('number'), defaultVal);
}
-export function DocListCast(field: FieldResult) {
- return Cast(field, listSpec(Doc), []).filter(d => d instanceof Doc) as Doc[];
+export function StrListCast(field: FieldResult, defaultVal: string[] = []) {
+ return Cast(field, listSpec('string'), defaultVal);
}
-export function DocListCastOrNull(field: FieldResult) {
- return Cast(field, listSpec(Doc), null)?.filter(d => d instanceof Doc) as Doc[] | undefined;
+export function DocListCast(field: FieldResult, defaultVal: Doc[] = []) {
+ return Cast(field, listSpec(Doc), defaultVal).filter(d => d instanceof Doc) as Doc[];
}
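
A quick usage sketch of the new optional default-value parameters; the document fields referenced here are hypothetical:

    const tags = StrListCast(doc.tags);                 // [] when the field is missing
    const margins = NumListCast(doc.margins, [5, 5]);   // caller-supplied fallback instead of []
    const children = DocListCast(doc.data);             // defaults to [] and still filters to Doc instances
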
export const WidthSym = Symbol('Width');
@@ -153,17 +150,8 @@ export function updateCachedAcls(doc: Doc) {
}
@scriptingGlobal
-@Deserializable('Doc', updateCachedAcls).withFields(['id'])
+@Deserializable('Doc', updateCachedAcls, ['id'])
export class Doc extends RefField {
- //TODO tfs: these should be temporary...
- private static mainDocId: string | undefined;
- public static get MainDocId() {
- return this.mainDocId;
- }
- public static set MainDocId(id: string | undefined) {
- this.mainDocId = id;
- }
-
@observable public static CurrentlyLoading: Doc[];
// removes from currently loading display
@action
@@ -530,20 +518,13 @@ export namespace Doc {
}
export async function SetInPlace(doc: Doc, key: string, value: Field | undefined, defaultProto: boolean) {
if (key.startsWith('_')) key = key.substring(1);
- const hasProto = doc.proto instanceof Doc;
+ const hasProto = Doc.GetProto(doc) !== doc ? Doc.GetProto(doc) : undefined;
const onDeleg = Object.getOwnPropertyNames(doc).indexOf(key) !== -1;
const onProto = hasProto && Object.getOwnPropertyNames(doc.proto).indexOf(key) !== -1;
if (onDeleg || !hasProto || (!onProto && !defaultProto)) {
doc[key] = value;
} else doc.proto![key] = value;
}
- export async function SetOnPrototype(doc: Doc, key: string, value: Field) {
- const proto = Object.getOwnPropertyNames(doc).indexOf('isPrototype') === -1 ? doc.proto : doc;
-
- if (proto) {
- proto[key] = value;
- }
- }
export function GetAllPrototypes(doc: Doc): Doc[] {
const protos: Doc[] = [];
let d: Opt<Doc> = doc;
@@ -582,21 +563,13 @@ export namespace Doc {
// compare whether documents or their protos match
export function AreProtosEqual(doc?: Doc, other?: Doc) {
- if (!doc || !other) return false;
- const r = doc === other;
- const r2 = Doc.GetProto(doc) === other;
- const r3 = Doc.GetProto(other) === doc;
- const r4 = Doc.GetProto(doc) === Doc.GetProto(other) && Doc.GetProto(other) !== undefined;
- return r || r2 || r3 || r4;
+ return doc && other && Doc.GetProto(doc) === Doc.GetProto(other);
}
// Gets the data document for the document. Note: this is mis-named -- it does not specifically
// return the doc's proto, but rather recursively searches through the proto inheritance chain
// and returns the document whose proto is undefined or whose proto is marked as a base prototype ('isPrototype').
export function GetProto(doc: Doc): Doc {
- if (doc instanceof Promise) {
- // console.log("GetProto: warning: got Promise insead of Doc");
- }
const proto = doc && (Doc.GetT(doc, 'isPrototype', 'boolean', true) ? doc : doc.proto || doc);
return proto === doc ? proto : Doc.GetProto(proto);
}
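
A hypothetical trace of the simplified proto helpers, assuming Doc.MakeDelegate points the delegate's proto at its base:

    const base = new Doc(undefined, true);
    const d1 = Doc.MakeDelegate(base);
    const d2 = Doc.MakeDelegate(base);
    Doc.GetProto(d1) === base;    // true: the recursion stops at the doc with no proto (or one marked 'isPrototype')
    Doc.AreProtosEqual(d1, d2);   // true: both delegates resolve to the same base prototype
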
@@ -722,79 +695,116 @@ export namespace Doc {
return bestAlias ?? Doc.MakeAlias(doc);
}
- export async function makeClone(doc: Doc, cloneMap: Map<string, Doc>, linkMap: Map<Doc, Doc>, rtfs: { copy: Doc; key: string; field: RichTextField }[], exclusions: string[], dontCreate: boolean, asBranch: boolean): Promise<Doc> {
+ // this lists out all the tag ids that can be in a RichTextField that might contain document ids.
+ // if a document is cloned, we need to make sure to clone all of these referenced documents as well;
+ export const DocsInTextFieldIds = ['audioId', 'textId', 'anchorId', 'docId'];
+ export async function makeClone(doc: Doc, cloneMap: Map<string, Doc>, linkMap: Map<string, Doc>, rtfs: { copy: Doc; key: string; field: RichTextField }[], exclusions: string[], cloneLinks: boolean): Promise<Doc> {
if (Doc.IsBaseProto(doc)) return doc;
if (cloneMap.get(doc[Id])) return cloneMap.get(doc[Id])!;
- const copy = dontCreate ? (asBranch ? Cast(doc.branchMaster, Doc, null) || doc : doc) : new Doc(undefined, true);
+ const copy = new Doc(undefined, true);
cloneMap.set(doc[Id], copy);
- const fieldExclusions = doc.type === DocumentType.MARKER ? exclusions.filter(ex => ex !== 'annotationOn') : exclusions;
- const filter = [...fieldExclusions, ...Cast(doc.cloneFieldFilter, listSpec('string'), [])];
+ const filter = [...exclusions, ...Cast(doc.cloneFieldFilter, listSpec('string'), [])];
await Promise.all(
Object.keys(doc).map(async key => {
if (filter.includes(key)) return;
- const assignKey = (val: any) => !dontCreate && (copy[key] = val);
+ const assignKey = (val: any) => (copy[key] = val);
const cfield = ComputedField.WithoutComputed(() => FieldValue(doc[key]));
const field = ProxyField.WithoutProxy(() => doc[key]);
const copyObjectField = async (field: ObjectField) => {
const list = await Cast(doc[key], listSpec(Doc));
const docs = list && (await DocListCastAsync(list))?.filter(d => d instanceof Doc);
if (docs !== undefined && docs.length) {
- const clones = await Promise.all(docs.map(async d => Doc.makeClone(d, cloneMap, linkMap, rtfs, exclusions, dontCreate, asBranch)));
- !dontCreate && assignKey(new List<Doc>(clones));
- } else if (doc[key] instanceof Doc) {
- assignKey(key.includes('layout[') ? undefined : key.startsWith('layout') ? (doc[key] as Doc) : await Doc.makeClone(doc[key] as Doc, cloneMap, linkMap, rtfs, exclusions, dontCreate, asBranch)); // reference documents except copy documents that are expanded template fields
+ const clones = await Promise.all(docs.map(async d => Doc.makeClone(d, cloneMap, linkMap, rtfs, exclusions, cloneLinks)));
+ assignKey(new List<Doc>(clones));
} else {
- !dontCreate && assignKey(ObjectField.MakeCopy(field));
+ assignKey(ObjectField.MakeCopy(field));
if (field instanceof RichTextField) {
- if (field.Data.includes('"audioId":') || field.Data.includes('"textId":') || field.Data.includes('"anchorId":')) {
+ if (DocsInTextFieldIds.some(id => field.Data.includes(`"${id}":`))) {
+ const docidsearch = new RegExp('(' + DocsInTextFieldIds.map(exp => '(' + exp + ')').join('|') + ')":"([a-z-A-Z0-9_]*)"', 'g');
+ const rawdocids = field.Data.match(docidsearch);
+ const docids = rawdocids?.map((str: string) =>
+ DocsInTextFieldIds.reduce((output, exp) => {
+ return output.replace(new RegExp(`${exp}":`, 'g'), '');
+ }, str)
+ .replace(/"/g, '')
+ .trim()
+ );
+ const results = docids && (await DocServer.GetRefFields(docids));
+ const docs = results && Array.from(Object.keys(results)).map(key => DocCast(results[key]));
+ docs && docs.map(doc => Doc.makeClone(doc, cloneMap, linkMap, rtfs, exclusions, cloneLinks));
rtfs.push({ copy, key, field });
}
}
}
};
- if (key === 'proto') {
- if (doc[key] instanceof Doc) {
- assignKey(await Doc.makeClone(doc[key] as Doc, cloneMap, linkMap, rtfs, exclusions, dontCreate, asBranch));
- }
- } else if (key === 'anchor1' || key === 'anchor2') {
- if (doc[key] instanceof Doc) {
- assignKey(await Doc.makeClone(doc[key] as Doc, cloneMap, linkMap, rtfs, exclusions, true, asBranch));
- }
- } else {
- if (field instanceof RefField) {
- assignKey(field);
- } else if (cfield instanceof ComputedField) {
- !dontCreate && assignKey(ComputedField.MakeFunction(cfield.script.originalScript));
- } else if (field instanceof ObjectField) {
- await copyObjectField(field);
- } else if (field instanceof Promise) {
- debugger; //This shouldn't happen...
+ const docAtKey = doc[key];
+ if (docAtKey instanceof Doc) {
+ if (!Doc.IsSystem(docAtKey) && (key.startsWith('layout') || key === 'annotationOn' || key === 'proto' || ((key === 'anchor1' || key === 'anchor2') && doc.author === Doc.CurrentUserEmail))) {
+ assignKey(await Doc.makeClone(docAtKey, cloneMap, linkMap, rtfs, exclusions, cloneLinks));
} else {
- assignKey(field);
+ assignKey(docAtKey);
}
+ } else if (field instanceof RefField) {
+ assignKey(field);
+ } else if (cfield instanceof ComputedField) {
+ assignKey(cfield[Copy]());
+ // ComputedField.MakeFunction(cfield.script.originalScript));
+ } else if (field instanceof ObjectField) {
+ await copyObjectField(field);
+ } else if (field instanceof Promise) {
+ debugger; //This shouldn't happen...
+ } else {
+ assignKey(field);
}
})
);
- for (const link of Array.from(doc[DirectLinksSym])) {
- const linkClone = await Doc.makeClone(link, cloneMap, linkMap, rtfs, exclusions, dontCreate, asBranch);
- linkMap.set(link, linkClone);
- }
- if (!dontCreate) {
- Doc.SetInPlace(copy, 'title', (asBranch ? 'BRANCH: ' : 'CLONE: ') + doc.title, true);
- asBranch ? (copy.branchOf = doc) : (copy.cloneOf = doc);
- if (!Doc.IsPrototype(copy)) {
- Doc.AddDocToList(doc, 'branches', Doc.GetProto(copy));
+ Array.from(doc[DirectLinksSym]).forEach(async link => {
+ if (
+ cloneLinks ||
+ ((cloneMap.has(DocCast(link.anchor1)?.[Id]) || cloneMap.has(DocCast(DocCast(link.anchor1)?.annotationOn)?.[Id])) && (cloneMap.has(DocCast(link.anchor2)?.[Id]) || cloneMap.has(DocCast(DocCast(link.anchor2)?.annotationOn)?.[Id])))
+ ) {
+ linkMap.set(link[Id], await Doc.makeClone(link, cloneMap, linkMap, rtfs, exclusions, cloneLinks));
}
- cloneMap.set(doc[Id], copy);
- }
+ });
+ Doc.SetInPlace(copy, 'title', 'CLONE: ' + doc.title, true);
+ copy.cloneOf = doc;
+ cloneMap.set(doc[Id], copy);
+
Doc.AddFileOrphan(copy);
return copy;
}
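
A minimal sketch of the DocsInTextFieldIds scan used above to pull referenced document ids out of a RichTextField's Data; the sample JSON is hypothetical:

    const data = '{"attrs":{"docId":"abc-123"},"marks":[{"attrs":{"audioId":"xyz_9"}}]}';
    const docidsearch = new RegExp('(' + Doc.DocsInTextFieldIds.map(exp => '(' + exp + ')').join('|') + ')":"([a-z-A-Z0-9_]*)"', 'g');
    const rawdocids = data.match(docidsearch);   // ['docId":"abc-123"', 'audioId":"xyz_9"']
    const docids = rawdocids?.map(str =>
        Doc.DocsInTextFieldIds.reduce((output, exp) => output.replace(new RegExp(`${exp}":`, 'g'), ''), str)
            .replace(/"/g, '')
            .trim()
    );                                           // ['abc-123', 'xyz_9']
    // the ids are then fetched with DocServer.GetRefFields(docids) and each resulting Doc is cloned as well
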
- export async function MakeClone(doc: Doc, dontCreate: boolean = false, asBranch = false, cloneMap: Map<string, Doc> = new Map()) {
- const linkMap = new Map<Doc, Doc>();
+ export function repairClone(clone: Doc, cloneMap: Map<string, Doc>, visited: Set<Doc>) {
+ if (visited.has(clone)) return;
+ visited.add(clone);
+ Object.keys(clone)
+ .filter(key => key !== 'cloneOf')
+ .map(key => {
+ const docAtKey = DocCast(clone[key]);
+ if (docAtKey && !Doc.IsSystem(docAtKey)) {
+ if (!Array.from(cloneMap.values()).includes(docAtKey)) {
+ if (cloneMap.has(docAtKey[Id])) {
+ clone[key] = cloneMap.get(docAtKey[Id]);
+ } else clone[key] = undefined;
+ } else {
+ repairClone(docAtKey, cloneMap, visited);
+ }
+ }
+ });
+ }
+ export function MakeClones(docs: Doc[], cloneLinks: boolean) {
+ const cloneMap = new Map<string, Doc>();
+ return docs.map(doc => Doc.MakeClone(doc, cloneLinks, cloneMap));
+ }
+
+ export async function MakeClone(doc: Doc, cloneLinks = true, cloneMap: Map<string, Doc> = new Map()) {
+ const linkMap = new Map<string, Doc>();
const rtfMap: { copy: Doc; key: string; field: RichTextField }[] = [];
- const copy = await Doc.makeClone(doc, cloneMap, linkMap, rtfMap, ['cloneOf', 'branches', 'branchOf', 'context'], dontCreate, asBranch);
- Array.from(linkMap.entries()).map((links: Doc[]) => LinkManager.Instance.addLink(links[1], true));
+ const copy = await Doc.makeClone(doc, cloneMap, linkMap, rtfMap, ['cloneOf'], cloneLinks);
+ const repaired = new Set<Doc>();
+ const linkedDocs = Array.from(linkMap.values());
+ const clonedDocs = [...Array.from(cloneMap.values()), ...linkedDocs];
+ clonedDocs.map(clone => Doc.repairClone(clone, cloneMap, repaired));
+ linkedDocs.map((link: Doc) => LinkManager.Instance.addLink(link, true));
rtfMap.map(({ copy, key, field }) => {
const replacer = (match: any, attr: string, id: string, offset: any, string: any) => {
const mapped = cloneMap.get(id);
@@ -806,9 +816,10 @@ export namespace Doc {
};
const regex = `(${Doc.localServerPath()})([^"]*)`;
const re = new RegExp(regex, 'g');
- copy[key] = new RichTextField(field.Data.replace(/("textId":|"audioId":|"anchorId":)"([^"]+)"/g, replacer).replace(re, replacer2), field.Text);
+ const docidsearch = new RegExp('(' + DocsInTextFieldIds.map(exp => `"${exp}":`).join('|') + ')"([^"]+)"', 'g');
+ copy[key] = new RichTextField(field.Data.replace(docidsearch, replacer).replace(re, replacer2), field.Text);
});
- return { clone: copy, map: cloneMap };
+ return { clone: copy, map: cloneMap, linkMap };
}
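
A usage sketch of the revised clone API; docA and docB are hypothetical documents:

    const { clone, map, linkMap } = await Doc.MakeClone(docA);            // cloneLinks defaults to true
    // map:     original doc id  -> cloned Doc (repairClone uses it to rewire references)
    // linkMap: original link id -> cloned link Doc (each is registered with LinkManager)
    const batch = await Promise.all(Doc.MakeClones([docA, docB], false)); // the clones share one cloneMap
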
export async function Zip(doc: Doc) {
@@ -817,9 +828,11 @@ export namespace Doc {
// a.href = url;
// a.download = `DocExport-${this.props.Document[Id]}.zip`;
// a.click();
- const { clone, map } = await Doc.MakeClone(doc, true);
+ const { clone, map, linkMap } = await Doc.MakeClone(doc);
+ clone.LINKS = new List<Doc>(Array.from(linkMap.values()));
+ const proms = [] as string[];
function replacer(key: any, value: any) {
- if (['branchOf', 'cloneOf', 'context', 'cursors'].includes(key)) return undefined;
+ if (key && ['branchOf', 'cloneOf', 'cursors'].includes(key)) return undefined;
else if (value instanceof Doc) {
if (key !== 'field' && Number.isNaN(Number(key))) {
const __fields = value[FieldsSym]();
@@ -829,9 +842,14 @@ export namespace Doc {
}
} else if (value instanceof ScriptField) return { script: value.script, __type: 'script' };
else if (value instanceof RichTextField) return { Data: value.Data, Text: value.Text, __type: 'RichTextField' };
- else if (value instanceof ImageField) return { url: value.url.href, __type: 'image' };
- else if (value instanceof PdfField) return { url: value.url.href, __type: 'pdf' };
- else if (value instanceof AudioField) return { url: value.url.href, __type: 'audio' };
+ else if (value instanceof ImageField) {
+ const extension = value.url.href.replace(/.*\./, '');
+ proms.push(value.url.href.replace('.' + extension, '_o.' + extension));
+ return { url: value.url.href, __type: 'image' };
+ } else if (value instanceof PdfField) {
+ proms.push(value.url.href);
+ return { url: value.url.href, __type: 'pdf' };
+ } else if (value instanceof AudioField) return { url: value.url.href, __type: 'audio' };
else if (value instanceof VideoField) return { url: value.url.href, __type: 'video' };
else if (value instanceof WebField) return { url: value.url.href, __type: 'web' };
else if (value instanceof MapField) return { url: value.url.href, __type: 'map' };
@@ -844,8 +862,34 @@ export namespace Doc {
const docs: { [id: string]: any } = {};
Array.from(map.entries()).forEach(f => (docs[f[0]] = f[1]));
- const docString = JSON.stringify({ id: doc[Id], docs }, replacer);
-
+ const docString = JSON.stringify({ id: clone[Id], docs }, decycle(replacer));
+
+ let generateZIP = (proms: string[]) => {
+ var zip = new JSZip();
+ var count = 0;
+ var zipFilename = 'dashExport.zip';
+
+ proms
+ .filter(url => url.startsWith(window.location.origin))
+ .forEach((url, i) => {
+ var filename = proms[i].replace(window.location.origin + '/', '').replace(/\//g, '%%%');
+ // loading a file and add it in a zip file
+ JSZipUtils.getBinaryContent(url, function (err: any, data: any) {
+ if (err) {
+ throw err; // or handle the error
+ }
+ zip.file(filename, data, { binary: true });
+ count++;
+ if (count == proms.length) {
+ zip.file('doc.json', docString);
+ zip.generateAsync({ type: 'blob' }).then(function (content) {
+ saveAs(content, zipFilename);
+ });
+ }
+ });
+ });
+ };
+ generateZIP(proms);
const zip = new JSZip();
zip.file('doc.json', docString);
@@ -862,57 +906,40 @@ export namespace Doc {
saveAs(content, doc.title + '.zip'); // glr: Possibly change the name of the document to match the title?
});
}
- //
- // Determines whether the layout needs to be expanded (as a template).
- // template expansion is rquired when the layout is a template doc/field and there's a datadoc which isn't equal to the layout template
- //
- export function WillExpandTemplateLayout(layoutDoc: Doc, dataDoc?: Doc) {
- return (layoutDoc.isTemplateForField || layoutDoc.isTemplateDoc) && dataDoc && layoutDoc !== dataDoc;
- }
const _pendingMap: Map<string, boolean> = new Map();
//
// Returns an expanded template layout for a target data document if there is a template relationship
// between the two. If so, the layoutDoc is expanded into a new document that inherits the properties
// of the original layout while allowing for individual layout properties to be overridden in the expanded layout.
- // templateArgs should be equivalent to the layout key that generates the template since that's where the template parameters are stored in ()'s at the end of the key.
- // NOTE: the template will have references to "@params" -- the template arguments will be assigned to the '@params' field
- // so that when the @params key is accessed, it will be rewritten as the key that is stored in the 'params' field and
- // the derefence will then occur on the rootDocument (the original document).
- // in the future, field references could be written as @<someparam> and then arguments would be passed in the layout key as:
- // layout_mytemplate(somparam=somearg).
- // then any references to @someparam would be rewritten as accesses to 'somearg' on the rootDocument
- export function expandTemplateLayout(templateLayoutDoc: Doc, targetDoc?: Doc, templateArgs?: string) {
- const args = templateArgs?.match(/\(([a-zA-Z0-9._\-]*)\)/)?.[1].replace('()', '') || StrCast(templateLayoutDoc.PARAMS);
- if ((!args && !WillExpandTemplateLayout(templateLayoutDoc, targetDoc)) || !targetDoc) return templateLayoutDoc;
-
- const templateField = StrCast(templateLayoutDoc.isTemplateForField); // the field that the template renders
+ export function expandTemplateLayout(templateLayoutDoc: Doc, targetDoc?: Doc) {
+ // nothing to do if the layout isn't a template or we don't have a target that's different than the template
+ if (!targetDoc || templateLayoutDoc === targetDoc || (!templateLayoutDoc.isTemplateForField && !templateLayoutDoc.isTemplateDoc)) {
+ return templateLayoutDoc;
+ }
+
+ const templateField = StrCast(templateLayoutDoc.isTemplateForField, Doc.LayoutFieldKey(templateLayoutDoc)); // the field that the template renders
// First it checks if an expanded layout already exists -- if so it will be stored on the dataDoc
// using the template layout doc's id as the field key.
// If it doesn't find the expanded layout, then it makes a delegate of the template layout and
// saves it on the data doc indexed by the template layout's id.
//
- const params = args.split('=').length > 1 ? args.split('=')[0] : 'PARAMS';
- const layoutFielddKey = Doc.LayoutFieldKey(templateLayoutDoc);
- const expandedLayoutFieldKey = (templateField || layoutFielddKey) + '-layout[' + templateLayoutDoc[Id] + (args ? `(${args})` : '') + ']';
+ const expandedLayoutFieldKey = templateField + '-layout[' + templateLayoutDoc[Id] + ']';
let expandedTemplateLayout = targetDoc?.[expandedLayoutFieldKey];
if (templateLayoutDoc.resolvedDataDoc instanceof Promise) {
expandedTemplateLayout = undefined;
_pendingMap.set(targetDoc[Id] + expandedLayoutFieldKey, true);
- } else if (expandedTemplateLayout === undefined && !_pendingMap.get(targetDoc[Id] + expandedLayoutFieldKey + args)) {
- if (templateLayoutDoc.resolvedDataDoc === (targetDoc.rootDocument || Doc.GetProto(targetDoc)) && templateLayoutDoc.PARAMS === StrCast(targetDoc.PARAMS)) {
+ } else if (expandedTemplateLayout === undefined && !_pendingMap.get(targetDoc[Id] + expandedLayoutFieldKey)) {
+ if (templateLayoutDoc.resolvedDataDoc === (targetDoc.rootDocument || Doc.GetProto(targetDoc))) {
expandedTemplateLayout = templateLayoutDoc; // reuse an existing template layout if it's for the same document with the same params
} else {
templateLayoutDoc.resolvedDataDoc && (templateLayoutDoc = Cast(templateLayoutDoc.proto, Doc, null) || templateLayoutDoc); // if the template has already been applied (ie, a nested template), then use the template's prototype
if (!targetDoc[expandedLayoutFieldKey]) {
- _pendingMap.set(targetDoc[Id] + expandedLayoutFieldKey + args, true);
+ _pendingMap.set(targetDoc[Id] + expandedLayoutFieldKey, true);
setTimeout(
action(() => {
const newLayoutDoc = Doc.MakeDelegate(templateLayoutDoc, undefined, '[' + templateLayoutDoc.title + ']');
- // the template's arguments are stored in params which is derefenced to find
- // the actual field key where the parameterized template data is stored.
- newLayoutDoc[params] = args !== '...' ? args : ''; // ... signifies the layout has sub template(s) -- so we have to expand the layout for them so that they can get the correct 'rootDocument' field, but we don't need to reassign their params. it would be better if the 'rootDocument' field could be passed dynamically to avoid have to create instances
newLayoutDoc.rootDocument = targetDoc;
const dataDoc = Doc.GetProto(targetDoc);
newLayoutDoc.resolvedDataDoc = dataDoc;
@@ -921,7 +948,7 @@ export namespace Doc {
}
targetDoc[expandedLayoutFieldKey] = newLayoutDoc;
- _pendingMap.delete(targetDoc[Id] + expandedLayoutFieldKey + args);
+ _pendingMap.delete(targetDoc[Id] + expandedLayoutFieldKey);
})
);
}
@@ -937,8 +964,8 @@ export namespace Doc {
console.log('No, no, no!');
return { layout: childDoc, data: childDoc };
}
- const resolvedDataDoc = Doc.AreProtosEqual(containerDataDoc, containerDoc) || (!childDoc.isTemplateDoc && !childDoc.isTemplateForField && !childDoc.PARAMS) ? undefined : containerDataDoc;
- return { layout: Doc.expandTemplateLayout(childDoc, resolvedDataDoc, '(' + StrCast(containerDoc.PARAMS) + ')'), data: resolvedDataDoc };
+ const resolvedDataDoc = Doc.AreProtosEqual(containerDataDoc, containerDoc) || (!childDoc.isTemplateDoc && !childDoc.isTemplateForField) ? undefined : containerDataDoc;
+ return { layout: Doc.expandTemplateLayout(childDoc, resolvedDataDoc), data: resolvedDataDoc };
}
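
A sketch of the simplified expanded-layout caching key now that the PARAMS/args handling is removed; the id and field name are hypothetical:

    // for a template layout with isTemplateForField === 'text' and an [Id] of 'tmpl42',
    // the expanded delegate is cached on the target under:
    const expandedLayoutFieldKey = 'text' + '-layout[' + 'tmpl42' + ']';  // 'text-layout[tmpl42]'
    // i.e. targetDoc['text-layout[tmpl42]'] holds a delegate of the template whose
    // rootDocument is targetDoc and whose resolvedDataDoc is Doc.GetProto(targetDoc)
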
export function Overwrite(doc: Doc, overwrite: Doc, copyProto: boolean = false): Doc {
@@ -1169,7 +1196,7 @@ export namespace Doc {
return doc[StrCast(doc.layoutKey, 'layout')];
}
export function LayoutFieldKey(doc: Doc): string {
- return StrCast(Doc.Layout(doc).layout).split("'")[1];
+ return StrCast(Doc.Layout(doc)[StrCast(doc.layoutKey, 'layout')]).split("'")[1]; // bcz: TODO check on this. Used to always reference 'layout'; now it uses the layout specified by the current layoutKey
}
export function NativeAspect(doc: Doc, dataDoc?: Doc, useDim?: boolean) {
return Doc.NativeWidth(doc, dataDoc, useDim) / (Doc.NativeHeight(doc, dataDoc, useDim) || 1);
@@ -1178,9 +1205,10 @@ export namespace Doc {
return !doc ? 0 : NumCast(doc._nativeWidth, NumCast((dataDoc || doc)[Doc.LayoutFieldKey(doc) + '-nativeWidth'], useWidth ? doc[WidthSym]() : 0));
}
export function NativeHeight(doc?: Doc, dataDoc?: Doc, useHeight?: boolean) {
- const dheight = doc ? NumCast((dataDoc || doc)[Doc.LayoutFieldKey(doc) + '-nativeHeight'], useHeight ? doc[HeightSym]() : 0) : 0;
- const nheight = doc ? (Doc.NativeWidth(doc, dataDoc, useHeight) * doc[HeightSym]()) / doc[WidthSym]() : 0;
- return !doc ? 0 : NumCast(doc._nativeHeight, nheight || dheight);
+ if (!doc) return 0;
+ const nheight = (Doc.NativeWidth(doc, dataDoc, useHeight) * doc[HeightSym]()) / doc[WidthSym]();
+ const dheight = NumCast((dataDoc || doc)[Doc.LayoutFieldKey(doc) + '-nativeHeight'], useHeight ? doc[HeightSym]() : 0);
+ return NumCast(doc._nativeHeight, nheight || dheight);
}
export function SetNativeWidth(doc: Doc, width: number | undefined, fieldKey?: string) {
doc[(fieldKey ?? Doc.LayoutFieldKey(doc)) + '-nativeWidth'] = width;
@@ -1290,10 +1318,12 @@ export namespace Doc {
}
export function LinkEndpoint(linkDoc: Doc, anchorDoc: Doc) {
+ if (linkDoc.anchor2 === anchorDoc || (linkDoc.anchor2 as Doc).annotationOn) return '2';
return Doc.AreProtosEqual(anchorDoc, (linkDoc.anchor1 as Doc).annotationOn as Doc) || Doc.AreProtosEqual(anchorDoc, linkDoc.anchor1 as Doc) ? '1' : '2';
}
export function linkFollowUnhighlight() {
+ clearTimeout(UnhighlightTimer);
UnhighlightWatchers.forEach(watcher => watcher());
UnhighlightWatchers.length = 0;
highlightedDocs.forEach(doc => Doc.UnHighlightDoc(doc));
@@ -1335,16 +1365,19 @@ export namespace Doc {
}
});
}
- export function UnHighlightDoc(doc: Doc) {
+ /// if doc is defined, only it is unhighlighted; otherwise all highlighted docs are unhighlighted
+ export function UnHighlightDoc(doc?: Doc) {
runInAction(() => {
- highlightedDocs.delete(doc);
- highlightedDocs.delete(Doc.GetProto(doc));
- doc[HighlightSym] = Doc.GetProto(doc)[HighlightSym] = false;
- doc[AnimationSym] = undefined;
+ (doc ? [doc] : Array.from(highlightedDocs)).forEach(doc => {
+ highlightedDocs.delete(doc);
+ highlightedDocs.delete(Doc.GetProto(doc));
+ doc[HighlightSym] = Doc.GetProto(doc)[HighlightSym] = false;
+ doc[AnimationSym] = undefined;
+ });
});
}
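
Usage of the widened UnHighlightDoc signature (someDoc is a hypothetical document):

    Doc.UnHighlightDoc(someDoc);   // unhighlights just this doc (and its proto)
    Doc.UnHighlightDoc();          // no argument: clears every doc in highlightedDocs
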
export function UnBrushAllDocs() {
- brushManager.BrushedDoc.clear();
+ runInAction(() => brushManager.BrushedDoc.clear());
}
export function getDocTemplate(doc?: Doc) {
@@ -1410,14 +1443,14 @@ export namespace Doc {
// filters document in a container collection:
// all documents with the specified value for the specified key are included/excluded
// based on the modifiers :"check", "x", undefined
- export function setDocFilter(container: Opt<Doc>, key: string, value: any, modifiers: 'remove' | 'match' | 'check' | 'x' | 'exists' | 'unset', toggle?: boolean, fieldSuffix?: string, append: boolean = true) {
+ export function setDocFilter(container: Opt<Doc>, key: string, value: any, modifiers: 'remove' | 'match' | 'check' | 'x' | 'exists' | 'unset', toggle?: boolean, fieldPrefix?: string, append: boolean = true) {
if (!container) return;
- const filterField = '_' + (fieldSuffix ? fieldSuffix + '-' : '') + 'docFilters';
+ const filterField = '_' + (fieldPrefix ? fieldPrefix + '-' : '') + 'docFilters';
const docFilters = Cast(container[filterField], listSpec('string'), []);
runInAction(() => {
for (let i = 0; i < docFilters.length; i++) {
const fields = docFilters[i].split(':'); // split key:value:modifier
- if (fields[0] === key && (fields[1] === value || modifiers === 'match')) {
+ if (fields[0] === key && (fields[1] === value || modifiers === 'match' || (fields[2] === 'match' && modifiers === 'remove'))) {
if (fields[2] === modifiers && modifiers && fields[1] === value) {
if (toggle) modifiers = 'remove';
else return;
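
The filters manipulated above are stored as 'key:value:modifier' strings on the container; a hedged sketch, assuming new entries are appended in that format (collection is a hypothetical container doc):

    Doc.setDocFilter(collection, 'tags', 'todo', 'check');
    // presumably leaves collection._docFilters ~ ['tags:todo:check'];
    // with a fieldPrefix of 'sidebar' the list would live at collection['_sidebar-docFilters'];
    // calling again with the same key/value and toggle=true flips the modifier to 'remove'
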
@@ -1526,8 +1559,12 @@ export namespace Doc {
formData.append('remap', 'true');
const response = await fetch(upload, { method: 'POST', body: formData });
const json = await response.json();
+ console.log(json);
if (json !== 'error') {
- const doc = await DocServer.GetRefField(json);
+ await DocServer.GetRefFields(json.docids as string[]);
+ const doc = DocCast(await DocServer.GetRefField(json.id));
+ (await DocListCastAsync(doc?.LINKS))?.forEach(link => LinkManager.Instance.addLink(link));
+ doc.LINKS = undefined;
return doc;
}
}