Diffstat (limited to 'src/fields')
-rw-r--r--  src/fields/DateField.ts        |  14
-rw-r--r--  src/fields/Doc.ts              | 264
-rw-r--r--  src/fields/InkField.ts         |   5
-rw-r--r--  src/fields/List.ts             |  12
-rw-r--r--  src/fields/ScriptField.ts      |   2
-rw-r--r--  src/fields/URLField.ts         |   9
-rw-r--r--  src/fields/documentSchemas.ts  |   1
-rw-r--r--  src/fields/util.ts             |   5
8 files changed, 147 insertions, 165 deletions
diff --git a/src/fields/DateField.ts b/src/fields/DateField.ts
index 26f51b2d3..2ea619bd9 100644
--- a/src/fields/DateField.ts
+++ b/src/fields/DateField.ts
@@ -1,11 +1,11 @@
-import { Deserializable } from "../client/util/SerializationHelper";
-import { serializable, date } from "serializr";
-import { ObjectField } from "./ObjectField";
-import { Copy, ToScriptString, ToString } from "./FieldSymbols";
-import { scriptingGlobal, ScriptingGlobals } from "../client/util/ScriptingGlobals";
+import { Deserializable } from '../client/util/SerializationHelper';
+import { serializable, date } from 'serializr';
+import { ObjectField } from './ObjectField';
+import { Copy, ToScriptString, ToString } from './FieldSymbols';
+import { scriptingGlobal, ScriptingGlobals } from '../client/util/ScriptingGlobals';
@scriptingGlobal
-@Deserializable("date")
+@Deserializable('date')
export class DateField extends ObjectField {
@serializable(date())
readonly date: Date;
@@ -24,7 +24,7 @@ export class DateField extends ObjectField {
}
[ToScriptString]() {
- return `new DateField(new Date(${this.date.toISOString()}))`;
+ return `new DateField(new Date("${this.date.toISOString()}"))`;
}
[ToString]() {
return this.date.toLocaleString();
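Note on the ToScriptString change above: Date's string constructor needs a quoted literal, so the unquoted ISO text produced by the old form would not parse if the generated script were re-evaluated. A minimal standalone sketch (the sample date is hypothetical):

const d = new Date('2024-01-02T03:04:05.000Z');                  // hypothetical sample date
const script = `new DateField(new Date("${d.toISOString()}"))`;  // => new DateField(new Date("2024-01-02T03:04:05.000Z")), a valid expression
// the old form, new DateField(new Date(2024-01-02T03:04:05.000Z)), is a syntax error when re-evaluated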
diff --git a/src/fields/Doc.ts b/src/fields/Doc.ts
index 68279372a..7cc7e0d2a 100644
--- a/src/fields/Doc.ts
+++ b/src/fields/Doc.ts
@@ -14,8 +14,8 @@ import { decycle } from '../decycler/decycler';
import { DashColor, incrementTitleCopy, intersectRect, Utils } from '../Utils';
import { DateField } from './DateField';
import { Copy, HandleUpdate, Id, OnUpdate, Parent, Self, SelfProxy, ToScriptString, ToString, Update } from './FieldSymbols';
-import { InkTool } from './InkField';
-import { List } from './List';
+import { InkField, InkTool } from './InkField';
+import { List, ListFieldName } from './List';
import { ObjectField } from './ObjectField';
import { PrefetchProxy, ProxyField } from './Proxy';
import { FieldId, RefField } from './RefField';
@@ -23,15 +23,19 @@ import { RichTextField } from './RichTextField';
import { listSpec } from './Schema';
import { ComputedField, ScriptField } from './ScriptField';
import { Cast, DocCast, FieldValue, NumCast, StrCast, ToConstructor } from './Types';
-import { AudioField, ImageField, MapField, PdfField, VideoField, WebField } from './URLField';
+import { AudioField, CsvField, ImageField, PdfField, VideoField, WebField } from './URLField';
import { deleteProperty, GetEffectiveAcl, getField, getter, makeEditable, makeReadOnly, normalizeEmail, setter, SharingPermissions, updateFunction } from './util';
import JSZip = require('jszip');
import * as JSZipUtils from '../JSZipUtils';
export namespace Field {
export function toKeyValueString(doc: Doc, key: string): string {
- const onDelegate = Object.keys(doc).includes(key);
+ const onDelegate = Object.keys(doc).includes(key.replace(/^_/, ''));
const field = ComputedField.WithoutComputed(() => FieldValue(doc[key]));
- return !Field.IsField(field) ? '' : (onDelegate ? '=' : '') + (field instanceof ComputedField ? `:=${field.script.originalScript}` : Field.toScriptString(field));
+ return !Field.IsField(field)
+ ? key.startsWith('_')
+ ? '='
+ : ''
+ : (onDelegate ? '=' : '') + (field instanceof ComputedField ? `:=${field.script.originalScript}` : field instanceof ScriptField ? `$=${field.script.originalScript}` : Field.toScriptString(field));
}
export function toScriptString(field: Field): string {
switch (typeof field) {
@@ -96,6 +100,7 @@ export const HighlightSym = Symbol('Highlight');
export const DataSym = Symbol('Data');
export const LayoutSym = Symbol('Layout');
export const FieldsSym = Symbol('Fields');
+export const CssSym = Symbol('Css');
export const AclSym = Symbol('Acl');
export const DirectLinksSym = Symbol('DirectLinks');
export const AclUnset = Symbol('AclUnset');
@@ -337,6 +342,7 @@ export class Doc extends RefField {
@observable private ___fieldKeys: any = {};
/// all of the raw acl's that have been set on this document. Use GetEffectiveAcl to determine the actual ACL of the doc for editing
@observable public [AclSym]: { [key: string]: symbol } = {};
+ @observable public [CssSym]: number = 0; // incrementer denoting a change to CSS layout
@observable public [DirectLinksSym]: Set<Doc> = new Set();
@observable public [AnimationSym]: Opt<Doc>;
@observable public [HighlightSym]: boolean = false;
@@ -694,12 +700,12 @@ export namespace Doc {
// this lists out all the tag ids that can be in a RichTextField that might contain document ids.
// if a document is cloned, we need to make sure to clone all of these referenced documents as well;
export const DocsInTextFieldIds = ['audioId', 'textId', 'anchorId', 'docId'];
- export async function makeClone(doc: Doc, cloneMap: Map<string, Doc>, linkMap: Map<string, Doc>, rtfs: { copy: Doc; key: string; field: RichTextField }[], exclusions: string[], cloneLinks: boolean): Promise<Doc> {
+ export async function makeClone(doc: Doc, cloneMap: Map<string, Doc>, linkMap: Map<string, Doc>, rtfs: { copy: Doc; key: string; field: RichTextField }[], exclusions: string[], pruneDocs: Doc[], cloneLinks: boolean): Promise<Doc> {
if (Doc.IsBaseProto(doc)) return doc;
if (cloneMap.get(doc[Id])) return cloneMap.get(doc[Id])!;
const copy = new Doc(undefined, true);
cloneMap.set(doc[Id], copy);
- const filter = [...exclusions, ...Cast(doc.cloneFieldFilter, listSpec('string'), [])];
+ const filter = [...exclusions, ...StrListCast(doc.cloneFieldFilter)];
await Promise.all(
Object.keys(doc).map(async key => {
if (filter.includes(key)) return;
@@ -710,7 +716,7 @@ export namespace Doc {
const list = await Cast(doc[key], listSpec(Doc));
const docs = list && (await DocListCastAsync(list))?.filter(d => d instanceof Doc);
if (docs !== undefined && docs.length) {
- const clones = await Promise.all(docs.map(async d => Doc.makeClone(d, cloneMap, linkMap, rtfs, exclusions, cloneLinks)));
+ const clones = await Promise.all(docs.map(async d => Doc.makeClone(d, cloneMap, linkMap, rtfs, exclusions, pruneDocs, cloneLinks)));
assignKey(new List<Doc>(clones));
} else {
assignKey(ObjectField.MakeCopy(field));
@@ -719,15 +725,13 @@ export namespace Doc {
const docidsearch = new RegExp('(' + DocsInTextFieldIds.map(exp => '(' + exp + ')').join('|') + ')":"([a-z-A-Z0-9_]*)"', 'g');
const rawdocids = field.Data.match(docidsearch);
const docids = rawdocids?.map((str: string) =>
- DocsInTextFieldIds.reduce((output, exp) => {
- return output.replace(new RegExp(`${exp}":`, 'g'), '');
- }, str)
+ DocsInTextFieldIds.reduce((output, exp) => output.replace(new RegExp(`${exp}":`, 'g'), ''), str)
.replace(/"/g, '')
.trim()
);
const results = docids && (await DocServer.GetRefFields(docids));
const docs = results && Array.from(Object.keys(results)).map(key => DocCast(results[key]));
- docs && docs.map(doc => Doc.makeClone(doc, cloneMap, linkMap, rtfs, exclusions, cloneLinks));
+ docs?.map(doc => doc && Doc.makeClone(doc, cloneMap, linkMap, rtfs, exclusions, pruneDocs, cloneLinks));
rtfs.push({ copy, key, field });
}
}
@@ -735,16 +739,17 @@ export namespace Doc {
};
const docAtKey = doc[key];
if (docAtKey instanceof Doc) {
- if (!Doc.IsSystem(docAtKey) && (key.startsWith('layout') || key === 'annotationOn' || key === 'proto' || ((key === 'anchor1' || key === 'anchor2') && doc.author === Doc.CurrentUserEmail))) {
- assignKey(await Doc.makeClone(docAtKey, cloneMap, linkMap, rtfs, exclusions, cloneLinks));
+ if (pruneDocs.includes(docAtKey)) {
+ // prune doc and do nothing
+ } else if (!Doc.IsSystem(docAtKey) && (key.startsWith('layout') || ['context', 'annotationOn', 'proto'].includes(key) || ((key === 'anchor1' || key === 'anchor2') && doc.author === Doc.CurrentUserEmail))) {
+ assignKey(await Doc.makeClone(docAtKey, cloneMap, linkMap, rtfs, exclusions, pruneDocs, cloneLinks));
} else {
assignKey(docAtKey);
}
} else if (field instanceof RefField) {
assignKey(field);
} else if (cfield instanceof ComputedField) {
- assignKey(cfield[Copy]());
- // ComputedField.MakeFunction(cfield.script.originalScript));
+ assignKey(cfield[Copy]()); // ComputedField.MakeFunction(cfield.script.originalScript));
} else if (field instanceof ObjectField) {
await copyObjectField(field);
} else if (field instanceof Promise) {
@@ -759,7 +764,7 @@ export namespace Doc {
cloneLinks ||
((cloneMap.has(DocCast(link.anchor1)?.[Id]) || cloneMap.has(DocCast(DocCast(link.anchor1)?.annotationOn)?.[Id])) && (cloneMap.has(DocCast(link.anchor2)?.[Id]) || cloneMap.has(DocCast(DocCast(link.anchor2)?.annotationOn)?.[Id])))
) {
- linkMap.set(link[Id], await Doc.makeClone(link, cloneMap, linkMap, rtfs, exclusions, cloneLinks));
+ linkMap.set(link[Id], await Doc.makeClone(link, cloneMap, linkMap, rtfs, exclusions, pruneDocs, cloneLinks));
}
});
Doc.SetInPlace(copy, 'title', 'CLONE: ' + doc.title, true);
@@ -795,11 +800,9 @@ export namespace Doc {
export async function MakeClone(doc: Doc, cloneLinks = true, cloneMap: Map<string, Doc> = new Map()) {
const linkMap = new Map<string, Doc>();
const rtfMap: { copy: Doc; key: string; field: RichTextField }[] = [];
- const copy = await Doc.makeClone(doc, cloneMap, linkMap, rtfMap, ['cloneOf'], cloneLinks);
+ const copy = await Doc.makeClone(doc, cloneMap, linkMap, rtfMap, ['cloneOf'], doc.context ? [DocCast(doc.context)] : [], cloneLinks);
const repaired = new Set<Doc>();
const linkedDocs = Array.from(linkMap.values());
- const clonedDocs = [...Array.from(cloneMap.values()), ...linkedDocs];
- clonedDocs.map(clone => Doc.repairClone(clone, cloneMap, repaired));
linkedDocs.map((link: Doc) => LinkManager.Instance.addLink(link, true));
rtfMap.map(({ copy, key, field }) => {
const replacer = (match: any, attr: string, id: string, offset: any, string: any) => {
@@ -810,104 +813,80 @@ export namespace Doc {
const mapped = cloneMap.get(id);
return href + (mapped ? mapped[Id] : id);
};
- const regex = `(${Doc.localServerPath()})([^"]*)`;
- const re = new RegExp(regex, 'g');
+ const re = new RegExp(`(${Doc.localServerPath()})([^"]*)`, 'g');
const docidsearch = new RegExp('(' + DocsInTextFieldIds.map(exp => `"${exp}":`).join('|') + ')"([^"]+)"', 'g');
copy[key] = new RichTextField(field.Data.replace(docidsearch, replacer).replace(re, replacer2), field.Text);
});
+ const clonedDocs = [...Array.from(cloneMap.values()), ...linkedDocs];
+ clonedDocs.map(clone => Doc.repairClone(clone, cloneMap, repaired));
return { clone: copy, map: cloneMap, linkMap };
}
- export async function Zip(doc: Doc) {
- // const a = document.createElement("a");
- // const url = Utils.prepend(`/downloadId/${this.props.Document[Id]}`);
- // a.href = url;
- // a.download = `DocExport-${this.props.Document[Id]}.zip`;
- // a.click();
+ export async function Zip(doc: Doc, zipFilename = 'dashExport.zip') {
const { clone, map, linkMap } = await Doc.MakeClone(doc);
- clone.LINKS = new List<Doc>(Array.from(linkMap.values()));
- const proms = [] as string[];
+ const proms = new Set<string>();
function replacer(key: any, value: any) {
if (key && ['branchOf', 'cloneOf', 'cursors'].includes(key)) return undefined;
- else if (value instanceof Doc) {
- if (key !== 'field' && Number.isNaN(Number(key))) {
- const __fields = value[FieldsSym]();
- return { id: value[Id], __type: 'Doc', fields: __fields };
- } else {
- return { fieldId: value[Id], __type: 'proxy' };
- }
- } else if (value instanceof ScriptField) return { script: value.script, __type: 'script' };
- else if (value instanceof RichTextField) return { Data: value.Data, Text: value.Text, __type: 'RichTextField' };
- else if (value instanceof ImageField) {
+ if (value instanceof ImageField) {
const extension = value.url.href.replace(/.*\./, '');
- proms.push(value.url.href.replace('.' + extension, '_o.' + extension));
- return { url: value.url.href, __type: 'image' };
- } else if (value instanceof PdfField) {
- proms.push(value.url.href);
- return { url: value.url.href, __type: 'pdf' };
- } else if (value instanceof AudioField) return { url: value.url.href, __type: 'audio' };
- else if (value instanceof VideoField) return { url: value.url.href, __type: 'video' };
- else if (value instanceof WebField) return { url: value.url.href, __type: 'web' };
- else if (value instanceof MapField) return { url: value.url.href, __type: 'map' };
- else if (value instanceof DateField) return { date: value.toString(), __type: 'date' };
- else if (value instanceof ProxyField) return { fieldId: value.fieldId, __type: 'proxy' };
- else if (value instanceof Array && key !== 'fields') return { fields: value, __type: 'list' };
- else if (value instanceof ComputedField) return { script: value.script, __type: 'computed' };
- else return value;
+ proms.add(value.url.href.replace('.' + extension, '_o.' + extension));
+ return SerializationHelper.Serialize(value);
+ }
+ if (value instanceof PdfField || value instanceof AudioField || value instanceof VideoField) {
+ proms.add(value.url.href);
+ return SerializationHelper.Serialize(value);
+ }
+ if (
+ value instanceof Doc ||
+ value instanceof ScriptField ||
+ value instanceof RichTextField ||
+ value instanceof InkField ||
+ value instanceof CsvField ||
+ value instanceof WebField ||
+ value instanceof DateField ||
+ value instanceof ProxyField ||
+ value instanceof ComputedField
+ ) {
+ return SerializationHelper.Serialize(value);
+ }
+ if (value instanceof Array && key !== ListFieldName && key !== InkField.InkDataFieldName) return { fields: value, __type: 'list' };
+ return value;
}
const docs: { [id: string]: any } = {};
+ const links: { [id: string]: any } = {};
Array.from(map.entries()).forEach(f => (docs[f[0]] = f[1]));
- const docString = JSON.stringify({ id: clone[Id], docs }, decycle(replacer));
-
- let generateZIP = (proms: string[]) => {
- var zip = new JSZip();
- var count = 0;
- var zipFilename = 'dashExport.zip';
-
- proms
- .filter(url => url.startsWith(window.location.origin))
- .forEach((url, i) => {
- var filename = proms[i].replace(window.location.origin + '/', '').replace(/\//g, '%%%');
- // loading a file and add it in a zip file
- JSZipUtils.getBinaryContent(url, function (err: any, data: any) {
- if (err) {
- throw err; // or handle the error
- }
- zip.file(filename, data, { binary: true });
- count++;
- if (count == proms.length) {
- zip.file('doc.json', docString);
- zip.generateAsync({ type: 'blob' }).then(function (content) {
- saveAs(content, zipFilename);
- });
- }
- });
- });
- };
- generateZIP(proms);
- const zip = new JSZip();
-
- zip.file('doc.json', docString);
-
- // // Generate a directory within the Zip file structure
- // var img = zip.folder("images");
-
- // // Add a file to the directory, in this case an image with data URI as contents
- // img.file("smile.gif", imgData, {base64: true});
+ Array.from(linkMap.entries()).forEach(l => (links[l[0]] = l[1]));
+ const jsonDocs = JSON.stringify({ id: clone[Id], docs, links }, decycle(replacer));
- // Generate the zip file asynchronously
- zip.generateAsync({ type: 'blob' }).then((content: any) => {
- // Force down of the Zip file
- saveAs(content, doc.title + '.zip'); // glr: Possibly change the name of the document to match the title?
- });
- }
- //
- // Determines whether the layout needs to be expanded (as a template).
- // template expansion is rquired when the layout is a template doc/field and there's a datadoc which isn't equal to the layout template
- //
- export function WillExpandTemplateLayout(layoutDoc: Doc, dataDoc?: Doc) {
- return (layoutDoc.isTemplateForField || layoutDoc.isTemplateDoc) && dataDoc && layoutDoc !== dataDoc;
+ const zip = new JSZip();
+ var count = 0;
+ const promArr = Array.from(proms).filter(url => url.startsWith(window.location.origin));
+ if (!promArr.length) {
+ zip.file('docs.json', jsonDocs);
+ zip.generateAsync({ type: 'blob' }).then(content => saveAs(content, zipFilename));
+ } else
+ promArr.forEach((url, i) => {
+ // load each file and add it to the zip archive
+ JSZipUtils.getBinaryContent(url, (err: any, data: any) => {
+ if (err) throw err; // or handle the error
+ // // Generate a directory within the Zip file structure
+ // const assets = zip.folder("assets");
+ // assets.file(filename, data, {binary: true});
+ const assetPathOnServer = promArr[i].replace(window.location.origin + '/', '').replace(/\//g, '%%%');
+ zip.file(assetPathOnServer, data, { binary: true });
+ console.log(' => ' + url);
+ if (++count === promArr.length) {
+ zip.file('docs.json', jsonDocs);
+ zip.generateAsync({ type: 'blob' }).then(content => saveAs(content, zipFilename));
+ // const a = document.createElement("a");
+ // const url = Utils.prepend(`/downloadId/${this.props.Document[Id]}`);
+ // a.href = url;
+ // a.download = `DocExport-${this.props.Document[Id]}.zip`;
+ // a.click();
+ }
+ });
+ });
}
const _pendingMap: Map<string, boolean> = new Map();
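The rewritten Zip path above serializes the known field types through SerializationHelper.Serialize, gathers asset URLs into a Set to avoid duplicate downloads, and only then bundles docs.json plus the fetched assets. A minimal sketch of just the bundling step, assuming JSZip, JSZipUtils, and saveAs are available as they are in Doc.ts; the asset URL, archive name, and jsonDocs payload are placeholders:

const zip = new JSZip();
const jsonDocs = '{"id":"...","docs":{},"links":{}}';                 // placeholder for the serialized clone map
const assets = ['http://localhost:1050/files/images/sample_o.png'];   // placeholder asset URLs gathered by the replacer
let pending = assets.length;
assets.forEach(url =>
    JSZipUtils.getBinaryContent(url, (err: any, data: any) => {
        if (err) throw err;
        // flatten the server path into a single archive entry name, as the diff does with %%%
        zip.file(url.replace(/^https?:\/\/[^/]+\//, '').replace(/\//g, '%%%'), data, { binary: true });
        if (--pending === 0) {
            zip.file('docs.json', jsonDocs);
            zip.generateAsync({ type: 'blob' }).then(content => saveAs(content, 'dashExport.zip'));
        }
    })
);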
@@ -915,44 +894,34 @@ export namespace Doc {
// Returns an expanded template layout for a target data document if there is a template relationship
// between the two. If so, the layoutDoc is expanded into a new document that inherits the properties
// of the original layout while allowing for individual layout properties to be overridden in the expanded layout.
- // templateArgs should be equivalent to the layout key that generates the template since that's where the template parameters are stored in ()'s at the end of the key.
- // NOTE: the template will have references to "@params" -- the template arguments will be assigned to the '@params' field
- // so that when the @params key is accessed, it will be rewritten as the key that is stored in the 'params' field and
- // the derefence will then occur on the rootDocument (the original document).
- // in the future, field references could be written as @<someparam> and then arguments would be passed in the layout key as:
- // layout_mytemplate(somparam=somearg).
- // then any references to @someparam would be rewritten as accesses to 'somearg' on the rootDocument
- export function expandTemplateLayout(templateLayoutDoc: Doc, targetDoc?: Doc, templateArgs?: string) {
- const args = templateArgs?.match(/\(([a-zA-Z0-9._\-]*)\)/)?.[1].replace('()', '') || StrCast(templateLayoutDoc.PARAMS);
- if ((!args && !WillExpandTemplateLayout(templateLayoutDoc, targetDoc)) || !targetDoc) return templateLayoutDoc;
-
- const templateField = StrCast(templateLayoutDoc.isTemplateForField); // the field that the template renders
+ export function expandTemplateLayout(templateLayoutDoc: Doc, targetDoc?: Doc) {
+ // nothing to do if the layout isn't a template or we don't have a target that's different than the template
+ if (!targetDoc || templateLayoutDoc === targetDoc || (!templateLayoutDoc.isTemplateForField && !templateLayoutDoc.isTemplateDoc)) {
+ return templateLayoutDoc;
+ }
+
+ const templateField = StrCast(templateLayoutDoc.isTemplateForField, Doc.LayoutFieldKey(templateLayoutDoc)); // the field that the template renders
// First it checks if an expanded layout already exists -- if so it will be stored on the dataDoc
// using the template layout doc's id as the field key.
// If it doesn't find the expanded layout, then it makes a delegate of the template layout and
// saves it on the data doc indexed by the template layout's id.
//
- const params = args.split('=').length > 1 ? args.split('=')[0] : 'PARAMS';
- const layoutFielddKey = Doc.LayoutFieldKey(templateLayoutDoc);
- const expandedLayoutFieldKey = (templateField || layoutFielddKey) + '-layout[' + templateLayoutDoc[Id] + (args ? `(${args})` : '') + ']';
+ const expandedLayoutFieldKey = templateField + '-layout[' + templateLayoutDoc[Id] + ']';
let expandedTemplateLayout = targetDoc?.[expandedLayoutFieldKey];
if (templateLayoutDoc.resolvedDataDoc instanceof Promise) {
expandedTemplateLayout = undefined;
_pendingMap.set(targetDoc[Id] + expandedLayoutFieldKey, true);
- } else if (expandedTemplateLayout === undefined && !_pendingMap.get(targetDoc[Id] + expandedLayoutFieldKey + args)) {
- if (templateLayoutDoc.resolvedDataDoc === (targetDoc.rootDocument || Doc.GetProto(targetDoc)) && templateLayoutDoc.PARAMS === StrCast(targetDoc.PARAMS)) {
+ } else if (expandedTemplateLayout === undefined && !_pendingMap.get(targetDoc[Id] + expandedLayoutFieldKey)) {
+ if (templateLayoutDoc.resolvedDataDoc === (targetDoc.rootDocument || Doc.GetProto(targetDoc))) {
expandedTemplateLayout = templateLayoutDoc; // reuse an existing template layout if its for the same document with the same params
} else {
templateLayoutDoc.resolvedDataDoc && (templateLayoutDoc = Cast(templateLayoutDoc.proto, Doc, null) || templateLayoutDoc); // if the template has already been applied (ie, a nested template), then use the template's prototype
if (!targetDoc[expandedLayoutFieldKey]) {
- _pendingMap.set(targetDoc[Id] + expandedLayoutFieldKey + args, true);
+ _pendingMap.set(targetDoc[Id] + expandedLayoutFieldKey, true);
setTimeout(
action(() => {
const newLayoutDoc = Doc.MakeDelegate(templateLayoutDoc, undefined, '[' + templateLayoutDoc.title + ']');
- // the template's arguments are stored in params which is derefenced to find
- // the actual field key where the parameterized template data is stored.
- newLayoutDoc[params] = args !== '...' ? args : ''; // ... signifies the layout has sub template(s) -- so we have to expand the layout for them so that they can get the correct 'rootDocument' field, but we don't need to reassign their params. it would be better if the 'rootDocument' field could be passed dynamically to avoid have to create instances
newLayoutDoc.rootDocument = targetDoc;
const dataDoc = Doc.GetProto(targetDoc);
newLayoutDoc.resolvedDataDoc = dataDoc;
@@ -961,7 +930,7 @@ export namespace Doc {
}
targetDoc[expandedLayoutFieldKey] = newLayoutDoc;
- _pendingMap.delete(targetDoc[Id] + expandedLayoutFieldKey + args);
+ _pendingMap.delete(targetDoc[Id] + expandedLayoutFieldKey);
})
);
}
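With the template-argument machinery removed, the expanded layout is now cached on the target doc under a key built only from the template field and the template layout's id. A small illustration with hypothetical values:

// hypothetical template that renders the 'text' field and whose layout doc id is 'abc123':
const templateField = 'text';
const templateLayoutId = 'abc123';
const expandedLayoutFieldKey = templateField + '-layout[' + templateLayoutId + ']'; // "text-layout[abc123]"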
@@ -974,11 +943,11 @@ export namespace Doc {
// otherwise, it just returns the childDoc
export function GetLayoutDataDocPair(containerDoc: Doc, containerDataDoc: Opt<Doc>, childDoc: Doc) {
if (!childDoc || childDoc instanceof Promise || !Doc.GetProto(childDoc)) {
- console.log('No, no, no!');
+ console.log('Warning: GetLayoutDataDocPair childDoc not defined');
return { layout: childDoc, data: childDoc };
}
- const resolvedDataDoc = Doc.AreProtosEqual(containerDataDoc, containerDoc) || (!childDoc.isTemplateDoc && !childDoc.isTemplateForField && !childDoc.PARAMS) ? undefined : containerDataDoc;
- return { layout: Doc.expandTemplateLayout(childDoc, resolvedDataDoc, '(' + StrCast(containerDoc.PARAMS) + ')'), data: resolvedDataDoc };
+ const resolvedDataDoc = Doc.AreProtosEqual(containerDataDoc, containerDoc) || (!childDoc.isTemplateDoc && !childDoc.isTemplateForField) ? undefined : containerDataDoc;
+ return { layout: Doc.expandTemplateLayout(childDoc, resolvedDataDoc), data: resolvedDataDoc };
}
export function Overwrite(doc: Doc, overwrite: Doc, copyProto: boolean = false): Doc {
@@ -1209,7 +1178,7 @@ export namespace Doc {
return doc[StrCast(doc.layoutKey, 'layout')];
}
export function LayoutFieldKey(doc: Doc): string {
- return StrCast(Doc.Layout(doc).layout).split("'")[1];
+ return StrCast(Doc.Layout(doc).layout).split("'")[1]; // bcz: TODO check on this. Used to always reference 'layout', now it uses the layout specified by the current layoutKey
}
export function NativeAspect(doc: Doc, dataDoc?: Doc, useDim?: boolean) {
return Doc.NativeWidth(doc, dataDoc, useDim) / (Doc.NativeHeight(doc, dataDoc, useDim) || 1);
@@ -1218,9 +1187,10 @@ export namespace Doc {
return !doc ? 0 : NumCast(doc._nativeWidth, NumCast((dataDoc || doc)[Doc.LayoutFieldKey(doc) + '-nativeWidth'], useWidth ? doc[WidthSym]() : 0));
}
export function NativeHeight(doc?: Doc, dataDoc?: Doc, useHeight?: boolean) {
- const dheight = doc ? NumCast((dataDoc || doc)[Doc.LayoutFieldKey(doc) + '-nativeHeight'], useHeight ? doc[HeightSym]() : 0) : 0;
- const nheight = doc ? (Doc.NativeWidth(doc, dataDoc, useHeight) * doc[HeightSym]()) / doc[WidthSym]() : 0;
- return !doc ? 0 : NumCast(doc._nativeHeight, nheight || dheight);
+ if (!doc) return 0;
+ const nheight = (Doc.NativeWidth(doc, dataDoc, useHeight) * doc[HeightSym]()) / doc[WidthSym]();
+ const dheight = NumCast((dataDoc || doc)[Doc.LayoutFieldKey(doc) + '-nativeHeight'], useHeight ? doc[HeightSym]() : 0);
+ return NumCast(doc._nativeHeight, nheight || dheight);
}
export function SetNativeWidth(doc: Doc, width: number | undefined, fieldKey?: string) {
doc[(fieldKey ?? Doc.LayoutFieldKey(doc)) + '-nativeWidth'] = width;
@@ -1332,6 +1302,7 @@ export namespace Doc {
}
export function LinkEndpoint(linkDoc: Doc, anchorDoc: Doc) {
+ if (linkDoc.anchor2 === anchorDoc || (linkDoc.anchor2 as Doc).annotationOn) return '2';
return Doc.AreProtosEqual(anchorDoc, (linkDoc.anchor1 as Doc).annotationOn as Doc) || Doc.AreProtosEqual(anchorDoc, linkDoc.anchor1 as Doc) ? '1' : '2';
}
@@ -1390,7 +1361,7 @@ export namespace Doc {
});
}
export function UnBrushAllDocs() {
- brushManager.BrushedDoc.clear();
+ runInAction(() => brushManager.BrushedDoc.clear());
}
export function getDocTemplate(doc?: Doc) {
@@ -1415,7 +1386,7 @@ export namespace Doc {
if (typeof value === 'string') {
value = value.replace(`,${Utils.noRecursionHack}`, '');
}
- const fieldVal = key === '#' ? (StrCast(doc.tags).includes(':#' + value + ':') ? StrCast(doc.tags) : undefined) : doc[key];
+ const fieldVal = doc[key];
if (Cast(fieldVal, listSpec('string'), []).length) {
const vals = Cast(fieldVal, listSpec('string'), []);
const docs = vals.some(v => (v as any) instanceof Doc);
@@ -1423,7 +1394,7 @@ export namespace Doc {
return vals.some(v => v.includes(value)); // bcz: arghh: Todo: comparison should be parameterized as exact, or substring
}
const fieldStr = Field.toString(fieldVal as Field);
- return fieldStr.includes(value); // bcz: arghh: Todo: comparison should be parameterized as exact, or substring
+ return fieldStr.includes(value) || (value === String.fromCharCode(127) + '--undefined--' && fieldVal === undefined); // bcz: arghh: Todo: comparison should be parameterized as exact, or substring
}
export function deiconifyView(doc: Doc) {
@@ -1564,20 +1535,26 @@ export namespace Doc {
}
}
- export async function importDocument(file: File) {
+ //
+ // imports a previously exported zip file which contains a set of documents and their assets (e.g., images, videos)
+ // the 'remap' parameter determines whether the ids of the loaded documents should be kept as they were, or remapped to new ids
+ // if they are not remapped, loading the file will overwrite any existing documents with those ids
+ //
+ export async function importDocument(file: File, remap = false) {
const upload = Utils.prepend('/uploadDoc');
const formData = new FormData();
if (file) {
formData.append('file', file);
- formData.append('remap', 'true');
+ formData.append('remap', remap.toString());
const response = await fetch(upload, { method: 'POST', body: formData });
const json = await response.json();
- console.log(json);
if (json !== 'error') {
- await DocServer.GetRefFields(json.docids as string[]);
+ const docs = await DocServer.GetRefFields(json.docids as string[]);
const doc = DocCast(await DocServer.GetRefField(json.id));
- (await DocListCastAsync(doc?.LINKS))?.forEach(link => LinkManager.Instance.addLink(link));
- doc.LINKS = undefined;
+ const links = await DocServer.GetRefFields(json.linkids as string[]);
+ Array.from(Object.keys(links))
+ .map(key => links[key])
+ .forEach(link => link instanceof Doc && LinkManager.Instance.addLink(link));
return doc;
}
}
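A hedged usage sketch for the new remap parameter (inside an async function; zipFile stands for a File produced by Doc.Zip above):

// hypothetical usage: re-import an exported archive, remapping ids so existing documents are not overwritten
const imported = await Doc.importDocument(zipFile, /* remap */ true);
if (imported) {
    console.log('imported root document', imported[Id]);
}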
@@ -1725,8 +1702,11 @@ export namespace Doc {
}
}
+export function IdToDoc(id: string) {
+ return DocCast(DocServer.GetCachedRefField(id));
+}
ScriptingGlobals.add(function idToDoc(id: string): any {
- return DocServer.GetCachedRefField(id);
+ return IdToDoc(id);
});
ScriptingGlobals.add(function renameAlias(doc: any) {
return StrCast(Doc.GetProto(doc).title).replace(/\([0-9]*\)/, '') + `(${doc.aliasNumber})`;
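The new IdToDoc wrapper exposes the same cached lookup to TypeScript callers that the idToDoc scripting global provides. An illustrative use (the id is hypothetical):

// IdToDoc returns the client-cached Doc for the id, or nothing if that field hasn't been fetched yet
const cached = IdToDoc('someDocId');
cached && console.log(StrCast(cached.title));
// the same lookup remains available to Dash scripts as idToDoc("someDocId")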
diff --git a/src/fields/InkField.ts b/src/fields/InkField.ts
index a074098c1..22bea3927 100644
--- a/src/fields/InkField.ts
+++ b/src/fields/InkField.ts
@@ -1,5 +1,5 @@
import { Bezier } from 'bezier-js';
-import { createSimpleSchema, list, object, serializable } from 'serializr';
+import { alias, createSimpleSchema, list, object, serializable } from 'serializr';
import { ScriptingGlobals } from '../client/util/ScriptingGlobals';
import { Deserializable } from '../client/util/SerializationHelper';
import { Copy, ToScriptString, ToString } from './FieldSymbols';
@@ -64,7 +64,8 @@ const strokeDataSchema = createSimpleSchema({
@Deserializable('ink')
export class InkField extends ObjectField {
- @serializable(list(object(strokeDataSchema)))
+ public static InkDataFieldName = '__inkData';
+ @serializable(alias(InkField.InkDataFieldName, list(object(strokeDataSchema))))
readonly inkData: InkData;
constructor(data: InkData) {
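The alias above changes only the serialized property name, so ink strokes now round-trip under '__inkData', and the exported constant lets Doc.Zip's replacer recognize the stroke array instead of wrapping it as a generic list. A standalone serializr sketch of how alias renames a field on serialization (not Dash code; names are illustrative):

import { alias, list, primitive, serializable, serialize } from 'serializr';

class Stroke {
    @serializable(alias('__pts', list(primitive())))
    pts: number[] = [];
}
const s = new Stroke();
s.pts = [1, 2, 3];
console.log(serialize(s)); // => { __pts: [ 1, 2, 3 ] }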
diff --git a/src/fields/List.ts b/src/fields/List.ts
index 9c7794813..dfd24cf7a 100644
--- a/src/fields/List.ts
+++ b/src/fields/List.ts
@@ -77,10 +77,16 @@ const listHandlers: any = {
item[OnUpdate] = updateFunction(list, i + start, item, this);
}
}
+ let hintArray: { val: any; index: number }[] = [];
+ for (let i = start; i < start + deleteCount; i++) {
+ hintArray.push({ val: list.__fields[i], index: i });
+ }
const res = list.__fields.splice(start, deleteCount, ...items);
+ // the hint object sends the starting index of the splice and the number
+ // of elements to delete.
this[Update](
items.length === 0 && deleteCount
- ? { op: '$remFromSet', items: removed, length: list.__fields.length }
+ ? { op: '$remFromSet', items: removed, hint: { start, deleteCount }, length: list.__fields.length }
: items.length && !deleteCount && start === list.__fields.length
? { op: '$addToSet', items, length: list.__fields.length }
: undefined
@@ -240,6 +246,7 @@ type ListUpdate<T> = ListSpliceUpdate<T> | ListIndexUpdate<T>;
type StoredType<T extends Field> = T extends RefField ? ProxyField<T> : T;
+export const ListFieldName = 'fields';
@Deserializable('list')
class ListImpl<T extends Field> extends ObjectField {
constructor(fields?: T[]) {
@@ -289,7 +296,8 @@ class ListImpl<T extends Field> extends ObjectField {
return this.__fields.map(toRealField);
}
- @serializable(alias('fields', list(autoObject(), { afterDeserialize: afterDocDeserialize })))
+ public static FieldDataName = 'fields';
+ @serializable(alias(ListFieldName, list(autoObject(), { afterDeserialize: afterDocDeserialize })))
private get __fields() {
return this.___fields;
}
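With the hint attached, a removal splice now tells the server where the deletion happened instead of only which items were removed. A rough sketch of the resulting update (values illustrative):

const l = new List<number>([1, 2, 3, 4, 5, 6]); // hypothetical list
l.splice(2, 3);                                  // removes 3, 4, 5
// the update emitted to the server now carries the location of the removal:
// { op: '$remFromSet', items: [3, 4, 5], hint: { start: 2, deleteCount: 3 }, length: 3 }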
diff --git a/src/fields/ScriptField.ts b/src/fields/ScriptField.ts
index feb419597..2b8750714 100644
--- a/src/fields/ScriptField.ts
+++ b/src/fields/ScriptField.ts
@@ -114,7 +114,7 @@ export class ScriptField extends ObjectField {
}
[ToScriptString]() {
- return 'script field';
+ return this.script.originalScript;
}
[ToString]() {
return this.script.originalScript;
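ToScriptString now returns the script text itself, matching ToString, so key/value displays show the actual script rather than a placeholder. Illustrative effect, for a field whose original text is "self.title":

// before this change: scriptField[ToScriptString]() === 'script field'
// after this change:  scriptField[ToScriptString]() === 'self.title', the same as scriptField[ToString]()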
diff --git a/src/fields/URLField.ts b/src/fields/URLField.ts
index 00c78e231..8ac20b1e5 100644
--- a/src/fields/URLField.ts
+++ b/src/fields/URLField.ts
@@ -54,9 +54,6 @@ export const nullAudio = 'https://actions.google.com/sounds/v1/alarms/beep_short
@Deserializable('audio')
export class AudioField extends URLField {}
@scriptingGlobal
-@Deserializable('recording')
-export class RecordingField extends URLField {}
-@scriptingGlobal
@Deserializable('image')
export class ImageField extends URLField {}
@scriptingGlobal
@@ -69,14 +66,8 @@ export class PdfField extends URLField {}
@Deserializable('web')
export class WebField extends URLField {}
@scriptingGlobal
-@Deserializable('map')
-export class MapField extends URLField {}
-@scriptingGlobal
@Deserializable('csv')
export class CsvField extends URLField {}
@scriptingGlobal
@Deserializable('youtube')
export class YoutubeField extends URLField {}
-@scriptingGlobal
-@Deserializable('webcam')
-export class WebCamField extends URLField {}
diff --git a/src/fields/documentSchemas.ts b/src/fields/documentSchemas.ts
index 5b489a96c..b7fd06973 100644
--- a/src/fields/documentSchemas.ts
+++ b/src/fields/documentSchemas.ts
@@ -89,7 +89,6 @@ export const documentSchema = createSchema({
hideAllLinks: 'boolean', // whether all individual blue anchor dots should be hidden
linkDisplay: 'boolean', // whether a link connection should be shown between link anchor endpoints.
isLightbox: 'boolean', // whether the marked object will display addDocTab() calls that target "lightbox" destinations
- isLinkButton: 'boolean', // whether document functions as a link follow button to follow the first link on the document when clicked
layers: listSpec('string'), // which layers the document is part of
_lockedPosition: 'boolean', // whether the document can be moved (dragged)
_lockedTransform: 'boolean', // whether a freeformview can pan/zoom
diff --git a/src/fields/util.ts b/src/fields/util.ts
index b0ad4db5d..2bc2bb3f8 100644
--- a/src/fields/util.ts
+++ b/src/fields/util.ts
@@ -110,8 +110,11 @@ const _setterImpl = action(function (target: any, prop: string | symbol | number
redo: () => (receiver[prop] = value),
undo: () => {
const wasUpdate = receiver[UpdatingFromServer];
+ const wasForce = receiver[ForceServerWrite];
+ receiver[ForceServerWrite] = true; // needed since writes aren't propagated to the server if UpdatingFromServer is set
receiver[UpdatingFromServer] = true; // needed if the event caused ACL's to change such that the doc is otherwise no longer editable.
receiver[prop] = curValue;
+ receiver[ForceServerWrite] = wasForce;
receiver[UpdatingFromServer] = wasUpdate;
},
prop: prop?.toString(),
@@ -403,7 +406,7 @@ export function updateFunction(target: any, prop: any, value: any, receiver: any
diff?.op === '$addToSet'
? { $addToSet: { ['fields.' + prop]: SerializationHelper.Serialize(new List<Doc>(diff.items)) } }
: diff?.op === '$remFromSet'
- ? { $remFromSet: { ['fields.' + prop]: SerializationHelper.Serialize(new List<Doc>(diff.items)) } }
+ ? { $remFromSet: { ['fields.' + prop]: SerializationHelper.Serialize(new List<Doc>(diff.items)), hint: diff.hint } }
: { $set: { ['fields.' + prop]: SerializationHelper.Serialize(value) } };
!op.$set && ((op as any).length = diff.length);
const prevValue = ObjectField.MakeCopy(lastValue as List<any>);