Diffstat (limited to 'src/client/DocServer.ts')
-rw-r--r--  src/client/DocServer.ts  29
1 file changed, 16 insertions, 13 deletions
diff --git a/src/client/DocServer.ts b/src/client/DocServer.ts
index e61de86be..ddc4318aa 100644
--- a/src/client/DocServer.ts
+++ b/src/client/DocServer.ts
@@ -351,21 +351,25 @@ export namespace DocServer {
// 2) synchronously, we emit a single callback to the server requesting the serialized (i.e. represented by a string)
// fields for the given ids. This returns a promise, which, when resolved, indicates that all the JSON serialized versions of
// the fields have been returned from the server
+ console.log('Requesting ' + requestedIds.length + ' fields');
const getSerializedFields: Promise<any> = Utils.EmitCallback(_socket, MessageStore.GetRefFields, requestedIds);
// 3) when the serialized RefFields have been received, go ahead and begin deserializing them into objects.
// Here, once deserialized, we also invoke .proto to 'load' the documents' prototypes, which ensures that all
// future .proto calls on the Doc won't have to go farther than the cache to get their actual value.
- const deserializeFields = getSerializedFields.then(async fields => {
- const fieldMap: { [id: string]: RefField } = {};
+
+ const fields: { [id: string]: RefField } = {};
+ await getSerializedFields.then(async fieldvals => {
+ console.log('deserializing ' + fieldvals.length + ' fields');
const proms: Promise<void>[] = [];
runInAction(() => {
- for (const field of fields) {
+ for (const field of fieldvals) {
const cached = _cache[field.id];
if (!cached) {
+ console.log('<');
// deserialize
const prom = SerializationHelper.Deserialize(field).then(deserialized => {
- fieldMap[field.id] = deserialized;
+ fields[field.id] = deserialized;
//overwrite or delete any promises (that we inserted as flags
// to indicate that the field was in the process of being fetched). Now everything
@@ -375,6 +379,7 @@ export namespace DocServer {
} else {
delete _cache[field.id];
}
+ console.log('>');
return deserialized;
});
// 4) here, for each of the documents we've requested *ourselves* (i.e. weren't promises or found in the cache)
@@ -387,29 +392,27 @@ export namespace DocServer {
// adds to a list of promises that will be awaited asynchronously
proms.push(prom);
} else if (cached instanceof Promise) {
+ console.log('.');
proms.push(cached as any);
- cached.then((f: any) => (fieldMap[field.id] = f));
+ cached.then((f: any) => (fields[field.id] = f));
} else if (field) {
+ console.log('-');
proms.push(cached as any);
- fieldMap[field.id] = DocServer.GetCachedRefField(field.id) || field;
+ fields[field.id] = DocServer.GetCachedRefField(field.id) || field;
}
}
});
- await Promise.all(proms);
- return fieldMap;
+ return Promise.all(proms);
});
// 5) at this point, all fields have a) been returned from the server and b) been deserialized into actual Field objects whose
// prototype documents, if any, have also been fetched and cached.
- const fields = await deserializeFields;
+ console.log('Deserialized ' + Object.keys(fields).length + ' fields');
// 6) with this confidence, we can now go through and update the cache at the ids of the fields that
// we explicitly had to fetch. To finish it off, we add whatever value we've come up with for a given
// id to the soon-to-be-returned field mapping.
- requestedIds.forEach(id => {
- const field = fields[id];
- map[id] = field;
- });
+ requestedIds.forEach(id => (map[id] = fields[id]));
}
// 7) those promises we encountered in the else if of 1), which represent
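
For orientation, the control flow that steps 2) through 6) in the comments above describe can be sketched on its own. The sketch below is not the project's code: fetchSerializedFields, deserializeField, cache, and the RefField shape are hypothetical stand-ins for Utils.EmitCallback with MessageStore.GetRefFields, SerializationHelper.Deserialize, _cache, and the real RefField class, and the MobX runInAction wrapper is omitted. Only the fetch-and-deserialize pattern is illustrated.

    // Hypothetical stand-ins; only the control flow mirrors the diff above.
    type RefField = { id: string };

    declare function fetchSerializedFields(ids: string[]): Promise<any[]>;  // stand-in for the socket round trip (step 2)
    declare function deserializeField(raw: any): Promise<RefField>;         // stand-in for JSON -> RefField deserialization (step 3)

    const cache: { [id: string]: RefField | Promise<RefField> } = {};       // stand-in for _cache

    async function fetchFields(requestedIds: string[]): Promise<{ [id: string]: RefField }> {
        const fields: { [id: string]: RefField } = {};
        const raws = await fetchSerializedFields(requestedIds);             // step 2: one request for all ids
        const proms: Promise<void>[] = [];
        for (const raw of raws) {
            const cached = cache[raw.id];
            if (!cached) {
                // steps 3/4: deserialize ourselves and record the result when it resolves
                proms.push(deserializeField(raw).then(deserialized => { fields[raw.id] = deserialized; }));
            } else if (cached instanceof Promise) {
                // another request is already fetching this field; reuse its promise
                proms.push(cached.then(f => { fields[raw.id] = f; }));
            } else {
                // already cached as a real field; reuse it directly
                fields[raw.id] = cached;
            }
        }
        await Promise.all(proms);                                           // step 5: everything deserialized
        return fields;                                                      // step 6: caller merges these into its map
    }

The design point the diff preserves is that ids already being fetched elsewhere are satisfied from the cached promise rather than re-requested, so each field is deserialized at most once.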