path: root/src/client/DocServer.ts
author     brynnchernosky <56202540+brynnchernosky@users.noreply.github.com>  2023-01-19 14:33:22 -0500
committer  brynnchernosky <56202540+brynnchernosky@users.noreply.github.com>  2023-01-19 14:33:22 -0500
commit     0ef7050b0792ce183c7d5cda637cb79b7a92b704 (patch)
tree       d1dca8f09ddc2954c2ce88439172aeded672c0b6 /src/client/DocServer.ts
parent     ceb338752aacc383c97a0e3a9b608365a1cf39b6 (diff)
parent     d5f796b433d7e72130d4109a3775347ccb10c454 (diff)
Merge branch 'master' of github.com:brown-dash/Dash-Web into master
Diffstat (limited to 'src/client/DocServer.ts')
-rw-r--r--  src/client/DocServer.ts  59
1 file changed, 38 insertions, 21 deletions
diff --git a/src/client/DocServer.ts b/src/client/DocServer.ts
index 5a34fcf11..cab90138f 100644
--- a/src/client/DocServer.ts
+++ b/src/client/DocServer.ts
@@ -2,6 +2,7 @@ import { runInAction } from 'mobx';
import * as rp from 'request-promise';
import * as io from 'socket.io-client';
import { Doc, Opt, UpdatingFromServer } from '../fields/Doc';
+import { FieldLoader } from '../fields/FieldLoader';
import { HandleUpdate, Id, Parent } from '../fields/FieldSymbols';
import { ObjectField } from '../fields/ObjectField';
import { RefField } from '../fields/RefField';
@@ -29,15 +30,23 @@ export namespace DocServer {
let _cache: { [id: string]: RefField | Promise<Opt<RefField>> } = {};
export function UPDATE_SERVER_CACHE(print: boolean = false) {
- const strings: string[] = [];
- Array.from(Object.keys(_cache)).forEach(key => {
- const doc = _cache[key];
- if (doc instanceof Doc) strings.push(StrCast(doc.author) + ' ' + StrCast(doc.title) + ' ' + StrCast(Doc.GetT(doc, 'title', 'string', true)));
+ if (print) {
+ const strings: string[] = [];
+ Array.from(Object.keys(_cache)).forEach(key => {
+ const doc = _cache[key];
+ if (doc instanceof Doc) strings.push(StrCast(doc.author) + ' ' + StrCast(doc.title) + ' ' + StrCast(Doc.GetT(doc, 'title', 'string', true)));
+ });
+ strings.sort().forEach((str, i) => console.log(i.toString() + ' ' + str));
+ }
+ const filtered = Array.from(Object.keys(_cache)).filter(key => {
+ const doc = _cache[key] as Doc;
+ if (!(StrCast(doc.author).includes('.edu') || StrCast(doc.author).includes('.com')) || doc.author == Doc.CurrentUserEmail) return true;
+ return false;
});
- print && strings.sort().forEach((str, i) => console.log(i.toString() + ' ' + str));
+
rp.post(Utils.prepend('/setCacheDocumentIds'), {
body: {
- cacheDocumentIds: Array.from(Object.keys(_cache)).join(';'),
+ cacheDocumentIds: filtered.join(';'),
},
json: true,
});
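
Note: the hunk above changes UPDATE_SERVER_CACHE so that only a filtered subset of cached document ids is posted to /setCacheDocumentIds: a doc is kept when its author field does not look like an external email address ('.edu' / '.com'), or when the author is the current user. A minimal standalone sketch of that predicate, assuming the doc's author has already been read as a string (keepCachedDocId is an illustrative name, not part of the commit):

// Sketch only: restates the filter condition from the diff above.
// Returns true when the cached doc's id should be sent to the server.
function keepCachedDocId(author: string, currentUserEmail: string): boolean {
    const looksExternal = author.includes('.edu') || author.includes('.com');
    return !looksExternal || author === currentUserEmail;
}
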
@@ -349,21 +358,31 @@ export namespace DocServer {
// 2) synchronously, we emit a single callback to the server requesting the serialized (i.e. represented by a string)
// fields for the given ids. This returns a promise, which, when resolved, indicates that all the JSON serialized versions of
// the fields have been returned from the server
+ console.log('Requesting ' + requestedIds.length + ' fields');
+ FieldLoader.active && runInAction(() => (FieldLoader.ServerLoadStatus.requested = requestedIds.length));
const getSerializedFields: Promise<any> = Utils.EmitCallback(_socket, MessageStore.GetRefFields, requestedIds);
// 3) when the serialized RefFields have been received, go head and begin deserializing them into objects.
// Here, once deserialized, we also invoke .proto to 'load' the documents' prototypes, which ensures that all
// future .proto calls on the Doc won't have to go farther than the cache to get their actual value.
- const deserializeFields = getSerializedFields.then(async fields => {
- const fieldMap: { [id: string]: RefField } = {};
+
+ let retrieved = 0;
+ const fields: { [id: string]: RefField } = {};
+ await getSerializedFields.then(async fieldvals => {
+ console.log('deserializing ' + fieldvals.length + ' fields');
const proms: Promise<void>[] = [];
- runInAction(() => {
- for (const field of fields) {
+ await runInAction(async () => {
+ for (const field of fieldvals) {
const cached = _cache[field.id];
if (!cached) {
+ retrieved++;
+ if (FieldLoader.active && retrieved % 150 === 0) {
+ runInAction(() => (FieldLoader.ServerLoadStatus.retrieved = retrieved));
+ await new Promise(res => setTimeout(res));
+ }
// deserialize
- const prom = SerializationHelper.Deserialize(field).then(deserialized => {
- fieldMap[field.id] = deserialized;
+ const prom = SerializationHelper.Deserialize(field).then(async deserialized => {
+ fields[field.id] = deserialized;
//overwrite or delete any promises (that we inserted as flags
// to indicate that the field was in the process of being fetched). Now everything
@@ -385,29 +404,27 @@ export namespace DocServer {
// adds to a list of promises that will be awaited asynchronously
proms.push(prom);
} else if (cached instanceof Promise) {
+ console.log('.');
proms.push(cached as any);
- cached.then((f: any) => (fieldMap[field.id] = f));
+ cached.then((f: any) => (fields[field.id] = f));
} else if (field) {
+ console.log('-');
proms.push(cached as any);
- fieldMap[field.id] = DocServer.GetCachedRefField(field.id) || field;
+ fields[field.id] = DocServer.GetCachedRefField(field.id) || field;
}
}
});
- await Promise.all(proms);
- return fieldMap;
+ return Promise.all(proms);
});
// 5) at this point, all fields have a) been returned from the server and b) been deserialized into actual Field objects whose
// prototype documents, if any, have also been fetched and cached.
- const fields = await deserializeFields;
+ console.log('Deserialized ' + Object.keys(fields).length + ' fields');
// 6) with this confidence, we can now go through and update the cache at the ids of the fields that
// we explicitly had to fetch. To finish it off, we add whatever value we've come up with for a given
// id to the soon-to-be-returned field mapping.
- requestedIds.forEach(id => {
- const field = fields[id];
- map[id] = field;
- });
+ requestedIds.forEach(id => (map[id] = fields[id]));
}
// 7) those promises we encountered in the else if of 1), which represent
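
Note: the second hunk deserializes the returned fields while reporting progress through FieldLoader.ServerLoadStatus and yielding to the event loop every 150 retrieved fields so the UI can update. A simplified, self-contained sketch of that yield-every-N-items pattern, assuming a generic deserialize callback and a reportProgress stand-in for the FieldLoader update (both names are illustrative, not part of the commit):

// Sketch only: process items, report progress every `batchSize` items,
// and yield once to the event loop so pending UI work can run.
async function deserializeWithProgress<T, R>(
    items: T[],
    deserialize: (item: T) => Promise<R>,
    reportProgress: (done: number) => void,
    batchSize = 150
): Promise<R[]> {
    const pending: Promise<R>[] = [];
    let done = 0;
    for (const item of items) {
        done++;
        if (done % batchSize === 0) {
            reportProgress(done);
            await new Promise<void>(res => setTimeout(res)); // give the browser a chance to repaint
        }
        pending.push(deserialize(item));
    }
    return Promise.all(pending);
}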