Diffstat (limited to 'src/client/DocServer.ts')
-rw-r--r-- | src/client/DocServer.ts | 173 |
1 file changed, 101 insertions, 72 deletions
diff --git a/src/client/DocServer.ts b/src/client/DocServer.ts
index 5a34fcf11..2a7f5a09b 100644
--- a/src/client/DocServer.ts
+++ b/src/client/DocServer.ts
@@ -1,7 +1,9 @@
 import { runInAction } from 'mobx';
 import * as rp from 'request-promise';
 import * as io from 'socket.io-client';
-import { Doc, Opt, UpdatingFromServer } from '../fields/Doc';
+import { Doc, Opt } from '../fields/Doc';
+import { UpdatingFromServer } from '../fields/DocSymbols';
+import { FieldLoader } from '../fields/FieldLoader';
 import { HandleUpdate, Id, Parent } from '../fields/FieldSymbols';
 import { ObjectField } from '../fields/ObjectField';
 import { RefField } from '../fields/RefField';
@@ -28,16 +30,34 @@ import { GestureOverlay } from './views/GestureOverlay';
 
 export namespace DocServer {
     let _cache: { [id: string]: RefField | Promise<Opt<RefField>> } = {};
+    export function QUERY_SERVER_CACHE(title: string) {
+        const foundDocId = Array.from(Object.keys(_cache))
+            .filter(key => _cache[key] instanceof Doc)
+            .find(key => (_cache[key] as Doc).title === title);
+
+        return foundDocId ? (_cache[foundDocId] as Doc) : undefined;
+    }
+    let lastCacheUpdate = 0;
     export function UPDATE_SERVER_CACHE(print: boolean = false) {
-        const strings: string[] = [];
-        Array.from(Object.keys(_cache)).forEach(key => {
-            const doc = _cache[key];
-            if (doc instanceof Doc) strings.push(StrCast(doc.author) + ' ' + StrCast(doc.title) + ' ' + StrCast(Doc.GetT(doc, 'title', 'string', true)));
+        if (print) {
+            const strings: string[] = [];
+            Array.from(Object.keys(_cache)).forEach(key => {
+                const doc = _cache[key];
+                if (doc instanceof Doc) strings.push(StrCast(doc.author) + ' ' + StrCast(doc.title) + ' ' + StrCast(Doc.GetT(doc, 'title', 'string', true)));
+            });
+            strings.sort().forEach((str, i) => console.log(i.toString() + ' ' + str));
+        }
+        const filtered = Array.from(Object.keys(_cache)).filter(key => {
+            const doc = _cache[key] as Doc;
+            if (!(StrCast(doc.author).includes('.edu') || StrCast(doc.author).includes('.com')) || doc.author == Doc.CurrentUserEmail) return true;
+            return false;
         });
-        print && strings.sort().forEach((str, i) => console.log(i.toString() + ' ' + str));
+        if (filtered.length === lastCacheUpdate) return;
+        lastCacheUpdate = filtered.length;
+
         rp.post(Utils.prepend('/setCacheDocumentIds'), {
             body: {
-                cacheDocumentIds: Array.from(Object.keys(_cache)).join(';'),
+                cacheDocumentIds: filtered.join(';'),
             },
             json: true,
         });
@@ -225,7 +245,7 @@ export namespace DocServer {
      * the server if the document has not been cached.
      * @param id the id of the requested document
      */
-    const _GetRefFieldImpl = async (id: string, force: boolean = false): Promise<Opt<RefField>> => {
+    const _GetRefFieldImpl = (id: string, force: boolean = false): Promise<Opt<RefField>> => {
         // an initial pass through the cache to determine whether the document needs to be fetched,
         // is already in the process of being fetched or already exists in the
         // cache
@@ -300,8 +320,7 @@ export namespace DocServer {
     }
 
     export async function getYoutubeChannels() {
-        const apiKey = await Utils.EmitCallback(_socket, MessageStore.YoutubeApiQuery, { type: YoutubeQueryTypes.Channels });
-        return apiKey;
+        return await Utils.EmitCallback(_socket, MessageStore.YoutubeApiQuery, { type: YoutubeQueryTypes.Channels });
     }
 
     export function getYoutubeVideos(videoTitle: string, callBack: (videos: any[]) => void) {
@@ -320,10 +339,11 @@
      */
     const _GetRefFieldsImpl = async (ids: string[]): Promise<{ [id: string]: Opt<RefField> }> => {
         const requestedIds: string[] = [];
-        const waitingIds: string[] = [];
-        const promises: Promise<Opt<RefField>>[] = [];
-        const map: { [id: string]: Opt<RefField> } = {};
+        const promises: Promise<any>[] = [];
+        let defaultRes: any = undefined;
+        const defaultPromise = new Promise<any>(res => (defaultRes = res));
+        let defaultPromises: { p: Promise<any>; id: string }[] = [];
         // 1) an initial pass through the cache to determine
         //    i) which documents need to be fetched
         //    ii) which are already in the process of being fetched
         //    iii) which already exist in the
@@ -331,6 +351,13 @@
         for (const id of ids) {
            const cached = _cache[id];
            if (cached === undefined) {
+                defaultPromises.push({
+                    id,
+                    p: (_cache[id] = new Promise<any>(async res => {
+                        await defaultPromise;
+                        res(_cache[id]);
+                    })),
+                });
                 // NOT CACHED => we'll have to send a request to the server
                 requestedIds.push(id);
             } else if (cached instanceof Promise) {
@@ -338,10 +365,10 @@
                 // and requested one of the documents I'm looking for. Shouldn't fetch again, just
                 // wait until this promise is resolved (see 7)
                 promises.push(cached);
-                waitingIds.push(id);
+                // waitingIds.push(id);
             } else {
                 // CACHED => great, let's just add it to the field map
-                map[id] = cached;
+                // map[id] = cached;
             }
         }
 
@@ -349,66 +376,65 @@
             // 2) synchronously, we emit a single callback to the server requesting the serialized (i.e. represented by a string)
             // fields for the given ids. This returns a promise, which, when resolved, indicates that all the JSON serialized versions of
             // the fields have been returned from the server
-            const getSerializedFields: Promise<any> = Utils.EmitCallback(_socket, MessageStore.GetRefFields, requestedIds);
+            console.log('Requesting ' + requestedIds.length);
+            FieldLoader.active && runInAction(() => (FieldLoader.ServerLoadStatus.requested = requestedIds.length));
+            const serializedFields = await Utils.EmitCallback(_socket, MessageStore.GetRefFields, requestedIds);
 
             // 3) when the serialized RefFields have been received, go head and begin deserializing them into objects.
             // Here, once deserialized, we also invoke .proto to 'load' the documents' prototypes, which ensures that all
             // future .proto calls on the Doc won't have to go farther than the cache to get their actual value.
-            const deserializeFields = getSerializedFields.then(async fields => {
-                const fieldMap: { [id: string]: RefField } = {};
-                const proms: Promise<void>[] = [];
-                runInAction(() => {
-                    for (const field of fields) {
-                        const cached = _cache[field.id];
-                        if (!cached) {
-                            // deserialize
-                            const prom = SerializationHelper.Deserialize(field).then(deserialized => {
-                                fieldMap[field.id] = deserialized;
-
-                                //overwrite or delete any promises (that we inserted as flags
-                                // to indicate that the field was in the process of being fetched). Now everything
-                                // should be an actual value within or entirely absent from the cache.
-                                if (deserialized !== undefined) {
-                                    _cache[field.id] = deserialized;
-                                } else {
-                                    delete _cache[field.id];
-                                }
-                                return deserialized;
-                            });
-                            // 4) here, for each of the documents we've requested *ourselves* (i.e. weren't promises or found in the cache)
-                            // we set the value at the field's id to a promise that will resolve to the field.
-                            // When we find that promises exist at keys in the cache, THIS is where they were set, just by some other caller (method).
-                            // The mapping in the .then call ensures that when other callers await these promises, they'll
-                            // get the resolved field
-                            _cache[field.id] = prom;
-
-                            // adds to a list of promises that will be awaited asynchronously
-                            proms.push(prom);
-                        } else if (cached instanceof Promise) {
-                            proms.push(cached as any);
-                            cached.then((f: any) => (fieldMap[field.id] = f));
-                        } else if (field) {
-                            proms.push(cached as any);
-                            fieldMap[field.id] = DocServer.GetCachedRefField(field.id) || field;
-                        }
-                    }
-                });
-                await Promise.all(proms);
-                return fieldMap;
-            });
+            let processed = 0;
+            console.log('deserializing ' + serializedFields.length + ' fields');
+            for (const field of serializedFields) {
+                processed++;
+                if (FieldLoader.active && processed % 150 === 0) {
+                    runInAction(() => (FieldLoader.ServerLoadStatus.retrieved = processed));
+                    await new Promise(res => setTimeout(res)); // force loading to yield to splash screen rendering to update progress
+                }
+                const cached = _cache[field.id];
+                if (!cached || (cached instanceof Promise && defaultPromises.some(dp => dp.p === cached))) {
+                    // deserialize
+                    // adds to a list of promises that will be awaited asynchronously
+                    promises.push(
+                        (_cache[field.id] = SerializationHelper.Deserialize(field).then(deserialized => {
+                            //overwrite or delete any promises (that we inserted as flags
+                            // to indicate that the field was in the process of being fetched). Now everything
+                            // should be an actual value within or entirely absent from the cache.
+                            if (deserialized !== undefined) {
+                                _cache[field.id] = deserialized;
+                            } else {
+                                delete _cache[field.id];
+                            }
+                            const promInd = defaultPromises.findIndex(dp => dp.id === field.id);
+                            promInd !== -1 && defaultPromises.splice(promInd, 1);
+                            return deserialized;
+                        }))
+                    );
+                    // 4) here, for each of the documents we've requested *ourselves* (i.e. weren't promises or found in the cache)
+                    // we set the value at the field's id to a promise that will resolve to the field.
+                    // When we find that promises exist at keys in the cache, THIS is where they were set, just by some other caller (method).
+                    // The mapping in the .then call ensures that when other callers await these promises, they'll
+                    // get the resolved field
+                } else if (cached instanceof Promise) {
+                    console.log('.');
+                    //promises.push(cached);
+                } else if (field) {
+                    // console.log('-');
+                }
+            }
+        }
 
-            // 5) at this point, all fields have a) been returned from the server and b) been deserialized into actual Field objects whose
-            // prototype documents, if any, have also been fetched and cached.
-            const fields = await deserializeFields;
+        await Promise.all(promises);
+        defaultPromises.forEach(df => delete _cache[df.id]);
+        defaultRes();
 
-            // 6) with this confidence, we can now go through and update the cache at the ids of the fields that
-            // we explicitly had to fetch. To finish it off, we add whatever value we've come up with for a given
-            // id to the soon-to-be-returned field mapping.
-            requestedIds.forEach(id => {
-                const field = fields[id];
-                map[id] = field;
-            });
-        }
+        // 5) at this point, all fields have a) been returned from the server and b) been deserialized into actual Field objects whose
+        // prototype documents, if any, have also been fetched and cached.
+        console.log('Deserialized ' + (requestedIds.length - defaultPromises.length) + ' fields');
+        // 6) with this confidence, we can now go through and update the cache at the ids of the fields that
+        // we explicitly had to fetch. To finish it off, we add whatever value we've come up with for a given
+        // id to the soon-to-be-returned field mapping.
+        //ids.forEach(id => (map[id] = _cache[id] as any));
 
         // 7) those promises we encountered in the else if of 1), which represent
        // other callers having already submitted a request to the server for (a) document(s)
        //
        // fortunately, those other callers will also hit their own version of 6) and clean up
        // the shared cache when these promises resolve, so all we have to do is...
-        const otherCallersFetching = await Promise.all(promises);
+        //const otherCallersFetching = await Promise.all(promises);

        // ...extract the RefFields returned from the resolution of those promises and add them to our
        // own map.
-        waitingIds.forEach((id, index) => (map[id] = otherCallersFetching[index]));
+        // waitingIds.forEach((id, index) => (map[id] = otherCallersFetching[index]));

        // now, we return our completed mapping from all of the ids that were passed into the method
        // to their actual RefField | undefined values. This return value either becomes the input
        // argument to the caller's promise (i.e. GetRefFields(["_id1_", "_id2_", "_id3_"]).then(map => //do something with map...))
        // or it is the direct return result if the promise is awaited (i.e. let fields = await GetRefFields(["_id1_", "_id2_", "_id3_"])).
-        return map;
+        return ids.reduce((map, id) => {
+            map[id] = _cache[id] as any;
+            return map;
+        }, {} as { [id: string]: Opt<RefField> });
     };

     let _GetRefFields: (ids: string[]) => Promise<{ [id: string]: Opt<RefField> }> = errorFunc;
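Note: a minimal usage sketch of the two cache helpers introduced above. The import path and the findCachedDashboard wrapper are illustrative only, not part of this commit.

import { DocServer } from './client/DocServer';

// QUERY_SERVER_CACHE does a linear scan over the Docs already held in the client
// cache and never contacts the server, so it only finds documents that have
// already been fetched by some earlier request.
function findCachedDashboard(title: string) {
    const doc = DocServer.QUERY_SERVER_CACHE(title);
    if (!doc) console.log(title + ' has not been fetched into the client cache yet');
    return doc;
}

// UPDATE_SERVER_CACHE(true) prints the sorted author/title listing before posting
// the filtered id list to /setCacheDocumentIds; the new lastCacheUpdate guard skips
// the POST whenever the filtered id count has not changed since the last call.
DocServer.UPDATE_SERVER_CACHE(true);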
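Note: the rewritten _GetRefFieldsImpl deduplicates concurrent fetches by parking a placeholder Promise in _cache for every id it requests (the defaultPromise/defaultPromises pair), resolving those placeholders once its own batch returns, and building the result map directly from the cache at the end. Below is a self-contained sketch of that pattern, with a hypothetical fetchFromServer standing in for the Utils.EmitCallback(_socket, MessageStore.GetRefFields, ids) round trip.

type Field = { id: string; value: string };

const cache: { [id: string]: Field | Promise<Field | undefined> } = {};

// hypothetical stand-in for the socket round trip to the server
async function fetchFromServer(ids: string[]): Promise<Field[]> {
    return ids.map(id => ({ id, value: 'value-of-' + id }));
}

async function getFields(ids: string[]): Promise<{ [id: string]: Field | undefined }> {
    const toRequest: string[] = [];
    const pending: Promise<unknown>[] = [];
    let release: any = undefined;
    const gate = new Promise<void>(res => (release = res)); // resolves once our batch has landed in the cache

    for (const id of ids) {
        const cached = cache[id];
        if (cached === undefined) {
            // not cached: publish a placeholder so concurrent callers wait on our fetch
            cache[id] = gate.then(() => cache[id] as Field | undefined);
            toRequest.push(id);
        } else if (cached instanceof Promise) {
            pending.push(cached); // another caller is already fetching this id
        } // else: a resolved value is already cached
    }

    const results = toRequest.length ? await fetchFromServer(toRequest) : [];
    results.forEach(field => (cache[field.id] = field)); // replace placeholders with real values
    toRequest.forEach(id => cache[id] instanceof Promise && delete cache[id]); // drop placeholders the server never filled
    release(); // wake every caller awaiting one of our placeholders
    await Promise.all(pending); // wait for batches started by other callers

    return ids.reduce((map, id) => {
        const value = cache[id];
        map[id] = value instanceof Promise ? undefined : value;
        return map;
    }, {} as { [id: string]: Field | undefined });
}

Because the final mapping is read straight from the cache, a caller that raced with another batch simply sees whatever value the winning fetch left behind, and ids the server never returned come back as undefined.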
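Note: the processed % 150 check in the deserialization loop above keeps the splash screen responsive: progress is written to a MobX observable inside runInAction and the loop then yields one macrotask so the UI can repaint. A stripped-down sketch of the same trick; ServerLoadStatus here is a local stand-in for FieldLoader.ServerLoadStatus, and deserializeWithProgress is hypothetical.

import { observable, runInAction } from 'mobx';

// what a splash/progress screen would observe to render a progress bar
const ServerLoadStatus = observable({ requested: 0, retrieved: 0 });

async function deserializeWithProgress<T>(items: T[], handle: (item: T) => void) {
    let processed = 0;
    for (const item of items) {
        handle(item);
        if (++processed % 150 === 0) {
            // publish progress, then yield one macrotask so the UI can re-render
            runInAction(() => (ServerLoadStatus.retrieved = processed));
            await new Promise<void>(res => setTimeout(res));
        }
    }
    runInAction(() => (ServerLoadStatus.retrieved = processed));
}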