| field | value | date |
|---|---|---|
| author | A.J. Shulman <Shulman.aj@gmail.com> | 2025-04-24 13:21:00 -0400 |
| committer | A.J. Shulman <Shulman.aj@gmail.com> | 2025-04-24 13:21:00 -0400 |
| commit | 5ce2263849bfb901e276a4c5fc8ca2dbd8b80350 | |
| tree | 34d4ac121cb13f141fcdb63325446d9c125813c4 | |
| parent | 321977e670cbdf10f6c49fc9071e3260a8bd4aae | |
attempt at linking docs but listing metadata doesn't work
Diffstat (limited to 'src/client/views/nodes/chatbot/tools/WebsiteInfoScraperTool.ts')
| mode | file | lines |
|---|---|---|
| -rw-r--r-- | src/client/views/nodes/chatbot/tools/WebsiteInfoScraperTool.ts | 11 |

1 file changed, 5 insertions, 6 deletions
```diff
diff --git a/src/client/views/nodes/chatbot/tools/WebsiteInfoScraperTool.ts b/src/client/views/nodes/chatbot/tools/WebsiteInfoScraperTool.ts
index 19ccd0b36..bff38ae15 100644
--- a/src/client/views/nodes/chatbot/tools/WebsiteInfoScraperTool.ts
+++ b/src/client/views/nodes/chatbot/tools/WebsiteInfoScraperTool.ts
@@ -66,11 +66,11 @@ const websiteInfoScraperToolInfo: ToolInfo<WebsiteInfoScraperToolParamsType> = {
 };
 
 export class WebsiteInfoScraperTool extends BaseTool<WebsiteInfoScraperToolParamsType> {
-    private _addLinkedUrlDoc: (url: string, id: string) => void;
+    private _getLinkedUrlDocId: (url: string) => string[];
 
-    constructor(addLinkedUrlDoc: (url: string, id: string) => void) {
+    constructor(getLinkedUrlDocIds: (url: string) => string[]) {
         super(websiteInfoScraperToolInfo);
-        this._addLinkedUrlDoc = addLinkedUrlDoc;
+        this._getLinkedUrlDocId = getLinkedUrlDocIds;
     }
 
     async execute(args: ParametersType<WebsiteInfoScraperToolParamsType>): Promise<Observation[]> {
@@ -79,9 +79,8 @@ export class WebsiteInfoScraperTool extends BaseTool<WebsiteInfoScraperToolParam
         // Create an array of promises, each one handling a website scrape for a URL
         const scrapingPromises = urls.map(async url => {
             try {
-                const { website_plain_text } = await Networking.PostToServer('/scrapeWebsite', { url });
-                const id = uuidv4();
-                this._addLinkedUrlDoc(url, id);
+                const { website_plain_text } = (await Networking.PostToServer('/scrapeWebsite', { url })) as { website_plain_text: string };
+                const id = this._getLinkedUrlDocId(url);
                 return {
                     type: 'text',
                     text: `<chunk chunk_id="${id}" chunk_type="url">\n${website_plain_text}\n</chunk>`,
```
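For context, a minimal caller-side sketch of what the new constructor contract implies: the tool no longer generates a uuid and pushes it out through an `addLinkedUrlDoc` callback; instead the caller owns the URL-to-doc-id mapping and hands the tool a lookup function of shape `(url: string) => string[]`. The names `ChatManagerSketch`, `registerLinkedDoc`, and `linkedUrlDocIds` below are hypothetical illustrations, not code from this repository; only the callback shape comes from the diff above.

```typescript
// Hypothetical caller-side sketch (not part of this commit).

// Before: the tool created its own id per URL and reported it via a callback:
//   new WebsiteInfoScraperTool((url, id) => addLinkedUrlDoc(url, id));
// After: the caller owns the url -> doc-id mapping and the tool only reads it.

type GetLinkedUrlDocIds = (url: string) => string[];

class ChatManagerSketch {
    // Assumed structure: a map from scraped URL to the ids of docs already linked to it.
    private linkedUrlDocIds = new Map<string, string[]>();

    registerLinkedDoc(url: string, docId: string): void {
        const ids = this.linkedUrlDocIds.get(url) ?? [];
        ids.push(docId);
        this.linkedUrlDocIds.set(url, ids);
    }

    // This matches the function shape the new constructor expects: (url: string) => string[].
    getLinkedUrlDocIds: GetLinkedUrlDocIds = url => this.linkedUrlDocIds.get(url) ?? [];
}

const manager = new ChatManagerSketch();
manager.registerLinkedDoc('https://example.com', 'doc-123');

// With the real class this would be wired roughly as:
//   const tool = new WebsiteInfoScraperTool(manager.getLinkedUrlDocIds);
console.log(manager.getLinkedUrlDocIds('https://example.com')); // ['doc-123']
```

Note that the diff assigns the `string[]` result directly to `id` and interpolates it into `chunk_id="${id}"`, which is consistent with the commit message that the doc-id listing does not yet work as intended.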
