/**
 * @file ChatBox.tsx
 * @description This file defines the ChatBox component, which manages user interactions with
 * an AI assistant. It handles document uploads, chat history, message input, and integration
 * with the OpenAI API. The ChatBox is MobX-observable and tracks the progress of tasks such as
 * document analysis and AI-driven summaries. It also maintains real-time chat functionality
 * with support for follow-up questions and citation management.
 */
import dotenv from 'dotenv';
import { ObservableSet, action, computed, makeObservable, observable, observe, reaction, runInAction } from 'mobx';
import { observer } from 'mobx-react';
import OpenAI, { ClientOptions } from 'openai';
import * as React from 'react';
import { v4 as uuidv4 } from 'uuid';
import { ClientUtils, OmitKeys } from '../../../../../ClientUtils';
import { Doc, DocListCast, Opt } from '../../../../../fields/Doc';
import { DocData, DocViews } from '../../../../../fields/DocSymbols';
import { RichTextField } from '../../../../../fields/RichTextField';
import { ScriptField } from '../../../../../fields/ScriptField';
import { CsvCast, DocCast, NumCast, PDFCast, RTFCast, StrCast } from '../../../../../fields/Types';
import { DocUtils } from '../../../../documents/DocUtils';
import { CollectionViewType, DocumentType } from '../../../../documents/DocumentTypes';
import { Docs, DocumentOptions } from '../../../../documents/Documents';
import { DocumentManager } from '../../../../util/DocumentManager';
import { ImageUtils } from '../../../../util/Import & Export/ImageUtils';
import { LinkManager } from '../../../../util/LinkManager';
import { CompileError, CompileScript } from '../../../../util/Scripting';
import { DictationButton } from '../../../DictationButton';
import { ViewBoxAnnotatableComponent } from '../../../DocComponent';
import { AudioBox } from '../../AudioBox';
import { DocumentView, DocumentViewInternal } from '../../DocumentView';
import { FieldView, FieldViewProps } from '../../FieldView';
import { PDFBox } from '../../PDFBox';
import { ScriptingBox } from '../../ScriptingBox';
import { VideoBox } from '../../VideoBox';
import { Agent } from '../agentsystem/Agent';
import { supportedDocTypes } from '../tools/CreateDocumentTool';
import { ASSISTANT_ROLE, AssistantMessage, CHUNK_TYPE, Citation, ProcessingInfo, SimplifiedChunk, TEXT_TYPE } from '../types/types';
import { Vectorstore } from '../vectorstore/Vectorstore';
import './ChatBox.scss';
import MessageComponentBox from './MessageComponent';
import { ProgressBar } from './ProgressBar';
import { OpenWhere } from '../../OpenWhere';
import { Upload } from '../../../../../server/SharedMediaTypes';

dotenv.config();

export type parsedDocData = { doc_type: string; data: unknown };
export type parsedDoc = DocumentOptions & parsedDocData;

/**
 * ChatBox is the main class responsible for managing the interaction between the user and the assistant,
 * handling documents, and integrating with OpenAI for tasks such as document analysis, chat functionality,
 * and vector store interactions.
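 *
 * @example
 * // Illustrative only (not from the original source): a `parsedDoc` passed to
 * // createDocInDash might look like the following, where `doc_type` selects the
 * // creator in `whichDoc` and the remaining fields are treated as DocumentOptions:
 * // { doc_type: 'text', title: 'Notes', data: 'Hello world' }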
 */
@observer
export class ChatBox extends ViewBoxAnnotatableComponent() {
    // MobX observable properties to track UI state and data
    @observable private _history: AssistantMessage[] = [];
    @observable.deep private _current_message: AssistantMessage | undefined = undefined;
    @observable private _isLoading: boolean = false;
    @observable private _uploadProgress: number = 0;
    @observable private _currentStep: string = '';
    @observable private _expandedScratchpadIndex: number | null = null;
    @observable private _inputValue: string = '';
    @observable private _linked_docs_to_add: ObservableSet = observable.set();
    @observable private _linked_csv_files: { filename: string; id: string; text: string }[] = [];
    @observable private _isUploadingDocs: boolean = false;
    @observable private _citationPopup: { text: string; visible: boolean } = { text: '', visible: false };

    // Private properties for managing OpenAI API, vector store, agent, and UI elements
    private openai: OpenAI;
    private vectorstore_id: string;
    private vectorstore: Vectorstore;
    private agent: Agent;
    private messagesRef: React.RefObject<HTMLDivElement>;
    private _textInputRef: HTMLInputElement | undefined | null;

    /**
     * Static method that returns the layout string for the field.
     * @param fieldKey Key to get the layout string.
     */
    public static LayoutString(fieldKey: string) {
        return FieldView.LayoutString(ChatBox, fieldKey);
    }

    setChatInput = action((input: string) => {
        this._inputValue = input;
    });

    /**
     * Constructor initializes the component, sets up OpenAI, vector store, and agent instances,
     * and observes changes in the chat history to save the state in dataDoc.
     * @param props The properties passed to the component.
     */
    constructor(props: FieldViewProps) {
        super(props);
        makeObservable(this); // Enable MobX observables

        // Initialize OpenAI, vectorstore, and agent
        this.openai = this.initializeOpenAI();
        if (StrCast(this.dataDoc.vectorstore_id) == '') {
            this.vectorstore_id = uuidv4();
            this.dataDoc.vectorstore_id = this.vectorstore_id;
        } else {
            this.vectorstore_id = StrCast(this.dataDoc.vectorstore_id);
        }
        this.vectorstore = new Vectorstore(this.vectorstore_id, this.retrieveDocIds);
        this.agent = new Agent(this.vectorstore, this.retrieveSummaries, this.retrieveFormattedHistory, this.retrieveCSVData, this.addLinkedUrlDoc, this.createImageInDash, this.createDocInDash, this.createCSVInDash);
        this.messagesRef = React.createRef<HTMLDivElement>();

        // Reaction to update dataDoc when chat history changes
        reaction(
            () =>
                this._history.map((msg: AssistantMessage) => ({
                    role: msg.role,
                    content: msg.content,
                    follow_up_questions: msg.follow_up_questions,
                    citations: msg.citations,
                })),
            serializableHistory => {
                this.dataDoc.data = JSON.stringify(serializableHistory);
            }
        );
    }

    /**
     * Adds a document to the vectorstore for AI-based analysis.
     * Handles the upload progress and errors during the process.
     * @param newLinkedDoc The new document to add.
     */
    @action
    addDocToVectorstore = async (newLinkedDoc: Doc) => {
        this._uploadProgress = 0;
        this._currentStep = 'Initializing...';
        this._isUploadingDocs = true;

        try {
            // Add the document to the vectorstore
            await this.vectorstore.addAIDoc(newLinkedDoc, this.updateProgress);
        } catch (error) {
            console.error('Error uploading document:', error);
            this._currentStep = 'Error during upload';
        } finally {
            runInAction(() => {
                this._isUploadingDocs = false;
                this._uploadProgress = 0;
                this._currentStep = '';
            });
        }
    };

    /**
     * Updates the upload progress and the current step in the UI.
     * @param progress The percentage of the progress.
     * @param step The current step name.
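     *
     * @example
     * // Illustrative only: the vectorstore's upload pipeline might report progress
     * // like this (the step name here is hypothetical):
     * // this.updateProgress(45, 'Extracting text');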
     */
    @action
    updateProgress = (progress: number, step: string) => {
        this._uploadProgress = progress;
        this._currentStep = step;
    };

    /**
     * Adds a CSV file for analysis by sending it to OpenAI and generating a summary.
     * @param newLinkedDoc The linked document representing the CSV file.
     * @param id Optional ID for the document.
     */
    @action
    addCSVForAnalysis = async (newLinkedDoc: Doc, id?: string) => {
        if (!newLinkedDoc.chunk_simpl) {
            // Convert document text to CSV data
            const csvData: string = StrCast(newLinkedDoc.text);

            // Generate a summary using OpenAI API
            const completion = await this.openai.chat.completions.create({
                messages: [
                    {
                        role: 'system',
                        content:
                            'You are an AI assistant tasked with summarizing the content of a CSV file. You will be provided with the data from the CSV file and your goal is to generate a concise summary that captures the main themes, trends, and key points represented in the data.',
                    },
                    {
                        role: 'user',
                        content: `Please provide a comprehensive summary of the CSV file based on the provided data. Ensure the summary highlights the most important information, patterns, and insights. Your response should be in paragraph form and be concise. CSV Data: ${csvData} ********** Summary:`,
                    },
                ],
                model: 'gpt-3.5-turbo',
            });

            const csvId = id ?? uuidv4();

            // Add CSV details to linked files
            this._linked_csv_files.push({
                filename: CsvCast(newLinkedDoc.data)?.url.pathname ?? '',
                id: csvId,
                text: csvData,
            });

            // Add a chunk for the CSV and assign the summary
            const chunkToAdd = {
                chunkId: csvId,
                chunkType: CHUNK_TYPE.CSV,
            };
            newLinkedDoc.chunk_simpl = JSON.stringify({ chunks: [chunkToAdd] });
            newLinkedDoc.summary = completion.choices[0].message.content!;
        }
    };

    /**
     * Toggles the tool logs, expanding or collapsing the scratchpad at the given index.
     * @param index Index of the tool log to toggle.
     */
    @action
    toggleToolLogs = (index: number) => {
        this._expandedScratchpadIndex = this._expandedScratchpadIndex === index ? null : index;
    };

    /**
     * Initializes the OpenAI API client using the API key from environment variables.
     * @returns OpenAI client instance.
     */
    initializeOpenAI() {
        const configuration: ClientOptions = {
            apiKey: process.env.OPENAI_KEY,
            dangerouslyAllowBrowser: true,
        };
        return new OpenAI(configuration);
    }

    /**
     * Adds a scroll event listener to detect user scrolling and handle passive wheel events.
     */
    addScrollListener = () => {
        if (this.messagesRef.current) {
            this.messagesRef.current.addEventListener('wheel', this.onPassiveWheel, { passive: false });
        }
    };

    /**
     * Removes the scroll event listener from the chat messages container.
     */
    removeScrollListener = () => {
        if (this.messagesRef.current) {
            this.messagesRef.current.removeEventListener('wheel', this.onPassiveWheel);
        }
    };

    /**
     * Scrolls the chat messages container to the bottom, ensuring the latest message is visible.
     */
    scrollToBottom = () => {
        // if (this.messagesRef.current) {
        //     this.messagesRef.current.scrollTop = this.messagesRef.current.scrollHeight;
        // }
    };

    /**
     * Event handler for detecting wheel scrolling and stopping the event propagation.
     * @param e The wheel event.
     */
    onPassiveWheel = (e: WheelEvent) => {
        if (this._props.isContentActive()) {
            e.stopPropagation();
        }
    };

    /**
     * Sends the user's input to OpenAI, displays the loading indicator, and updates the chat history.
     * @param event The form submission event.
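     *
     * @example
     * // Illustrative only: askGPT is wired up as the submit handler of the chat form,
     * // which contains an <input name="messageInput"> (see render):
     * // <form onSubmit={this.askGPT}> ... </form>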
     */
    @action
    askGPT = async (event: React.FormEvent): Promise<void> => {
        event.preventDefault();
        this._inputValue = '';

        // Extract the user's message
        const textInput = (event.currentTarget as HTMLFormElement).elements.namedItem('messageInput') as HTMLInputElement;
        const trimmedText = textInput.value.trim();

        if (trimmedText) {
            try {
                textInput.value = '';

                // Add the user's message to the history
                this._history.push({
                    role: ASSISTANT_ROLE.USER,
                    content: [{ index: 0, type: TEXT_TYPE.NORMAL, text: trimmedText, citation_ids: null }],
                    processing_info: [],
                });
                this._isLoading = true;
                this._current_message = {
                    role: ASSISTANT_ROLE.ASSISTANT,
                    content: [],
                    citations: [],
                    processing_info: [],
                };

                // Define callbacks for real-time processing updates
                const onProcessingUpdate = (processingUpdate: ProcessingInfo[]) => {
                    runInAction(() => {
                        if (this._current_message) {
                            this._current_message = {
                                ...this._current_message,
                                processing_info: processingUpdate,
                            };
                        }
                    });
                    this.scrollToBottom();
                };

                const onAnswerUpdate = (answerUpdate: string) => {
                    runInAction(() => {
                        if (this._current_message) {
                            this._current_message = {
                                ...this._current_message,
                                content: [{ text: answerUpdate, type: TEXT_TYPE.NORMAL, index: 0, citation_ids: [] }],
                            };
                        }
                    });
                };

                // Send the user's question to the assistant and get the final message
                const finalMessage = await this.agent.askAgent(trimmedText, onProcessingUpdate, onAnswerUpdate);

                // Update the history with the final assistant message
                runInAction(() => {
                    if (this._current_message) {
                        this._history.push({ ...finalMessage });
                        this._current_message = undefined;
                        this.dataDoc.data = JSON.stringify(this._history);
                    }
                });
            } catch (err) {
                console.error('Error:', err);
                // Handle error in processing
                runInAction(() =>
                    this._history.push({
                        role: ASSISTANT_ROLE.ASSISTANT,
                        content: [{ index: 0, type: TEXT_TYPE.ERROR, text: `Sorry, I encountered an error while processing your request: ${err}`, citation_ids: null }],
                        processing_info: [],
                    })
                );
            } finally {
                runInAction(() => {
                    this._isLoading = false;
                });
                this.scrollToBottom();
            }
        }
        this.scrollToBottom();
    };

    /**
     * Updates the citations for a given message in the chat history.
     * @param index The index of the message in the history.
     * @param citations The list of citations to add to the message.
     */
    @action
    updateMessageCitations = (index: number, citations: Citation[]) => {
        if (this._history[index]) {
            this._history[index].citations = citations;
        }
    };

    /**
     * Adds a linked document from a URL for future reference and analysis.
     * @param url The URL of the document to add.
     * @param id The unique identifier for the document.
     */
    @action
    addLinkedUrlDoc = async (url: string, id: string) => {
        const doc = Docs.Create.WebDocument(url, { data_useCors: true });
        const linkDoc = Docs.Create.LinkDocument(this.Document, doc);
        LinkManager.Instance.addLink(linkDoc);

        const chunkToAdd = {
            chunkId: id,
            chunkType: CHUNK_TYPE.URL,
            url: url,
        };
        doc.chunk_simpl = JSON.stringify({ chunks: [chunkToAdd] });
    };

    /**
     * Getter to retrieve the current user's name from the client utils.
     */
    @computed
    get userName() {
        return ClientUtils.CurrentUserEmail;
    }

    /**
     * Creates a CSV document in the dashboard and adds it for analysis.
     * @param url The URL of the CSV.
     * @param title The title of the CSV document.
     * @param id The unique ID for the document.
     * @param data The CSV data content.
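     *
     * @example
     * // Illustrative only (argument values are hypothetical):
     * // this.createCSVInDash('https://example.com/sales.csv', 'Sales Data', uuidv4(), 'region,total\nNE,100');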
     */
    @action
    createCSVInDash = (url: string, title: string, id: string, data: string) =>
        DocUtils.DocumentFromType('csv', url, { title: title, text: RTFCast(data) }).then(doc => {
            if (doc) {
                LinkManager.Instance.addLink(Docs.Create.LinkDocument(this.Document, doc));
                this._props.addDocument?.(doc);
                DocumentManager.Instance.showDocument(doc, { willZoomCentered: true }, () => {}).then(() => this.addCSVForAnalysis(doc, id));
            }
        });

    @action
    createImageInDash = async (result: Upload.FileInformation & Upload.InspectionResults, options: DocumentOptions) => {
        const newImgSrc =
            result.accessPaths.agnostic.client.indexOf('dashblobstore') === -1 //
                ? ClientUtils.prepend(result.accessPaths.agnostic.client)
                : result.accessPaths.agnostic.client;
        const doc = Docs.Create.ImageDocument(newImgSrc, options);
        this.addDocument(ImageUtils.AssignImgInfo(doc, result));
        const linkDoc = Docs.Create.LinkDocument(this.Document, doc);
        LinkManager.Instance.addLink(linkDoc);
        if (doc) {
            if (this._props.addDocument) this._props.addDocument(doc);
            else DocumentViewInternal.addDocTabFunc(doc, OpenWhere.addRight);
        }
        await DocumentManager.Instance.showDocument(doc, { willZoomCentered: true }, () => {});
    };

    /**
     * Creates the child documents of a collection from an array of parsed document descriptions.
     * @param data The parsed document descriptions for the collection's children.
     * @param insideCol Whether the documents are created inside a collection (affects positioning).
     */
    @action
    private createCollectionWithChildren = (data: parsedDoc[], insideCol: boolean): Opt<Doc>[] => data.map(doc => this.whichDoc(doc, insideCol));

    @action
    whichDoc = (doc: parsedDoc, insideCol: boolean): Opt<Doc> => {
        const options = OmitKeys(doc, ['doc_type', 'data']).omit as DocumentOptions;
        const data = (doc as parsedDocData).data;
        const ndoc = (() => {
            switch (doc.doc_type) {
                default:
                case supportedDocTypes.text: return Docs.Create.TextDocument(data as string, options);
                case supportedDocTypes.comparison: return this.createComparison(JSON.parse(data as string) as parsedDoc[], options);
                case supportedDocTypes.flashcard: return this.createFlashcard(JSON.parse(data as string) as parsedDoc[], options);
                case supportedDocTypes.deck: return this.createDeck(JSON.parse(data as string) as parsedDoc[], options);
                case supportedDocTypes.image: return Docs.Create.ImageDocument(data as string, options);
                case supportedDocTypes.equation: return Docs.Create.EquationDocument(data as string, options);
                case supportedDocTypes.notetaking: return Docs.Create.NoteTakingDocument([], options);
                case supportedDocTypes.web: return Docs.Create.WebDocument(data as string, { ...options, data_useCors: true });
                case supportedDocTypes.dataviz: return Docs.Create.DataVizDocument('/users/rz/Downloads/addresses.csv', options); // NOTE: hardcoded placeholder CSV path
                case supportedDocTypes.pdf: return Docs.Create.PdfDocument(data as string, options);
                case supportedDocTypes.video: return Docs.Create.VideoDocument(data as string, options);
                case supportedDocTypes.diagram: return Docs.Create.DiagramDocument(undefined, { text: data as unknown as RichTextField, ...options }); // text: can take a string or RichTextField but it's typed for RichTextField.
                // case supportedDocumentTypes.dataviz:
                // {
                //     const { fileUrl, id } = await Networking.PostToServer('/createCSV', {
                //         filename: (options.title as string).replace(/\s+/g, '') + '.csv',
                //         data: data,
                //     });
                //     const doc = Docs.Create.DataVizDocument(fileUrl, { ...options, text: RTFCast(data as string) });
                //     this.addCSVForAnalysis(doc, id);
                //     return doc;
                // }
                case supportedDocTypes.script: {
                    const result = !(data as string).trim() ? ({ compiled: false, errors: [] } as CompileError) : CompileScript(data as string, {});
                    const script_field = result.compiled ? new ScriptField(result, undefined, data as string) : undefined;
                    const sdoc = Docs.Create.ScriptingDocument(script_field, options);
                    DocumentManager.Instance.showDocument(sdoc, { willZoomCentered: true }, () => {
                        const firstView = Array.from(sdoc[DocViews])[0] as DocumentView;
                        (firstView.ComponentView as ScriptingBox)?.onApply?.();
                        (firstView.ComponentView as ScriptingBox)?.onRun?.();
                    });
                    return sdoc;
                }
                case supportedDocTypes.collection: {
                    const arr = this.createCollectionWithChildren(JSON.parse(data as string) as parsedDoc[], true).filter(d => d).map(d => d!);
                    const collOpts = {
                        _width: 300,
                        _height: 300,
                        _layout_fitWidth: true,
                        _freeform_backgroundGrid: true,
                        ...options,
                    };
                    return (() => {
                        switch (options.type_collection) {
                            case CollectionViewType.Tree: return Docs.Create.TreeDocument(arr, collOpts);
                            case CollectionViewType.Stacking: return Docs.Create.StackingDocument(arr, collOpts);
                            case CollectionViewType.Masonry: return Docs.Create.MasonryDocument(arr, collOpts);
                            case CollectionViewType.Card: return Docs.Create.CardDeckDocument(arr, collOpts);
                            case CollectionViewType.Carousel: return Docs.Create.CarouselDocument(arr, collOpts);
                            case CollectionViewType.Carousel3D: return Docs.Create.Carousel3DDocument(arr, collOpts);
                            case CollectionViewType.Multicolumn: return Docs.Create.CarouselDocument(arr, collOpts); // NOTE: currently falls back to a carousel document
                            default: return Docs.Create.FreeformDocument(arr, collOpts);
                        }
                    })();
                }
                // case supportedDocumentTypes.map: return Docs.Create.MapDocument([], options);
                // case supportedDocumentTypes.button: return Docs.Create.ButtonDocument(options);
                // case supportedDocumentTypes.trail: return Docs.Create.PresDocument(options);
            } // prettier-ignore
        })();
        if (ndoc) {
            ndoc.x = NumCast((options.x as number) ?? 0) + (insideCol ? 0 : NumCast(this.layoutDoc.x) + NumCast(this.layoutDoc.width)) + 100;
            ndoc.y = NumCast(options.y as number) + (insideCol ? 0 : NumCast(this.layoutDoc.y));
        }
        return ndoc;
    };

    /**
     * Creates a document in the dashboard, links it to the chat document, and displays it.
     *
     * @param {parsedDoc} pdoc - The parsed document description (doc_type, data, and DocumentOptions).
     * @returns {Opt<Doc>} The created document, or undefined if it could not be created.
     */
    @action
    createDocInDash = (pdoc: parsedDoc) => {
        const linkAndShowDoc = (doc: Opt<Doc>) => {
            if (doc) {
                LinkManager.Instance.addLink(Docs.Create.LinkDocument(this.Document, doc));
                this._props.addDocument?.(doc);
                DocumentManager.Instance.showDocument(doc, { willZoomCentered: true }, () => {});
            }
        };
        const doc = this.whichDoc(pdoc, false);
        if (doc) linkAndShowDoc(doc);
        return doc;
    };

    /**
     * Creates a deck of flashcards.
     *
     * @param {parsedDoc[]} data - The parsed flashcard descriptions used to generate the deck.
     * @param {DocumentOptions} options - Configuration options for the flashcard deck.
     * @returns {Doc} A carousel document containing the flashcard deck.
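     *
     * @example
     * // Illustrative only: a deck is described as an array of flashcard entries whose
     * // `data` holds the front and back (shape assumed from createFlashcard):
     * // [
     * //   { doc_type: 'flashcard', data: ['What is MobX?', 'A state management library'] },
     * //   { doc_type: 'flashcard', data: ['What is JSX?', 'A syntax extension for JavaScript'] },
     * // ]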
     */
    @action
    createDeck = (data: parsedDoc[], options: DocumentOptions) => {
        const flashcardDeck: Doc[] = [];

        // Process each flashcard document in the `deckData` array
        if (data.length == 2 && data[0].doc_type == 'text' && data[1].doc_type == 'text') {
            this.createFlashcard(data, options);
        } else {
            data.forEach(doc => {
                const flashcardDoc = this.createFlashcard((doc as parsedDocData).data as parsedDoc[] | string[], options);
                if (flashcardDoc) flashcardDeck.push(flashcardDoc);
            });
        }

        // Create a carousel to contain the flashcard deck
        return Docs.Create.CarouselDocument(flashcardDeck, {
            title: options.title || 'Flashcard Deck',
            _width: options._width || 300,
            _height: options._height || 300,
            _layout_fitWidth: false,
            _layout_autoHeight: true,
        });
    };

    /**
     * Creates a single flashcard document.
     *
     * @param {parsedDoc[] | string[]} data - The front and back of the flashcard, each either a string or a parsed document description.
     * @param {DocumentOptions} options - Configuration options for the flashcard.
     * @returns {Doc | undefined} The created flashcard document, or undefined if the flashcard cannot be created.
     */
    @action
    createFlashcard = (data: parsedDoc[] | string[], options: DocumentOptions) => {
        const [front, back] = data;
        const sideOptions = { _height: 300, ...options };

        // Create front and back text documents
        const side1 = typeof front === 'string' ? Docs.Create.CenteredTextCreator('question', front as string, sideOptions) : this.whichDoc(front, false);
        const side2 = typeof back === 'string' ? Docs.Create.CenteredTextCreator('answer', back as string, sideOptions) : this.whichDoc(back, false);

        // Create the flashcard document with both sides
        return Docs.Create.FlashcardDocument('flashcard', side1, side2, sideOptions);
    };

    /**
     * Creates a comparison document.
     *
     * @param {parsedDoc[]} doc - The document data containing the two components being compared.
     * @param {DocumentOptions} options - Configuration options for the comparison document.
     * @returns {Doc} The created comparison document.
     */
    @action
    createComparison = (doc: parsedDoc[], options: DocumentOptions) =>
        Docs.Create.ComparisonDocument(options.title as string, {
            data_back: this.whichDoc(doc[0], false),
            data_front: this.whichDoc(doc[1], false),
            _width: options._width,
            _height: options._height || 300,
            backgroundColor: options.backgroundColor,
        });

    /**
     * Event handler that manages citation clicks in the message components.
     * @param citation The citation object clicked by the user.
     */
    @action
    handleCitationClick = async (citation: Citation) => {
        const currentLinkedDocs: Doc[] = this.linkedDocs;
        const chunkId = citation.chunk_id;

        for (const doc of currentLinkedDocs) {
            if (doc.chunk_simpl) {
                const docChunkSimpl = JSON.parse(StrCast(doc.chunk_simpl)) as { chunks: SimplifiedChunk[] };
                const foundChunk = docChunkSimpl.chunks.find(chunk => chunk.chunkId === chunkId);
                if (foundChunk) {
                    // Handle media chunks specifically
                    if (doc.ai_type == 'video' || doc.ai_type == 'audio') {
                        const directMatchSegmentStart = this.getDirectMatchingSegmentStart(doc, citation.direct_text || '', foundChunk.indexes || []);
                        if (directMatchSegmentStart) {
                            // Navigate to the segment's start time in the media player
                            await this.goToMediaTimestamp(doc, directMatchSegmentStart, doc.ai_type);
                        } else {
                            console.error('No direct matching segment found for the citation.');
                        }
                    } else {
                        // Handle other chunk types as before
                        this.handleOtherChunkTypes(foundChunk, citation, doc);
                    }
                }
            }
        }
    };

    getDirectMatchingSegmentStart = (doc: Doc, citationText: string, indexesOfSegments: string[]): number => {
        // eslint-disable-next-line @typescript-eslint/no-explicit-any
        const originalSegments = JSON.parse(StrCast(doc.original_segments!)).map((segment: any, index: number) => ({
            index: index.toString(),
            text: segment.text,
            start: segment.start,
            end: segment.end,
        }));

        if (!Array.isArray(originalSegments) || originalSegments.length === 0 || !Array.isArray(indexesOfSegments)) {
            return 0;
        }

        // Create itemsToSearch array based on indexesOfSegments
        const itemsToSearch = indexesOfSegments.map((indexStr: string) => {
            const index = parseInt(indexStr, 10);
            const segment = originalSegments[index];
            return { text: segment.text, start: segment.start };
        });

        console.log('Constructed itemsToSearch:', itemsToSearch);

        // Helper function to calculate word overlap score
        const calculateWordOverlap = (text1: string, text2: string): number => {
            const words1 = new Set(text1.toLowerCase().split(/\W+/));
            const words2 = new Set(text2.toLowerCase().split(/\W+/));
            const intersection = new Set([...words1].filter(word => words2.has(word)));
            return intersection.size / Math.max(words1.size, words2.size); // overlap relative to the larger word set (a Jaccard-like score)
        };

        // Search for the best matching segment
        let bestMatchStart = 0;
        let bestScore = 0;

        console.log(`Searching for best match for query: "${citationText}"`);

        itemsToSearch.forEach(item => {
            const score = calculateWordOverlap(citationText, item.text);
            console.log(`Comparing query to segment: "${item.text}" | Score: ${score}`);
            if (score > bestScore) {
                bestScore = score;
                bestMatchStart = item.start;
            }
        });

        console.log('Best match found with score:', bestScore, '| Start time:', bestMatchStart);

        // Return the start time of the best match
        return bestMatchStart;
    };

    /**
     * Navigates to the given timestamp in the media player.
     * @param doc The document containing the media file.
     * @param timestamp The timestamp to navigate to.
     * @param type Whether the target document is a video or an audio recording.
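     *
     * @example
     * // Illustrative only (the document variable is hypothetical):
     * // await this.goToMediaTimestamp(videoDoc, 72.5, 'video'); // seek the video to 72.5s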
     */
    goToMediaTimestamp = async (doc: Doc, timestamp: number, type: 'video' | 'audio') => {
        try {
            // Show the media document in the viewer
            if (type == 'video') {
                DocumentManager.Instance.showDocument(doc, { willZoomCentered: true }, () => {
                    const firstView = Array.from(doc[DocViews])[0] as DocumentView;
                    (firstView.ComponentView as VideoBox)?.Seek?.(timestamp);
                });
            } else {
                DocumentManager.Instance.showDocument(doc, { willZoomCentered: true }, () => {
                    const firstView = Array.from(doc[DocViews])[0] as DocumentView;
                    (firstView.ComponentView as AudioBox)?.playFrom?.(timestamp);
                });
            }
            console.log(`Navigated to timestamp: ${timestamp}s in document ${doc.id}`);
        } catch (error) {
            console.error('Error navigating to media timestamp:', error);
        }
    };

    /**
     * Handles non-media chunk types (images, tables, text, CSV, and URL chunks).
     * @param foundChunk The chunk object.
     * @param citation The citation object.
     * @param doc The document containing the chunk.
     */
    handleOtherChunkTypes = (foundChunk: SimplifiedChunk, citation: Citation, doc: Doc) => {
        switch (foundChunk.chunkType) {
            case CHUNK_TYPE.IMAGE:
            case CHUNK_TYPE.TABLE:
                {
                    const values = foundChunk.location?.replace(/[[\]]/g, '').split(',');
                    if (values?.length !== 4) {
                        console.error('Location string must contain exactly 4 numbers');
                        return;
                    }
                    if (foundChunk.startPage === undefined || foundChunk.endPage === undefined) {
                        DocumentManager.Instance.showDocument(doc, { willZoomCentered: true }, () => {});
                        return;
                    }
                    const x1 = parseFloat(values[0]) * Doc.NativeWidth(doc);
                    const y1 = parseFloat(values[1]) * Doc.NativeHeight(doc) + foundChunk.startPage * Doc.NativeHeight(doc);
                    const x2 = parseFloat(values[2]) * Doc.NativeWidth(doc);
                    const y2 = parseFloat(values[3]) * Doc.NativeHeight(doc) + foundChunk.startPage * Doc.NativeHeight(doc);

                    const annotationKey = '$' + Doc.LayoutDataKey(doc) + '_annotations';
                    const existingDoc = DocListCast(doc[annotationKey]).find(d => d.citation_id === citation.citation_id);
                    const highlightDoc = existingDoc ?? this.createImageCitationHighlight(x1, y1, x2, y2, citation, annotationKey, doc);
                    DocumentManager.Instance.showDocument(highlightDoc, { willZoomCentered: true }, () => {});
                }
                break;
            case CHUNK_TYPE.TEXT:
                this._citationPopup = { text: citation.direct_text ?? 'No text available', visible: true };
                setTimeout(() => (this._citationPopup.visible = false), 3000);
                DocumentManager.Instance.showDocument(doc, { willZoomCentered: true }, () => {
                    const firstView = Array.from(doc[DocViews])[0] as DocumentView;
                    (firstView.ComponentView as PDFBox)?.gotoPage?.(foundChunk.startPage ?? 0);
                    (firstView.ComponentView as PDFBox)?.search?.(citation.direct_text ?? '');
                });
                break;
            case CHUNK_TYPE.CSV:
            case CHUNK_TYPE.URL:
                DocumentManager.Instance.showDocument(doc, { willZoomCentered: true });
                break;
            default:
                console.error('Unhandled chunk type:', foundChunk.chunkType);
                break;
        }
    };

    /**
     * Creates an annotation highlight on a PDF document for image citations.
     * @param x1 X-coordinate of the top-left corner of the highlight.
     * @param y1 Y-coordinate of the top-left corner of the highlight.
     * @param x2 X-coordinate of the bottom-right corner of the highlight.
     * @param y2 Y-coordinate of the bottom-right corner of the highlight.
     * @param citation The citation object to associate with the highlight.
     * @param annotationKey The key used to store the annotation.
     * @param pdfDoc The document where the highlight is created.
     * @returns The highlighted document.
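     *
     * @example
     * // Illustrative only: coordinates are in the PDF's native coordinate space, already
     * // offset by the chunk's page (the values below are hypothetical).
     * // const highlight = this.createImageCitationHighlight(50, 1200, 350, 1450, citation, annotationKey, pdfDoc);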
     */
    createImageCitationHighlight = (x1: number, y1: number, x2: number, y2: number, citation: Citation, annotationKey: string, pdfDoc: Doc): Doc => {
        const highlight_doc = Docs.Create.FreeformDocument([], {
            x: x1,
            y: y1,
            _width: x2 - x1,
            _height: y2 - y1,
            backgroundColor: 'rgba(255, 255, 0, 0.5)',
        });
        highlight_doc.$citation_id = citation.citation_id;
        Doc.AddDocToList(pdfDoc[DocData], annotationKey, highlight_doc);
        highlight_doc.annotationOn = pdfDoc;
        Doc.SetContainer(highlight_doc, pdfDoc);
        return highlight_doc;
    };

    /**
     * Lifecycle method that triggers when the component updates.
     * Ensures the chat is scrolled to the bottom when new messages are added.
     */
    componentDidUpdate() {
        this.scrollToBottom();
    }

    /**
     * Lifecycle method that triggers when the component mounts.
     * Initializes scroll listeners, sets up document reactions, and loads chat history from dataDoc if available.
     */
    componentDidMount() {
        this._props.setContentViewBox?.(this);

        if (this.dataDoc.data) {
            try {
                const storedHistory = JSON.parse(StrCast(this.dataDoc.data));
                runInAction(() => {
                    this._history.push(
                        ...storedHistory.map((msg: AssistantMessage) => ({
                            role: msg.role,
                            content: msg.content,
                            follow_up_questions: msg.follow_up_questions,
                            citations: msg.citations,
                        }))
                    );
                });
            } catch (e) {
                console.error('Failed to parse history from dataDoc:', e);
            }
        } else {
            // Default welcome message
            runInAction(() => {
                this._history.push({
                    role: ASSISTANT_ROLE.ASSISTANT,
                    content: [
                        {
                            index: 0,
                            type: TEXT_TYPE.NORMAL,
                            text: `Hey, ${this.userName()}! Welcome to Your Friendly Assistant. Link a document or ask questions to get started.`,
                            citation_ids: null,
                        },
                    ],
                    processing_info: [],
                });
            });
        }

        // Set up reactions for linked documents
        reaction(
            () => {
                const linkedDocs = LinkManager.Instance.getAllRelatedLinks(this.Document)
                    .map(d => DocCast(LinkManager.getOppositeAnchor(d, this.Document)))
                    .map(d => DocCast(d?.annotationOn, d))
                    .filter(d => d);
                return linkedDocs;
            },
            linked => linked.forEach(doc => this._linked_docs_to_add.add(doc))
        );

        // Observe changes to linked documents and handle document addition
        observe(this._linked_docs_to_add, change => {
            if (change.type === 'add') {
                if (CsvCast(change.newValue.data)) {
                    this.addCSVForAnalysis(change.newValue);
                } else {
                    this.addDocToVectorstore(change.newValue);
                }
            } else if (change.type === 'delete') {
                // Handle document removal
            }
        });

        this.addScrollListener();
    }

    /**
     * Lifecycle method that triggers when the component unmounts.
     * Removes scroll listeners to avoid memory leaks.
     */
    componentWillUnmount() {
        this.removeScrollListener();
    }

    /**
     * Getter that retrieves all linked documents for the current document.
     */
    @computed
    get linkedDocs() {
        return LinkManager.Instance.getAllRelatedLinks(this.Document)
            .map(d => DocCast(LinkManager.getOppositeAnchor(d, this.Document)))
            .map(d => DocCast(d?.annotationOn, d))
            .filter(d => d)
            .map(d => d!);
    }

    /**
     * Getter that retrieves document IDs of linked documents that have AI-related content.
     */
    @computed
    get docIds() {
        return LinkManager.Instance.getAllRelatedLinks(this.Document)
            .map(d => DocCast(LinkManager.getOppositeAnchor(d, this.Document)))
            .map(d => DocCast(d?.annotationOn, d))
            .filter(d => d)
            .map(d => d!)
            .filter(d => {
                console.log(d.ai_doc_id);
                return d.ai_doc_id;
            })
            .map(d => StrCast(d.ai_doc_id));
    }

    /**
     * Getter that retrieves summaries of all linked documents.
     */
    @computed
    get summaries(): string {
        return (
            LinkManager.Instance.getAllRelatedLinks(this.Document)
                .map(d => DocCast(LinkManager.getOppositeAnchor(d, this.Document)))
                .map(d => DocCast(d?.annotationOn, d))
                .filter(d => d?.summary)
                .map((doc, index) => {
                    if (PDFCast(doc?.data)) {
                        return `${doc!.summary}`;
                    } else if (CsvCast(doc?.data)) {
                        return `${doc!.summary}`;
                    } else {
                        return `${index + 1}) ${doc?.summary}`;
                    }
                })
                .join('\n') + '\n'
        );
    }

    /**
     * Getter that retrieves all linked CSV files for analysis.
     */
    @computed
    get linkedCSVs(): { filename: string; id: string; text: string }[] {
        return this._linked_csv_files;
    }

    /**
     * Getter that formats the entire chat history as a string for the agent's system message.
     */
    @computed
    get formattedHistory(): string {
        let history = '\n';
        for (const message of this._history) {
            history += `<${message.role}>${message.content.map(content => content.text).join(' ')}`;
            if (message.loop_summary) {
                history += `${message.loop_summary}`;
            }
            history += `\n`;
        }
        history += '';
        return history;
    }

    // Other helper methods for retrieving document data and processing
    retrieveSummaries = () => {
        return this.summaries;
    };

    retrieveCSVData = () => {
        return this.linkedCSVs;
    };

    retrieveFormattedHistory = () => {
        return this.formattedHistory;
    };

    retrieveDocIds = () => {
        return this.docIds;
    };

    /**
     * Handles follow-up questions when the user clicks on them.
     * Automatically sets the input value to the clicked follow-up question.
     * @param question The follow-up question clicked by the user.
     */
    @action
    handleFollowUpClick = (question: string) => {
        this._inputValue = question;
    };

    _dictation: DictationButton | null = null;
    setInputRef = (r: HTMLInputElement) => (this._textInputRef = r);
    setDictationRef = (r: DictationButton) => (this._dictation = r);

    /**
     * Renders the chat interface, including the message list, input field, and other UI elements.
     */
    render() {
        return (
            // NOTE: the class names and the props passed to ProgressBar, MessageComponentBox,
            // and DictationButton below are representative assumptions reconstructed from the
            // surrounding code; adjust them to match the actual components and ChatBox.scss.
            <div className="chat-box">
                {this._isUploadingDocs && (
                    <div className="chat-box-upload-overlay">
                        <ProgressBar progress={this._uploadProgress} />
                        <div className="chat-box-upload-step">{this._currentStep}</div>
                    </div>
                )}
                <div className="chat-box-header">{this.userName()}'s AI Assistant</div>
                <div className="chat-box-messages" ref={this.messagesRef}>
                    {this._history.map((message, index) => (
                        <MessageComponentBox key={index} message={message} onFollowUpClick={this.handleFollowUpClick} onCitationClick={this.handleCitationClick} />
                    ))}
                    {this._current_message && (
                        <MessageComponentBox message={this._current_message} onFollowUpClick={this.handleFollowUpClick} onCitationClick={this.handleCitationClick} />
                    )}
                </div>
                <form className="chat-box-input" onSubmit={this.askGPT}>
                    <input
                        type="text"
                        name="messageInput"
                        autoComplete="off"
                        placeholder="Ask a question..."
                        ref={this.setInputRef}
                        value={this._inputValue}
                        onChange={action(e => (this._inputValue = e.target.value))}
                        disabled={this._isLoading}
                    />
                    <DictationButton ref={this.setDictationRef} />
                    <button type="submit" disabled={this._isLoading}>
                        Send
                    </button>
                </form>
                {/* Popup for citation */}
                {this._citationPopup.visible && (
                    <div className="chat-box-citation-popup">
                        Text from your document: {this._citationPopup.text}
                    </div>
                )}
            </div>
        );
    }
}

/**
 * Register the ChatBox component as the template for CHAT document types.
 */
Docs.Prototypes.TemplateMap.set(DocumentType.CHAT, {
    layout: { view: ChatBox, dataField: 'data' },
    options: { acl: '', _layout_fitWidth: true, chat: '', chat_history: '', chat_thread_id: '', chat_assistant_id: '', chat_vector_store_id: '' },
});