import { Button, IconButton, Size, Toggle, ToggleType, Type } from '@dash/components';
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
import { action, makeObservable, observable, reaction } from 'mobx';
import { observer } from 'mobx-react';
import * as React from 'react';
import { AiOutlineSend } from 'react-icons/ai';
import { CgCornerUpLeft } from 'react-icons/cg';
import ReactLoading from 'react-loading';
import { TypeAnimation } from 'react-type-animation';
import { ClientUtils } from '../../../../ClientUtils';
import { Doc } from '../../../../fields/Doc';
import { List } from '../../../../fields/List';
import { NumCast, StrCast } from '../../../../fields/Types';
import { ImageField } from '../../../../fields/URLField';
import { Upload } from '../../../../server/SharedMediaTypes';
import { Networking } from '../../../Network';
import { DataSeperator, DescEnd, DescStart, DocSeperator, GPTCallType, GPTDocCommand, gptAPICall, gptImageCall } from '../../../apis/gpt/GPT';
import { DocUtils } from '../../../documents/DocUtils';
import { Docs } from '../../../documents/Documents';
import { SettingsManager } from '../../../util/SettingsManager';
import { SnappingManager } from '../../../util/SnappingManager';
import { undoable } from '../../../util/UndoManager';
import { DictationButton } from '../../DictationButton';
import { ObservableReactComponent } from '../../ObservableReactComponent';
import { TagItem } from '../../TagsView';
import { ChatSortField, docSortings } from '../../collections/CollectionSubView';
import { ComparisonBox } from '../../nodes/ComparisonBox';
import { DocumentView, DocumentViewInternal } from '../../nodes/DocumentView';
import { OpenWhere } from '../../nodes/OpenWhere';
import { DrawingFillHandler } from '../../smartdraw/DrawingFillHandler';
import { FireflyImageDimensions } from '../../smartdraw/FireflyConstants';
import { SmartDrawHandler } from '../../smartdraw/SmartDrawHandler';
import { AnchorMenu } from '../AnchorMenu';
import './GPTPopup.scss';

export enum GPTPopupMode {
    SUMMARY, // summary of selected document text
    IMAGE, // generate image from image description
    DATA, // analysis of the JSON data passed in via setDataJson
    GPT_MENU, // menu for choosing type of prompts user will provide
    USER_PROMPT, // user prompts for sorting, filtering and asking about docs
    QUIZ_RESPONSE, // user definitions or explanations to be evaluated by GPT
    FIREFLY, // firefly image generation
}
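/**
 * Singleton popup that surfaces Dash's GPT/Firefly integrations in one chat-style panel:
 * summarizing selected text, generating Dalle/Firefly images, analyzing JSON data,
 * quizzing the user about a document, and answering questions about (or sorting, filtering,
 * and tagging) the child docs of the selected collection.
 */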
@observer
export class GPTPopup extends ObservableReactComponent {
    static Instance: GPTPopup;
    static ChatTag = '#chat'; // tag used by GPT popup to filter docs

    private _askDictation: DictationButton | null = null;
    private _messagesEndRef: React.RefObject<HTMLDivElement>; // scrolled into view to keep the newest chat message visible
    private _correlatedColumns: string[] = [];
    private _dataChatPrompt: string | undefined = undefined;
    private _imgTargetDoc: Doc | undefined;
    private _textAnchor: Doc | undefined;
    private _dataJson: string = '';
    private _documentDescriptions: Promise<string> | undefined; // a cache of the descriptions of all docs in the selected collection. makes it more efficient when asking GPT multiple questions about the collection.
    private _sidebarFieldKey: string = '';
    private _aiReferenceText: string = '';
    private _imageDescription: string = '';
    private _textToDocMap = new Map<string, Doc>(); // when GPT answers with a doc's content, this helps us find the Doc
    private _addToCollection: ((doc: Doc | Doc[], annotationKey?: string | undefined) => boolean) | undefined;

    constructor(props: object) {
        super(props);
        makeObservable(this);
        GPTPopup.Instance = this;
        this._messagesEndRef = React.createRef();
    }

    public addDoc: ((doc: Doc | Doc[], sidebarKey?: string | undefined) => boolean) | undefined;
    public createFilteredDoc: (axes?: string[]) => boolean = () => false;
    public setSidebarFieldKey = (id: string) => (this._sidebarFieldKey = id);
    public setImgTargetDoc = (anchor: Doc) => (this._imgTargetDoc = anchor);
    public setTextAnchor = (anchor: Doc) => (this._textAnchor = anchor);
    public setDataJson = (text: string) => {
        if (text === '') this._dataChatPrompt = '';
        this._dataJson = text;
    };

    componentDidUpdate() {
        //this._gptProcessing && this.setStopAnimatingResponse(false);
    }

    componentDidMount(): void {
        reaction(
            () => ({ selDoc: DocumentView.Selected().lastElement(), visible: SnappingManager.ChatVisible }),
            ({ selDoc, visible }) => {
                const hasChildDocs = visible && selDoc?.ComponentView?.hasChildDocs;
                if (hasChildDocs) {
                    this._textToDocMap.clear();
                    this.setCollectionContext(selDoc.Document);
                    this.onGptResponse = (sortResult: string, questionType: GPTDocCommand) => this.processGptResponse(selDoc, this._textToDocMap, sortResult, questionType);
                    this.onQuizRandom = () => this.randomlyChooseDoc(selDoc.Document, hasChildDocs());
                    this._documentDescriptions = Promise.all(hasChildDocs().map(doc =>
                        Doc.getDescription(doc).then(text => text.replace(/\n/g, ' ').trim())
                            .then(text => this._textToDocMap.set(text, doc) && `${DescStart}${text}${DescEnd}`)
                    )).then(docDescriptions => docDescriptions.join(''));
                    // prettier-ignore
                    this._documentDescriptions.then(descs => { console.log(descs); });
                }
            },
            { fireImmediately: true }
        );
    }

    @observable private _showOriginal = true;
    @observable private _responseText: string = '';
    @observable private _conversationArray: string[] = ['Hi! In this pop up, you can ask ChatGPT questions about your documents and filter / sort them. '];
    @observable private _fireflyArray: string[] = ['Hi! In this pop up, you can ask Firefly to create images. '];
    @observable private _chatEnabled: boolean = false;
    @action private setChatEnabled = (start: boolean) => (this._chatEnabled = start);
    @observable private _gptProcessing: boolean = false;
    @action private setGptProcessing = (loading: boolean) => (this._gptProcessing = loading);
    @observable private _imgUrls: string[][] = [];
    @action private setImgUrls = (imgs: string[][]) => (this._imgUrls = imgs);
    @observable private _collectionContext: Doc | undefined = undefined;
    @action setCollectionContext = (doc: Doc | undefined) => (this._collectionContext = doc);
    @observable private _userPrompt: string = '';
    @action setUserPrompt = (e: string) => (this._userPrompt = e);
    @observable private _quizAnswer: string = '';
    @action setQuizAnswer = (e: string) => (this._quizAnswer = e);
    @observable private _stopAnimatingResponse: boolean = false;
    @action private setStopAnimatingResponse = (done: boolean) => (this._stopAnimatingResponse = done);
    @observable private _mode: GPTPopupMode = GPTPopupMode.SUMMARY;
    @action public setMode = (mode: GPTPopupMode) => (this._mode = mode);

    onQuizRandom?: () => void;
    onGptResponse?: (sortResult: string, questionType: GPTDocCommand, args?: string) => void;

    // GPT's command-type replies are expected to start with a digit; read it as a GPTDocCommand.
    NumberToCommandType = (questionType: string) => +questionType.split(' ')[0][0];

    /**
     * Processes gpt's output depending on the type of question the user asked. Converts gpt's string output to
     * usable code
     * @param docView the view of the collection whose child docs are being sorted/filtered/tagged
     * @param textToDocMap maps a doc's description text back to the Doc it came from
     * @param gptOutput gpt's raw response string
     * @param questionType the kind of command gpt decided the user asked for
     */
    processGptResponse = (docView: DocumentView, textToDocMap: Map<string, Doc>, gptOutput: string, questionType: GPTDocCommand) =>
        undoable(() => {
            switch (questionType) { // reset collection based on question type
                case GPTDocCommand.Sort:   docView.Document[docView.ComponentView?.fieldKey + '_sort'] = docSortings.Chat; break;
                case GPTDocCommand.Filter: docView.ComponentView?.hasChildDocs?.().forEach(d => TagItem.removeTagFromDoc(d, GPTPopup.ChatTag)); break;
            }
            // prettier-ignore
            gptOutput.split(DescEnd).filter(item => item.trim() !== '') // Split output into individual document contents
                .map(docContentRaw => docContentRaw.replace(DescStart,"").replace(/\n/g, ' ').trim())
                .map(docContentRaw => ({doc: textToDocMap.get(docContentRaw.split(DataSeperator)[0]), data: docContentRaw.split(DataSeperator)[1] })) // then find the corresponding Doc using the textToDoc map
                .filter(({doc}) => doc).map(({doc, data}) => ({doc:doc!, data})) // filter out undefined values
                .forEach(({doc, data}, index) => {
                    switch (questionType) {
                        case GPTDocCommand.Sort:       doc[ChatSortField] = index; break;
                        case GPTDocCommand.AssignTags: data && TagItem.addTagToDoc(doc, data.startsWith('#') ? data : '#'+data[0].toLowerCase()+data.slice(1) ); break;
                        case GPTDocCommand.Filter:     TagItem.addTagToDoc(doc, GPTPopup.ChatTag); Doc.setDocFilter(docView.Document, 'tags', GPTPopup.ChatTag, 'check'); break;
                    }
                }); // prettier-ignore
        }, '')();
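    // Sketch of the reply format the parsing above assumes (the delimiters are the DescStart/DescEnd/
    // DataSeperator/DocSeperator strings defined in GPT.ts):
    //   DescStart + <verbatim doc description> + DataSeperator + <optional data, e.g. a tag> + DescEnd ... repeated per doc,
    // optionally followed by an explanation wrapped in DocSeperator (extracted in generateUserPromptResponse below).
    // Each <verbatim doc description> must match a key in textToDocMap for its Doc to be recovered.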
    /**
     * When in quiz mode, randomly selects a document
     */
    randomlyChooseDoc = (doc: Doc, childDocs: Doc[]) => DocumentView.getDocumentView(childDocs[Math.floor(Math.random() * childDocs.length)])?.select(false);

    /**
     * Generates a rubric for evaluating the user's description of the document's text
     * @param doc the doc the user is providing info about
     * @returns gpt's response rubric
     */
    generateRubric = (doc: Doc) =>
        StrCast(doc.gptRubric)
            ? Promise.resolve(StrCast(doc.gptRubric))
            : Doc.getDescription(doc).then(desc =>
                  gptAPICall(desc, GPTCallType.MAKERUBRIC)
                      .then(res => (doc.gptRubric = res))
                      .catch(err => console.error('GPT call failed', err))
              );

    /**
     * When the cards are in quiz mode in the card view, allows gpt to determine whether the user's answer was correct
     * @param doc the doc the user is providing info about
     * @param quizAnswer the user's answer/description for the document
     * @returns a promise that resolves after gpt's feedback has been appended to the conversation
     */
    generateQuizAnswerAnalysis = (doc: Doc, quizAnswer: string) =>
        this.generateRubric(doc).then(() =>
            Doc.getDescription(doc).then(desc =>
                gptAPICall(
                    `Question: ${desc}; UserAnswer: ${quizAnswer}; Rubric: ${StrCast(doc.gptRubric)}`,
                    GPTCallType.QUIZDOC
                ).then(res => {
                    this._conversationArray.push(res || 'GPT provided no answer');
                    this.onQuizRandom?.();
                })
                .catch(err => console.error('GPT call failed', err))
            )); // prettier-ignore

    /**
     * Generates an image from the prompt with Firefly. If a nested or image-backed doc is selected, its
     * previous ai_prompt is extended with the new description and DrawingFillHandler regenerates it in place;
     * otherwise SmartDrawHandler creates a new Firefly image doc that is opened to the right.
     * @param imgDesc the user's image description
     */
    generateFireflyImage = (imgDesc: string) => {
        const selView = DocumentView.Selected().lastElement();
        const selDoc = selView?.Document;
        if (selDoc && (selView._props.renderDepth > 1 || selDoc[Doc.LayoutDataKey(selDoc)] instanceof ImageField)) {
            const oldPrompt = StrCast(selDoc.ai_prompt, StrCast(selDoc.title));
            const newPrompt = oldPrompt ? `${oldPrompt} ~~~ ${imgDesc}` : imgDesc;
            return DrawingFillHandler.drawingToImage(selDoc, 100, newPrompt, selDoc)
                .then(action(() => (this._userPrompt = '')))
                .catch(e => {
                    alert(e);
                    return undefined;
                });
        }
        return SmartDrawHandler.CreateWithFirefly(imgDesc, FireflyImageDimensions.Square, 0)
            .then(
                action(doc => {
                    doc instanceof Doc && DocumentViewInternal.addDocTabFunc(doc, OpenWhere.addRight);
                    this._userPrompt = '';
                })
            )
            .catch(e => {
                alert(e);
                return undefined;
            });
    };

    /**
     * Generates a response to the user's question about the docs in the collection.
     * The type of response depends on the chat's analysis of the type of their question
     * @param userPrompt the user's input that chat will respond to
     */
    generateUserPromptResponse = (userPrompt: string) =>
        gptAPICall(userPrompt, GPTCallType.COMMANDTYPE, undefined, true).then(commandType =>
            (async () => {
                switch (this.NumberToCommandType(commandType)) {
                    case GPTDocCommand.AssignTags: return this._documentDescriptions?.then(descs => gptAPICall(userPrompt, GPTCallType.TAGDOCS, descs)) ?? "";
                    case GPTDocCommand.Filter:     return this._documentDescriptions?.then(descs => gptAPICall(userPrompt, GPTCallType.SUBSETDOCS, descs)) ?? "";
                    case GPTDocCommand.Sort:       return this._documentDescriptions?.then(descs => gptAPICall(userPrompt, GPTCallType.SORTDOCS, descs)) ?? "";
                    default: return Doc.getDescription(DocumentView.SelectedDocs().lastElement()).then(desc => gptAPICall(userPrompt, GPTCallType.DOCINFO, desc));
                } // prettier-ignore
            })().then(
                action(res => {
                    // Trigger the callback with the result
                    this.onGptResponse?.(res || 'Something went wrong :(', this.NumberToCommandType(commandType));
                    this._conversationArray.push(
                        this.NumberToCommandType(commandType) === GPTDocCommand.GetInfo ? res :
                        // Extract explanation surrounded by the DocSeperator string (defined in GPT.ts) at the top or both at the top and bottom
                        (res.match(new RegExp(`${DocSeperator}\\s*([\\s\\S]*?)\\s*(?:${DocSeperator}|$)`)) ?? [])[1]?.trim() ?? 'No explanation found'
                    );
                })
            ).catch(err => console.log(err))
        ).catch(err => console.log(err)); // prettier-ignore
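    // Rough control flow for a user prompt (a summary of the chain above, not additional behavior):
    //   1. GPTCallType.COMMANDTYPE classifies the prompt; NumberToCommandType reads the leading digit of
    //      the reply as a GPTDocCommand.
    //   2. Sort / Filter / AssignTags re-prompt GPT with the cached _documentDescriptions; any other
    //      command falls through to GPTCallType.DOCINFO about the selected doc.
    //   3. onGptResponse (wired up in componentDidMount) forwards the raw reply to processGptResponse,
    //      and the explanation between DocSeperator markers is pushed onto _conversationArray.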
    /**
     * Generates a Dalle image and uploads it to the server.
     */
    generateImage = (imgDesc: string, imgTarget: Doc, addToCollection?: (doc: Doc | Doc[], annotationKey?: string | undefined) => boolean) => {
        this._imgTargetDoc = imgTarget;
        SnappingManager.SetChatVisible(true);
        this.addDoc = addToCollection;
        this.setImgUrls([]);
        this.setMode(GPTPopupMode.IMAGE);
        this.setGptProcessing(true);
        this._imageDescription = imgDesc;
        return gptImageCall(imgDesc)
            .then(imageUrls =>
                imageUrls?.[0]
                    ? Networking.PostToServer('/uploadRemoteImage', { sources: [imageUrls[0]] }).then(res => {
                          const source = ClientUtils.prepend((res as Upload.FileInformation[])[0].accessPaths.agnostic.client);
                          return this.setImgUrls([[imageUrls[0]!, source]]);
                      })
                    : undefined
            )
            .catch(err => console.error(err))
            .finally(() => this.setGptProcessing(false));
    };

    /**
     * Completes an API call to generate a summary of the specified text
     *
     * @param text the text to summarize
     */
    private generateSummary = action((text: string) => {
        SnappingManager.SetChatVisible(true);
        this._showOriginal = false;
        this.setGptProcessing(true);
        return gptAPICall(text, GPTCallType.SUMMARY)
            .then(action(res => (this._responseText = res || 'Something went wrong.')))
            .catch(err => console.error(err))
            .finally(() => this.setGptProcessing(false));
    });

    /**
     * Stores the selected text and opens the popup in SUMMARY mode so the user can summarize it or create flashcards
     *
     * @param text the selected text to reference
     */
    askAIAboutSelection = action((text: string) => {
        SnappingManager.SetChatVisible(true);
        this._aiReferenceText = text;
        this._responseText = '';
        this._showOriginal = true;
        this.setMode(GPTPopupMode.SUMMARY);
    });

    /**
     * Completes an API call to generate an analysis of
     * this._dataJson in the popup.
     */
    generateDataAnalysis = () => {
        this.setGptProcessing(true);
        return gptAPICall(this._dataJson, GPTCallType.DATA, this._dataChatPrompt)
            .then(
                action(res => {
                    const json = JSON.parse(res! as string);
                    const keys = Object.keys(json);
                    this._correlatedColumns = [];
                    this._correlatedColumns.push(json[keys[0]]);
                    this._correlatedColumns.push(json[keys[1]]);
                    this._responseText = json[keys[2]] || 'Something went wrong.';
                })
            )
            .catch(err => console.error(err))
            .finally(() => this.setGptProcessing(false));
    };
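    // generateDataAnalysis assumes GPTCallType.DATA returns a JSON object with three entries that are read
    // purely by position, e.g. (hypothetical keys and values):
    //   { "columnA": "age", "columnB": "income", "analysis": "Age and income are strongly correlated..." }
    // The first two values become _correlatedColumns (used by createVisualization below) and the third
    // becomes the displayed _responseText.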
    /**
     * Transfers the summarization text to a sidebar annotation text document.
     */
    private transferToText = () => {
        const newDoc = Docs.Create.TextDocument(this._responseText.trim(), {
            _width: 200,
            _height: 50,
            _layout_fitWidth: true,
            _layout_autoHeight: true,
        });
        this.addDoc?.(newDoc, this._sidebarFieldKey);
        const anchor = AnchorMenu.Instance?.GetAnchor(undefined, false);
        if (anchor) {
            DocUtils.MakeLink(newDoc, anchor, {
                link_relationship: 'GPT Summary',
            });
        }
    };

    /**
     * Create Flashcards for the selected text
     */
    private createFlashcards = action(
        () =>
            this.setGptProcessing(true) &&
            gptAPICall(this._aiReferenceText, GPTCallType.FLASHCARD, undefined, true)
                .then(res =>
                    ComparisonBox.createFlashcardDeck(res, 250, 200, 'data_front', 'data_back').then(
                        action(newCol => {
                            newCol.zIndex = 1000;
                            DocumentViewInternal.addDocTabFunc(newCol, OpenWhere.addRight);
                        })
                    )
                )
                .catch(console.error)
                .finally(action(() => (this._gptProcessing = false)))
    );

    /**
     * Creates a histogram to show the correlation relationship that was found
     */
    private createVisualization = () => this.createFilteredDoc(this._correlatedColumns);

    /**
     * Transfers the image urls to actual image docs
     */
    private transferToImage = (source: string) => {
        const textAnchor = this._textAnchor ?? this._imgTargetDoc;
        if (textAnchor) {
            const newDoc = Docs.Create.ImageDocument(source, {
                x: NumCast(textAnchor.x) + NumCast(textAnchor._width) + 10,
                y: NumCast(textAnchor.y),
                _height: 200,
                _width: 200,
                ai: 'dall-e',
                tags: new List(['@ai']),
                data_nativeWidth: 1024,
                data_nativeHeight: 1024,
            });
            if (Doc.IsInMyOverlay(textAnchor)) {
                newDoc.overlayX = textAnchor.x;
                newDoc.overlayY = NumCast(textAnchor.y) + NumCast(textAnchor._height);
                Doc.AddToMyOverlay(newDoc);
            } else {
                this.addDoc?.(newDoc);
            }
            // Create link between prompt and image
            DocUtils.MakeLink(textAnchor, newDoc, { link_relationship: 'Image Prompt' });
        }
    };

    scrollToBottom = () => setTimeout(() => this._messagesEndRef.current?.scrollIntoView({ behavior: 'smooth', block: 'end' }), 50);

    gptMenu = () => (
    );

    /**
     * Sends the current prompt/answer to the appropriate generator for the given popup mode and clears it when done.
     */
    callGpt = action((mode: GPTPopupMode) => {
        this.setGptProcessing(true);
        const reset = action(() => {
            this.setGptProcessing(false);
            this._userPrompt = '';
            this._quizAnswer = '';
        });
        switch (mode) {
            case GPTPopupMode.FIREFLY:
                this._fireflyArray.push(this._userPrompt);
                return this.generateFireflyImage(this._userPrompt).then(reset);
            case GPTPopupMode.USER_PROMPT:
                this._conversationArray.push(this._userPrompt);
                return this.generateUserPromptResponse(this._userPrompt).then(reset);
            case GPTPopupMode.QUIZ_RESPONSE:
                this._conversationArray.push(this._quizAnswer);
                return this.generateQuizAnswerAnalysis(DocumentView.SelectedDocs().lastElement(), this._quizAnswer).then(reset);
        }
    });

    /**
     * Submits the prompt box contents when the user presses Enter.
     */
    @action
    handleKeyPress = async (e: React.KeyboardEvent, mode: GPTPopupMode) => {
        this._askDictation?.stopDictation();
        if (e.key === 'Enter') {
            e.stopPropagation();
            this.callGpt(mode)?.then(() => {
                this.setGptProcessing(false);
                this.scrollToBottom();
            });
        }
    };

    gptUserInput = () => (
{(this._mode === GPTPopupMode.FIREFLY ? this._fireflyArray : this._conversationArray).map((message, index) => (
{message}
))} {this._gptProcessing &&
...
}
); setDictationRef = (r: DictationButton | null) => (this._askDictation = r); promptBox = (heading: string, value: string, onChange: (e: string) => string, placeholder: string) => ( <>
{this.heading(heading)} {this.gptUserInput()}
onChange(e.target.value)} onKeyDown={e => this.handleKeyPress(e, this._mode)} type="text" style={{ color: 'black' }} placeholder={placeholder} />
); menuBox = () => (
{this.heading('CHOOSE')} {this.gptMenu()}
); imageBox = () => (
{this.heading('GENERATED IMAGE')}
{this._imgUrls.map((rawSrc, i) => ( <>
dalle generation
))}
{this._gptProcessing ? null : ( this._imgTargetDoc && this.generateImage(this._imageDescription, this._imgTargetDoc, this._addToCollection)} icon={} color={SettingsManager.userColor} background={SettingsManager.userVariantColor} /> )}
); summaryBox = () => ( <>
(this._showOriginal = !this._showOriginal))}>{this.heading(this._showOriginal ? 'SELECTION' : 'SUMMARY')}
{!this._gptProcessing && !this._stopAnimatingResponse && this._responseText ? ( { setTimeout(() => this.setStopAnimatingResponse(true), 500); }, ]} /> ) : this._showOriginal ? ( this._gptProcessing ? ( '...generating cards...' ) : ( this._aiReferenceText ) ) : ( this._responseText || (this._gptProcessing ? '...generating summary...' : '-no ai summary-') )}
{this._gptProcessing ? null : (
{this._stopAnimatingResponse || !this._responseText ? (
{!this._showOriginal ? ( <>
) : (
{this._showOriginal ? 'Creating Cards...' : 'Summarizing'}
)}
)} ); dataAnalysisBox = () => ( <>
{this.heading('ANALYSIS')}
{!this._gptProcessing && (!this._stopAnimatingResponse ? ( { setTimeout(() => this.setStopAnimatingResponse(true), 500); }, ]} /> ) : ( this._responseText ))}
{!this._gptProcessing && (
{this._stopAnimatingResponse ? ( this._chatEnabled ? ( (this._dataChatPrompt = e.target.value)} onKeyDown={e => { e.key === 'Enter' ? this.generateDataAnalysis() : null; e.stopPropagation(); }} type="text" placeholder="Ask GPT a question about the data..." id="search-input" className="searchBox-input" style={{ width: '100%', color: SnappingManager.userColor }} /> ) : ( <>
)}
)} ); aiWarning = () => !this._stopAnimatingResponse ? null : (
AI-generated responses can contain inaccurate or misleading content.
); heading = (headingText: string) => (
{this._gptProcessing ? ( ) : ( <> this._collectionContext && Doc.setDocFilter(this._collectionContext, 'tags', GPTPopup.ChatTag, 'remove')} /> {[GPTPopupMode.USER_PROMPT, GPTPopupMode.QUIZ_RESPONSE, GPTPopupMode.FIREFLY].includes(this._mode) && ( } onClick={action(() => (this._mode = GPTPopupMode.GPT_MENU))} /> )} )}
); render() { return (
                {(() => {
                    //prettier-ignore
                    switch (this._mode) {
                        case GPTPopupMode.USER_PROMPT:   return this.promptBox("ASK",    this._userPrompt, this.setUserPrompt, 'Ask GPT to sort, tag, define, or filter your documents for you!');
                        case GPTPopupMode.FIREFLY:       return this.promptBox("CREATE", this._userPrompt, this.setUserPrompt, StrCast(DocumentView.Selected().lastElement()?.Document.ai_prompt, 'Ask Firefly to generate images'));
                        case GPTPopupMode.QUIZ_RESPONSE: return this.promptBox("QUIZ",   this._quizAnswer, this.setQuizAnswer, 'Describe/answer the selected document!');
                        case GPTPopupMode.GPT_MENU:      return this.menuBox();
                        case GPTPopupMode.SUMMARY:       return this.summaryBox();
                        case GPTPopupMode.DATA:          return this.dataAnalysisBox();
                        case GPTPopupMode.IMAGE:         return this.imageBox();
                        default:                         return null;
                    }
                })()}
); } }
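/**
 * Typical entry points (call sites outside this file are assumptions based on the public API above):
 *   - GPTPopup.Instance.askAIAboutSelection(text)                        opens the popup in SUMMARY mode for selected text
 *   - GPTPopup.Instance.generateImage(desc, targetDoc, addFn)            generates a Dalle image and links it to targetDoc
 *   - GPTPopup.Instance.setDataJson(json) + setMode(GPTPopupMode.DATA)   prepares the popup for data analysis
 * USER_PROMPT / QUIZ_RESPONSE / FIREFLY modes are driven by the reaction in componentDidMount and the prompt box.
 */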