Diffstat (limited to 'src')
-rw-r--r--   src/client/apis/gpt/GPT.ts                                   87
-rw-r--r--   src/client/views/MainView.tsx                                 3
-rw-r--r--   src/client/views/MarqueeAnnotator.tsx                         2
-rw-r--r--   src/client/views/nodes/ImageBox.tsx                           2
-rw-r--r--   src/client/views/nodes/PDFBox.tsx                             1
-rw-r--r--   src/client/views/nodes/WebBox.tsx                             7
-rw-r--r--   src/client/views/nodes/formattedText/FormattedTextBox.scss    4
-rw-r--r--   src/client/views/nodes/formattedText/FormattedTextBox.tsx    67
-rw-r--r--   src/client/views/pdf/AnchorMenu.tsx                         184
-rw-r--r--   src/client/views/pdf/GPTPopup/GPTPopup.scss                 132
-rw-r--r--   src/client/views/pdf/GPTPopup/GPTPopup.tsx                  188
-rw-r--r--   src/client/views/pdf/PDFViewer.tsx                           12
12 files changed, 682 insertions, 7 deletions
diff --git a/src/client/apis/gpt/GPT.ts b/src/client/apis/gpt/GPT.ts
new file mode 100644
index 000000000..4b3960902
--- /dev/null
+++ b/src/client/apis/gpt/GPT.ts
@@ -0,0 +1,87 @@
+import { Configuration, OpenAIApi } from 'openai';
+
+enum GPTCallType {
+    SUMMARY = 'summary',
+    COMPLETION = 'completion',
+    EDIT = 'edit',
+}
+
+type GPTCallOpts = {
+    model: string;
+    maxTokens: number;
+    temp: number;
+    prompt: string;
+};
+
+const callTypeMap: { [type: string]: GPTCallOpts } = {
+    summary: { model: 'text-davinci-003', maxTokens: 256, temp: 0.5, prompt: 'Summarize this text briefly: ' },
+    edit: { model: 'text-davinci-003', maxTokens: 256, temp: 0.5, prompt: 'Reword this: ' },
+    completion: { model: 'text-davinci-003', maxTokens: 256, temp: 0.5, prompt: '' },
+};
+
+/**
+ * Calls the OpenAI API.
+ *
+ * @param inputText Text to process
+ * @returns AI Output
+ */
+const gptAPICall = async (inputText: string, callType: GPTCallType) => {
+    if (callType === GPTCallType.SUMMARY) inputText += '.';
+    const opts: GPTCallOpts = callTypeMap[callType];
+    try {
+        const configuration = new Configuration({
+            apiKey: process.env.OPENAI_KEY,
+        });
+        const openai = new OpenAIApi(configuration);
+        const response = await openai.createCompletion({
+            model: opts.model,
+            max_tokens: opts.maxTokens,
+            temperature: opts.temp,
+            prompt: `${opts.prompt}${inputText}`,
+        });
+        console.log(response.data.choices[0]);
+        return response.data.choices[0].text;
+    } catch (err) {
+        console.log(err);
+        return 'Error connecting with API.';
+    }
+};
+
+const gptImageCall = async (prompt: string) => {
+    try {
+        const configuration = new Configuration({
+            apiKey: process.env.OPENAI_KEY,
+        });
+        const openai = new OpenAIApi(configuration);
+        const response = await openai.createImage({
+            prompt: prompt,
+            n: 1,
+            size: '1024x1024',
+        });
+        return response.data.data[0].url;
+    } catch (err) {
+        console.error(err);
+        return;
+    }
+};
+
+// const gptEditCall = async (selectedText: string, fullText: string) => {
+//     try {
+//         const configuration = new Configuration({
+//             apiKey: process.env.OPENAI_KEY,
+//         });
+//         const openai = new OpenAIApi(configuration);
+//         const response = await openai.createCompletion({
+//             model: 'text-davinci-003',
+//             max_tokens: 256,
+//             temperature: 0.1,
+//             prompt: `Replace the phrase ${selectedText} inside of ${fullText}.`,
+//         });
+//         return response.data.choices[0].text.trim();
+//     } catch (err) {
+//         console.log(err);
+//         return 'Error connecting with API.';
+//     }
+// };
+
+export { gptAPICall, gptImageCall, GPTCallType };
diff --git a/src/client/views/MainView.tsx b/src/client/views/MainView.tsx
index ccc9a7215..acbe0cbc3 100644
--- a/src/client/views/MainView.tsx
+++ b/src/client/views/MainView.tsx
@@ -243,6 +243,7 @@ export class MainView extends React.Component {
         library.add(
             ...[
+                fa.faExclamationCircle,
                 fa.faEdit,
                 fa.faTrash,
                 fa.faTrashAlt,
@@ -275,6 +276,7 @@ export class MainView extends React.Component {
                 fa.faHandPointRight,
                 fa.faCompass,
                 fa.faSnowflake,
+                fa.faStar,
                 fa.faMicrophone,
                 fa.faKeyboard,
                 fa.faQuestion,
@@ -322,6 +324,7 @@ export class MainView extends React.Component {
                 fa.faClone,
                 fa.faCloudUploadAlt,
                 fa.faCommentAlt,
+                fa.faCommentDots,
                 fa.faCompressArrowsAlt,
                 fa.faCut,
                 fa.faEllipsisV,
diff --git a/src/client/views/MarqueeAnnotator.tsx b/src/client/views/MarqueeAnnotator.tsx
index ede387927..8fd2b87cc 100644
--- a/src/client/views/MarqueeAnnotator.tsx
+++ b/src/client/views/MarqueeAnnotator.tsx
@@ -206,7 +206,7 @@ export class MarqueeAnnotator extends React.Component<MarqueeAnnotatorProps> {
         return textRegionAnno;
     };
     @action
-    highlight = (color: string, isLinkButton: boolean, savedAnnotations?: ObservableMap<number, HTMLDivElement[]>, addAsAnnotation?: boolean) => {
+    highlight = (color: string, isLinkButton: boolean, savedAnnotations?: ObservableMap<number, HTMLDivElement[]>, addAsAnnotation?: boolean, summarize?: boolean) => {
         // creates annotation documents for current highlights
         const effectiveAcl = GetEffectiveAcl(this.props.rootDoc[DataSym]);
         const annotationDoc = [AclAugment, AclSelfEdit, AclEdit, AclAdmin].includes(effectiveAcl) && this.makeAnnotationDocument(color, isLinkButton, savedAnnotations);
diff --git a/src/client/views/nodes/ImageBox.tsx b/src/client/views/nodes/ImageBox.tsx
index 7c98aa6e4..e8d4be1fd 100644
--- a/src/client/views/nodes/ImageBox.tsx
+++ b/src/client/views/nodes/ImageBox.tsx
@@ -352,7 +352,7 @@ export class ImageBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
     @computed get nativeSize() {
         TraceMobx();
         const nativeWidth = NumCast(this.dataDoc[this.fieldKey + '-nativeWidth'], NumCast(this.layoutDoc[this.fieldKey + '-nativeWidth'], 500));
-        const nativeHeight = NumCast(this.dataDoc[this.fieldKey + '-nativeHeight'], NumCast(this.layoutDoc[this.fieldKey + '-nativeHeight'], 1));
+        const nativeHeight = NumCast(this.dataDoc[this.fieldKey + '-nativeHeight'], NumCast(this.layoutDoc[this.fieldKey + '-nativeHeight'], 500));
         const nativeOrientation = NumCast(this.dataDoc[this.fieldKey + '-nativeOrientation'], 1);
         return { nativeWidth, nativeHeight, nativeOrientation };
     }
diff --git a/src/client/views/nodes/PDFBox.tsx b/src/client/views/nodes/PDFBox.tsx
index 8f87b9e08..6aa04e356 100644
--- a/src/client/views/nodes/PDFBox.tsx
+++ b/src/client/views/nodes/PDFBox.tsx
@@ -586,6 +586,7 @@ export class PDFBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProps
             }}>
             <PDFViewer
                 {...this.props}
+                sidebarAddDoc={this.sidebarAddDocument}
                 rootDoc={this.rootDoc}
                 addDocTab={this.sidebarAddDocTab}
                 layoutDoc={this.layoutDoc}
diff --git a/src/client/views/nodes/WebBox.tsx b/src/client/views/nodes/WebBox.tsx
index ef3aa1f47..e05b48c0b 100644
--- a/src/client/views/nodes/WebBox.tsx
+++ b/src/client/views/nodes/WebBox.tsx
@@ -28,6 +28,7 @@ import { LightboxView } from '../LightboxView';
 import { MarqueeAnnotator } from '../MarqueeAnnotator';
 import { AnchorMenu } from '../pdf/AnchorMenu';
 import { Annotation } from '../pdf/Annotation';
+import { GPTPopup } from '../pdf/GPTPopup/GPTPopup';
 import { SidebarAnnos } from '../SidebarAnnos';
 import { StyleProp } from '../StyleProvider';
 import { DocFocusOptions, DocumentView, DocumentViewProps, OpenWhere } from './DocumentView';
@@ -366,8 +367,13 @@ export class WebBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProps
             const scale = (this.props.NativeDimScaling?.() || 1) * mainContBounds.scale;
             const sel = this._iframe.contentWindow.getSelection();
             if (sel) {
+                this._selectionText = sel.toString();
+                AnchorMenu.Instance.setSelectedText(sel.toString());
                 this._textAnnotationCreator = () => this.createTextAnnotation(sel, !sel.isCollapsed ? sel.getRangeAt(0) : undefined);
                 AnchorMenu.Instance.jumpTo(e.clientX * scale + mainContBounds.translateX, e.clientY * scale + mainContBounds.translateY - NumCast(this.layoutDoc._scrollTop) * scale);
+                // Changing which document to add the annotation to (the currently selected WebBox)
+                GPTPopup.Instance.setSidebarId(`${this.props.fieldKey}-${this._urlHash}-sidebar`);
+                GPTPopup.Instance.addDoc = this.sidebarAddDocument;
             }
         }
     };
@@ -785,6 +791,7 @@ export class WebBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProps
     }

     addDocumentWrapper = (doc: Doc | Doc[], annotationKey?: string) => {
+        console.log(annotationKey);
         (doc instanceof Doc ? [doc] : doc).forEach(doc => (doc.webUrl = this._url));
         return this.addDocument(doc, annotationKey);
     };
diff --git a/src/client/views/nodes/formattedText/FormattedTextBox.scss b/src/client/views/nodes/formattedText/FormattedTextBox.scss
index cbe0a465d..fd7fbb333 100644
--- a/src/client/views/nodes/formattedText/FormattedTextBox.scss
+++ b/src/client/views/nodes/formattedText/FormattedTextBox.scss
@@ -149,6 +149,10 @@ audiotag:hover {
     }
 }

+.gpt-typing-wrapper {
+    padding: 10px;
+}
+
 // .menuicon {
 //     display: inline-block;
 //     border-right: 1px solid rgba(0, 0, 0, 0.2);
diff --git a/src/client/views/nodes/formattedText/FormattedTextBox.tsx b/src/client/views/nodes/formattedText/FormattedTextBox.tsx
index f5826ef95..3e60441aa 100644
--- a/src/client/views/nodes/formattedText/FormattedTextBox.tsx
+++ b/src/client/views/nodes/formattedText/FormattedTextBox.tsx
@@ -3,6 +3,7 @@ import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
 import { isEqual } from 'lodash';
 import { action, computed, IReactionDisposer, observable, ObservableSet, reaction, runInAction } from 'mobx';
 import { observer } from 'mobx-react';
+import { Configuration, OpenAIApi } from 'openai';
 import { baseKeymap, selectAll } from 'prosemirror-commands';
 import { history } from 'prosemirror-history';
 import { inputRules } from 'prosemirror-inputrules';
@@ -22,9 +23,11 @@ import { BoolCast, Cast, DocCast, FieldValue, NumCast, ScriptCast, StrCast } fro
 import { GetEffectiveAcl, TraceMobx } from '../../../../fields/util';
 import { addStyleSheet, addStyleSheetRule, clearStyleSheetRules, emptyFunction, numberRange, returnFalse, returnZero, setupMoveUpEvents, smoothScroll, unimplementedFunction, Utils } from '../../../../Utils';
 import { GoogleApiClientUtils, Pulls, Pushes } from '../../../apis/google_docs/GoogleApiClientUtils';
+import { gptAPICall, GPTCallType, gptImageCall } from '../../../apis/gpt/GPT';
 import { DocServer } from '../../../DocServer';
 import { Docs, DocUtils } from '../../../documents/Documents';
 import { CollectionViewType, DocumentType } from '../../../documents/DocumentTypes';
+import { Networking } from '../../../Network';
 import { DictationManager } from '../../../util/DictationManager';
 import { DocumentManager } from '../../../util/DocumentManager';
 import { DragManager } from '../../../util/DragManager';
@@ -66,9 +69,7 @@ import { SummaryView } from './SummaryView';
 import applyDevTools = require('prosemirror-dev-tools');
 import React = require('react');
 const translateGoogleApi = require('translate-google-api');
-
 export const GoogleRef = 'googleDocId';
-
 type PullHandler = (exportState: Opt<GoogleApiClientUtils.Docs.ImportResult>, dataDoc: Doc) => void;

 @observer
@@ -172,6 +173,10 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<FieldViewProps
         };
     }

+    // State for GPT
+    @observable
+    private gptRes: string = '';
+
     public static PasteOnLoad: ClipboardEvent | undefined;
     public static SelectOnLoad = '';
     public static DontSelectInitialText = false; // whether initial text should be selected or not
@@ -845,12 +850,65 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<FieldViewProps
         const options = cm.findByDescription('Options...');
         const optionItems = options && 'subitems' in options ? options.subitems : [];

+        optionItems.push({ description: `Generate Dall-E Image`, event: () => this.generateImage(), icon: 'star' });
+        optionItems.push({ description: `Ask GPT-3`, event: () => this.askGPT(), icon: 'lightbulb' });
         optionItems.push({ description: !this.Document._singleLine ? 'Make Single Line' : 'Make Multi Line', event: () => (this.layoutDoc._singleLine = !this.layoutDoc._singleLine), icon: !this.Document._singleLine ? 'grip-lines' : 'bars' });
         optionItems.push({ description: `${this.Document._autoHeight ? 'Lock' : 'Auto'} Height`, event: () => (this.layoutDoc._autoHeight = !this.layoutDoc._autoHeight), icon: this.Document._autoHeight ? 'lock' : 'unlock' });
         !options && cm.addItem({ description: 'Options...', subitems: optionItems, icon: 'eye' });
         this._downX = this._downY = Number.NaN;
     };

+    animateRes = (resIndex: number) => {
+        if (resIndex < this.gptRes.length) {
+            this.dataDoc.text = (this.dataDoc.text as RichTextField)?.Text + this.gptRes[resIndex];
+            setTimeout(() => {
+                this.animateRes(resIndex + 1);
+            }, 20);
+        }
+    };
+
+    askGPT = action(async () => {
+        try {
+            let res = await gptAPICall((this.dataDoc.text as RichTextField)?.Text, GPTCallType.COMPLETION);
+            if (res) {
+                this.gptRes = res;
+                this.animateRes(0);
+            }
+        } catch (err) {
+            console.log(err);
+            this.dataDoc.text = (this.dataDoc.text as RichTextField)?.Text + 'Something went wrong';
+        }
+    });
+
+    generateImage = async () => {
+        console.log('Generate image from text: ', (this.dataDoc.text as RichTextField)?.Text);
+        try {
+            let image_url = await gptImageCall((this.dataDoc.text as RichTextField)?.Text);
+            if (image_url) {
+                const [{ accessPaths }] = await Networking.PostToServer('/uploadRemoteImage', { sources: [image_url] });
+                const source = Utils.prepend(accessPaths.agnostic.client);
+                const newDoc = Docs.Create.ImageDocument(source, {
+                    x: NumCast(this.rootDoc.x) + NumCast(this.layoutDoc._width) + 10,
+                    y: NumCast(this.rootDoc.y),
+                    _height: 200,
+                    _width: 200,
+                });
+                if (DocListCast(Doc.MyOverlayDocs?.data).includes(this.rootDoc)) {
+                    newDoc.overlayX = this.rootDoc.x;
+                    newDoc.overlayY = NumCast(this.rootDoc.y) + NumCast(this.rootDoc._height);
+                    Doc.AddDocToList(Doc.MyOverlayDocs, undefined, newDoc);
+                } else {
+                    this.props.addDocument?.(newDoc);
+                }
+                // Create link between prompt and image
+                DocUtils.MakeLink(this.rootDoc, newDoc, { linkRelationship: 'Image Prompt' });
+            }
+        } catch (err) {
+            console.log(err);
+            return '';
+        }
+    };
+
     breakupDictation = () => {
         if (this._editorView && this._recording) {
             this.stopDictation(true);
@@ -1129,6 +1187,11 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<FieldViewProps
                         RichTextMenu.Instance?.updateMenu(this._editorView, undefined, this.props);
                         this.autoLink();
                     }
+                    // Accessing editor and text doc for gpt assisted text edits
+                    if (this._editorView && selected) {
+                        AnchorMenu.Instance?.setEditorView(this._editorView);
+                        AnchorMenu.Instance?.setTextDoc(this.dataDoc);
+                    }
                 }),
                 { fireImmediately: true }
             );
diff --git a/src/client/views/pdf/AnchorMenu.tsx b/src/client/views/pdf/AnchorMenu.tsx
index 7392d2706..d6dddf71a 100644
--- a/src/client/views/pdf/AnchorMenu.tsx
+++ b/src/client/views/pdf/AnchorMenu.tsx
@@ -10,8 +10,11 @@ import { SelectionManager } from '../../util/SelectionManager';
 import { AntimodeMenu, AntimodeMenuProps } from '../AntimodeMenu';
 import { LinkPopup } from '../linking/LinkPopup';
 import { ButtonDropdown } from '../nodes/formattedText/RichTextMenu';
-import './AnchorMenu.scss';
+import { gptAPICall, GPTCallType } from '../../apis/gpt/GPT';
+import { GPTPopup, GPTPopupMode } from './GPTPopup/GPTPopup';
 import { LightboxView } from '../LightboxView';
+import { EditorView } from 'prosemirror-view';
+import './AnchorMenu.scss';

 @observer
 export class AnchorMenu extends AntimodeMenu<AntimodeMenuProps> {
@@ -43,6 +46,56 @@ export class AnchorMenu extends AntimodeMenu<AntimodeMenuProps> {
     @observable public Status: 'marquee' | 'annotation' | '' = '';

+    // GPT additions
+    @observable private GPTpopupText: string = '';
+    @observable private loadingGPT: boolean = false;
+    @observable private showGPTPopup: boolean = false;
+    @observable private GPTMode: GPTPopupMode = GPTPopupMode.SUMMARY;
+    @observable private selectedText: string = '';
+    @observable private editorView?: EditorView;
+    @observable private textDoc?: Doc;
+    @observable private highlightRange: number[] | undefined;
+    private selectionRange: number[] | undefined;
+
+    @action
+    setGPTPopupVis = (vis: boolean) => {
+        this.showGPTPopup = vis;
+    };
+    @action
+    setGPTMode = (mode: GPTPopupMode) => {
+        this.GPTMode = mode;
+    };
+
+    @action
+    setGPTPopupText = (txt: string) => {
+        this.GPTpopupText = txt;
+    };
+
+    @action
+    setLoading = (loading: boolean) => {
+        this.loadingGPT = loading;
+    };
+
+    @action
+    setHighlightRange(r: number[] | undefined) {
+        this.highlightRange = r;
+    }
+
+    @action
+    public setSelectedText = (txt: string) => {
+        this.selectedText = txt;
+    };
+
+    @action
+    public setEditorView = (editor: EditorView) => {
+        this.editorView = editor;
+    };
+
+    @action
+    public setTextDoc = (textDoc: Doc) => {
+        this.textDoc = textDoc;
+    };
+
     public onMakeAnchor: () => Opt<Doc> = () => undefined; // Method to get anchor from text search

     public OnCrop: (e: PointerEvent) => void = unimplementedFunction;
@@ -76,18 +129,94 @@ export class AnchorMenu extends AntimodeMenu<AntimodeMenuProps> {
     componentDidMount() {
         this._disposer2 = reaction(
             () => this._opacity,
-            opacity => !opacity && (this._showLinkPopup = false),
+            opacity => {
+                if (!opacity) {
+                    this._showLinkPopup = false;
+                    this.setGPTPopupVis(false);
+                    this.setGPTPopupText('');
+                }
+            },
             { fireImmediately: true }
         );
         this._disposer = reaction(
             () => SelectionManager.Views().slice(),
             selected => {
                 this._showLinkPopup = false;
+                this.setGPTPopupVis(false);
+                this.setGPTPopupText('');
                 AnchorMenu.Instance.fadeOut(true);
             }
         );
     }

+    /**
+     * Invokes the API with the selected text and stores it in the summarized text.
+     * @param e pointer down event
+     */
+    gptSummarize = async (e: React.PointerEvent) => {
+        this.setHighlightRange(undefined);
+        this.setGPTPopupVis(true);
+        this.setGPTMode(GPTPopupMode.SUMMARY);
+        this.setLoading(true);
+
+        try {
+            const res = await gptAPICall(this.selectedText, GPTCallType.SUMMARY);
+            if (res) {
+                this.setGPTPopupText(res);
+            } else {
+                this.setGPTPopupText('Something went wrong.');
+            }
+        } catch (err) {
+            console.error(err);
+        }
+
+        this.setLoading(false);
+    };
+
+    /**
+     * Makes a GPT call to edit selected text.
+     * @returns nothing
+     */
+    gptEdit = async () => {
+        if (!this.editorView) return;
+        this.setHighlightRange(undefined);
+        const state = this.editorView.state;
+        const sel = state.selection;
+        const fullText = state.doc.textBetween(0, this.editorView.state.doc.content.size, ' \n');
+        const selectedText = state.doc.textBetween(sel.from, sel.to);
+
+        this.setGPTPopupVis(true);
+        this.setGPTMode(GPTPopupMode.EDIT);
+        this.setLoading(true);
+
+        try {
+            let res = await gptAPICall(selectedText, GPTCallType.EDIT);
+            // let res = await this.mockGPTCall();
+            if (!res) return;
+            res = res.trim();
+            const resultText = fullText.slice(0, sel.from - 1) + res + fullText.slice(sel.to - 1);
+
+            if (res) {
+                this.setGPTPopupText(resultText);
+                this.setHighlightRange([sel.from - 1, sel.from - 1 + res.length]);
+            } else {
+                this.setGPTPopupText('Something went wrong.');
+            }
+        } catch (err) {
+            console.error(err);
+        }
+
+        this.setLoading(false);
+    };
+
+    /**
+     * Replaces text suggestions from GPT.
+     */
+    replaceText = (replacement: string) => {
+        if (!this.editorView || !this.textDoc) return;
+        this.textDoc.text = replacement;
+    };
+
     pointerDown = (e: React.PointerEvent) => {
         setupMoveUpEvents(
             this,
@@ -180,6 +309,31 @@ export class AnchorMenu extends AntimodeMenu<AntimodeMenuProps> {
         this.highlightColor = Utils.colorString(col);
     };

+    /**
+     * Returns whether the selected text can be summarized. The goal is to have
+     * all selected text available to summarize but its only supported for pdf and web ATM.
+     * @returns Whether the GPT icon for summarization should appear
+     */
+    canSummarize = (): boolean => {
+        const docs = SelectionManager.Docs();
+        if (docs.length > 0) {
+            return docs.some(doc => doc.type === 'pdf' || doc.type === 'web');
+        }
+        return false;
+    };
+
+    /**
+     * Returns whether the selected text can be edited.
+     * @returns Whether the GPT icon for summarization should appear
+     */
+    canEdit = (): boolean => {
+        const docs = SelectionManager.Docs();
+        if (docs.length > 0) {
+            return docs.some(doc => doc.type === 'rtf');
+        }
+        return false;
+    };
+
     render() {
         const buttons =
             this.Status === 'marquee' ? (
@@ -190,6 +344,25 @@ export class AnchorMenu extends AntimodeMenu<AntimodeMenuProps> {
                         <FontAwesomeIcon icon="comment-alt" size="lg" />
                     </button>
                 </Tooltip>
+                {/* GPT Summarize icon only shows up when text is highlighted, not on marquee selection*/}
+                {AnchorMenu.Instance.StartCropDrag === unimplementedFunction && this.canSummarize() && (
+                    <Tooltip key="gpt" title={<div className="dash-tooltip">Summarize with AI</div>}>
+                        <button className="antimodeMenu-button annotate" onPointerDown={this.gptSummarize} style={{ cursor: 'grab' }}>
+                            <FontAwesomeIcon icon="comment-dots" size="lg" />
+                        </button>
+                    </Tooltip>
+                )}
+                <GPTPopup
+                    key="gptpopup"
+                    visible={this.showGPTPopup}
+                    text={this.GPTpopupText}
+                    highlightRange={this.highlightRange}
+                    loading={this.loadingGPT}
+                    callSummaryApi={this.gptSummarize}
+                    callEditApi={this.gptEdit}
+                    replaceText={this.replaceText}
+                    mode={this.GPTMode}
+                />
                 {AnchorMenu.Instance.OnAudio === unimplementedFunction ? null : (
                     <Tooltip key="annoaudiotate" title={<div className="dash-tooltip">Click to Record Annotation</div>}>
                         <button className="antimodeMenu-button annotate" onPointerDown={this.audioDown} style={{ cursor: 'grab' }}>
@@ -197,6 +370,13 @@ export class AnchorMenu extends AntimodeMenu<AntimodeMenuProps> {
                         </button>
                     </Tooltip>
                 )}
+                {this.canEdit() && (
+                    <Tooltip key="gpttextedit" title={<div className="dash-tooltip">AI edit suggestions</div>}>
+                        <button className="antimodeMenu-button annotate" onPointerDown={this.gptEdit} style={{ cursor: 'grab' }}>
+                            <FontAwesomeIcon icon="pencil-alt" size="lg" />
+                        </button>
+                    </Tooltip>
+                )}
                 <Tooltip key="link" title={<div className="dash-tooltip">Find document to link to selected text</div>}>
                     <button className="antimodeMenu-button link" onPointerDown={this.toggleLinkPopup}>
                         <FontAwesomeIcon style={{ position: 'absolute', transform: 'scale(1.5)' }} icon={'search'} size="lg" />
diff --git a/src/client/views/pdf/GPTPopup/GPTPopup.scss b/src/client/views/pdf/GPTPopup/GPTPopup.scss
new file mode 100644
index 000000000..44413ede7
--- /dev/null
+++ b/src/client/views/pdf/GPTPopup/GPTPopup.scss
@@ -0,0 +1,132 @@
+$textgrey: #707070;
+$lighttextgrey: #a3a3a3;
+$greyborder: #d3d3d3;
+$lightgrey: #ececec;
+$button: #5b97ff;
+$highlightedText: #82e0ff;
+
+.summary-box {
+    display: flex;
+    flex-direction: column;
+    justify-content: space-between;
+    background-color: #ffffff;
+    box-shadow: 0 2px 5px #7474748d;
+    color: $textgrey;
+    position: fixed;
+    bottom: 10px;
+    right: 10px;
+    width: 250px;
+    min-height: 200px;
+    border-radius: 15px;
+    padding: 15px;
+    padding-bottom: 0;
+    z-index: 999;
+
+    .summary-heading {
+        display: flex;
+        align-items: center;
+        border-bottom: 1px solid $greyborder;
+        padding-bottom: 5px;
+
+        .summary-text {
+            font-size: 12px;
+            font-weight: 500;
+        }
+    }
+
+    label {
+        color: $textgrey;
+        font-size: 12px;
+        font-weight: 400;
+        letter-spacing: 1px;
+        margin: 0;
+        padding-right: 5px;
+    }
+
+    a {
+        cursor: pointer;
+    }
+
+    .content-wrapper {
+        padding-top: 10px;
+        min-height: 50px;
+        max-height: 150px;
+        overflow-y: auto;
+    }
+
+    .btns-wrapper {
+        height: 50px;
+        display: flex;
+        justify-content: space-between;
+        align-items: center;
+
+        .summarizing {
+            display: flex;
+            align-items: center;
+        }
+    }
+
+    button {
+        font-size: 9px;
+        padding: 10px;
+        color: #ffffff;
+        background-color: $button;
+        border-radius: 5px;
+    }
+
+    .text-btn {
+        &:hover {
+            background-color: $button;
+        }
+    }
+
+    .btn-secondary {
+        font-size: 8px;
+        padding: 10px 5px;
+        background-color: $lightgrey;
+        color: $textgrey;
+        &:hover {
+            background-color: $lightgrey;
+        }
+    }
+
+    .icon-btn {
+        background-color: #ffffff;
+        padding: 10px;
+        border-radius: 50%;
+        color: $button;
+        border: 1px solid $button;
+    }
+
+    .ai-warning {
+        padding: 10px 0;
+        font-size: 10px;
+        color: $lighttextgrey;
+        border-top: 1px solid $greyborder;
+    }
+
+    .highlighted-text {
+        background-color: $highlightedText;
+    }
+}
+
+// Typist CSS
+.Typist .Cursor {
+    display: inline-block;
+}
+.Typist .Cursor--blinking {
+    opacity: 1;
+    animation: blink 1s linear infinite;
+}
+
+@keyframes blink {
+    0% {
+        opacity: 1;
+    }
+    50% {
+        opacity: 0;
+    }
+    100% {
+        opacity: 1;
+    }
+}
diff --git a/src/client/views/pdf/GPTPopup/GPTPopup.tsx b/src/client/views/pdf/GPTPopup/GPTPopup.tsx
new file mode 100644
index 000000000..cc0072a94
--- /dev/null
+++ b/src/client/views/pdf/GPTPopup/GPTPopup.tsx
@@ -0,0 +1,188 @@
+import React = require('react');
+import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
+import { action, observable } from 'mobx';
+import { observer } from 'mobx-react';
+import ReactLoading from 'react-loading';
+import Typist from 'react-typist';
+import { Doc } from '../../../../fields/Doc';
+import { Docs } from '../../../documents/Documents';
+import './GPTPopup.scss';
+
+export enum GPTPopupMode {
+    SUMMARY,
+    EDIT,
+}
+
+interface GPTPopupProps {
+    visible: boolean;
+    text: string;
+    loading: boolean;
+    mode: GPTPopupMode;
+    callSummaryApi: (e: React.PointerEvent) => Promise<void>;
+    callEditApi: (e: React.PointerEvent) => Promise<void>;
+    replaceText: (replacement: string) => void;
+    highlightRange?: number[];
+}
+
+@observer
+export class GPTPopup extends React.Component<GPTPopupProps> {
+    static Instance: GPTPopup;
+
+    @observable
+    private done: boolean = false;
+    @observable
+    private sidebarId: string = '';
+
+    @action
+    public setDone = (done: boolean) => {
+        this.done = done;
+    };
+    @action
+    public setSidebarId = (id: string) => {
+        this.sidebarId = id;
+    };
+
+    public addDoc: (doc: Doc | Doc[], sidebarKey?: string | undefined) => boolean = () => false;
+
+    /**
+     * Transfers the summarization text to a sidebar annotation text document.
+     */
+    private transferToText = () => {
+        const newDoc = Docs.Create.TextDocument(this.props.text.trim(), {
+            _width: 200,
+            _height: 50,
+            _fitWidth: true,
+            _autoHeight: true,
+        });
+        this.addDoc(newDoc, this.sidebarId);
+    };
+
+    constructor(props: GPTPopupProps) {
+        super(props);
+        GPTPopup.Instance = this;
+    }
+
+    componentDidUpdate = () => {
+        if (this.props.loading) {
+            this.setDone(false);
+        }
+    };
+
+    summaryBox = () => (
+        <>
+            <div>
+                {this.heading('SUMMARY')}
+                <div className="content-wrapper">
+                    {!this.props.loading &&
+                        (!this.done ? (
+                            <Typist
+                                key={this.props.text}
+                                avgTypingDelay={15}
+                                cursor={{ hideWhenDone: true }}
+                                onTypingDone={() => {
+                                    setTimeout(() => {
+                                        this.setDone(true);
+                                    }, 500);
+                                }}>
+                                {this.props.text}
+                            </Typist>
+                        ) : (
+                            this.props.text
+                        ))}
+                </div>
+            </div>
+            {!this.props.loading && (
+                <div className="btns-wrapper">
+                    {this.done ? (
+                        <>
+                            <button className="icon-btn" onPointerDown={e => this.props.callSummaryApi(e)}>
+                                <FontAwesomeIcon icon="redo-alt" size="lg" />
+                            </button>
+                            <button
+                                className="text-btn"
+                                onClick={e => {
+                                    this.transferToText();
+                                }}>
+                                Transfer to Text
+                            </button>
+                        </>
+                    ) : (
+                        <div className="summarizing">
+                            <span>Summarizing</span>
+                            <ReactLoading type="bubbles" color="#bcbcbc" width={20} height={20} />
+                            <button
+                                className="btn-secondary"
+                                onClick={e => {
+                                    this.setDone(true);
+                                }}>
+                                Stop Animation
+                            </button>
+                        </div>
+                    )}
+                </div>
+            )}
+        </>
+    );
+
+    editBox = () => {
+        const hr = this.props.highlightRange;
+        return (
+            <>
+                <div>
+                    {this.heading('TEXT EDIT SUGGESTIONS')}
+                    <div className="content-wrapper">
+                        {hr && (
+                            <div>
+                                {this.props.text.slice(0, hr[0])} <span className="highlighted-text">{this.props.text.slice(hr[0], hr[1])}</span> {this.props.text.slice(hr[1])}
+                            </div>
+                        )}
+                    </div>
+                </div>
+                {hr && !this.props.loading && (
+                    <>
+                        <div className="btns-wrapper">
+                            <>
+                                <button className="icon-btn" onPointerDown={e => this.props.callEditApi(e)}>
+                                    <FontAwesomeIcon icon="redo-alt" size="lg" />
+                                </button>
+                                <button
+                                    className="text-btn"
+                                    onClick={e => {
+                                        this.props.replaceText(this.props.text);
+                                    }}>
+                                    Replace Text
+                                </button>
+                            </>
+                        </div>
+                        {this.aiWarning()}
+                    </>
+                )}
+            </>
+        );
+    };
+
+    aiWarning = () =>
+        this.done ? (
+            <div className="ai-warning">
+                <FontAwesomeIcon icon="exclamation-circle" size="sm" style={{ paddingRight: '5px' }} />
+                AI generated responses can contain inaccurate or misleading content.
+            </div>
+        ) : (
+            <></>
+        );
+
+    heading = (headingText: string) => (
+        <div className="summary-heading">
+            <label className="summary-text">{headingText}</label>
+            {this.props.loading && <ReactLoading type="spin" color="#bcbcbc" width={14} height={14} />}
+        </div>
+    );
+
+    render() {
+        return (
+            <div className="summary-box" style={{ display: this.props.visible ? 'flex' : 'none' }}>
+                {this.props.mode === GPTPopupMode.SUMMARY ? this.summaryBox() : this.editBox()}
+            </div>
+        );
+    }
+}
diff --git a/src/client/views/pdf/PDFViewer.tsx b/src/client/views/pdf/PDFViewer.tsx
index b9a22473a..68241e61f 100644
--- a/src/client/views/pdf/PDFViewer.tsx
+++ b/src/client/views/pdf/PDFViewer.tsx
@@ -24,6 +24,7 @@ import { AnchorMenu } from './AnchorMenu';
 import { Annotation } from './Annotation';
 import './PDFViewer.scss';
 import React = require('react');
+import { GPTPopup } from './GPTPopup/GPTPopup';
 const PDFJSViewer = require('pdfjs-dist/web/pdf_viewer');
 const pdfjsLib = require('pdfjs-dist');
 const _global = (window /* browser */ || global) /* node */ as any;
@@ -40,9 +41,10 @@ interface IViewerProps extends FieldViewProps {
     fieldKey: string;
     pdf: Pdfjs.PDFDocumentProxy;
     url: string;
+    sidebarAddDoc: (doc: Doc | Doc[], sidebarKey?: string | undefined) => boolean;
     loaded?: (nw: number, nh: number, np: number) => void;
     setPdfViewer: (view: PDFViewer) => void;
-    anchorMenuClick?: () => undefined | ((anchor: Doc) => void);
+    anchorMenuClick?: () => undefined | ((anchor: Doc, summarize?: boolean) => void);
     crop: (region: Doc | undefined, addCrop?: boolean) => Doc | undefined;
 }

@@ -409,10 +411,18 @@ export class PDFViewer extends React.Component<IViewerProps> {
         document.removeEventListener('pointerup', this.onSelectEnd);

         const sel = window.getSelection();
+        if (sel) {
+            AnchorMenu.Instance.setSelectedText(sel.toString());
+        }
+
         if (sel?.type === 'Range') {
            this.createTextAnnotation(sel, sel.getRangeAt(0));
             AnchorMenu.Instance.jumpTo(e.clientX, e.clientY);
         }
+
+        // Changing which document to add the annotation to (the currently selected PDF)
+        GPTPopup.Instance.setSidebarId('data-sidebar');
+        GPTPopup.Instance.addDoc = this.props.sidebarAddDoc;
     };

     @action
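For reference, a minimal usage sketch of the helpers introduced in src/client/apis/gpt/GPT.ts above. The import path and sample prompts are illustrative only, and the sketch assumes OPENAI_KEY is set in the environment, as the module itself expects.

// Illustrative only: exercises gptAPICall and gptImageCall as declared in GPT.ts.
// Adjust the relative import path to the caller's location within src/client.
import { gptAPICall, gptImageCall, GPTCallType } from './apis/gpt/GPT';

const demo = async () => {
    // Summarize a passage using the 'summary' preset defined in callTypeMap.
    const summary = await gptAPICall('Text selected in a PDF or web document that the user wants condensed.', GPTCallType.SUMMARY);
    console.log('Summary:', summary);

    // Request a DALL-E image for a text prompt; gptImageCall returns a URL or undefined on failure.
    const imageUrl = await gptImageCall('a watercolor sketch of a lighthouse at dusk');
    console.log('Image URL:', imageUrl ?? 'image generation failed');
};

demo();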