Diffstat (limited to 'src/client/views/nodes/formattedText/FormattedTextBox.tsx')
-rw-r--r-- | src/client/views/nodes/formattedText/FormattedTextBox.tsx | 89
1 file changed, 57 insertions, 32 deletions
diff --git a/src/client/views/nodes/formattedText/FormattedTextBox.tsx b/src/client/views/nodes/formattedText/FormattedTextBox.tsx
index 212d24165..9f4483e8d 100644
--- a/src/client/views/nodes/formattedText/FormattedTextBox.tsx
+++ b/src/client/views/nodes/formattedText/FormattedTextBox.tsx
@@ -70,6 +70,7 @@ import { schema } from './schema_rts';
 import { SummaryView } from './SummaryView';
 import applyDevTools = require('prosemirror-dev-tools');
 import React = require('react');
+import { GPTPopup, GPTPopupMode } from '../../pdf/GPTPopup/GPTPopup';
 const translateGoogleApi = require('translate-google-api');
 export const GoogleRef = 'googleDocId';
 type PullHandler = (exportState: Opt<GoogleApiClientUtils.Docs.ImportResult>, dataDoc: Doc) => void;
@@ -896,12 +897,13 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<FieldViewProps
         this._downX = this._downY = Number.NaN;
     };
 
-    animateRes = (resIndex: number) => {
-        console.log(this.dataDoc.text);
-        if (resIndex < this.gptRes.length) {
-            this.dataDoc.text = (this.dataDoc.text as RichTextField)?.Text + this.gptRes[resIndex];
+    animateRes = (resIndex: number, newText: string) => {
+        if (resIndex < newText.length) {
+            const marks = this._editorView?.state.storedMarks ?? [];
+            // if (!marks) return;
+            this._editorView?.dispatch(this._editorView.state.tr.setStoredMarks(marks).insertText(newText[resIndex]).setStoredMarks(marks));
             setTimeout(() => {
-                this.animateRes(resIndex + 1);
+                this.animateRes(resIndex + 1, newText);
             }, 20);
         }
     };
@@ -912,47 +914,68 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<FieldViewProps
     //     the._editorView.dispatch(state.tr.setSelection(updated).insertText('\n, to))
     askGPT = action(async () => {
+        // const state = this._editorView?.state;
+        // if (!state) return;
+        // const to = state.selection.to;
+        // const updated = TextSelection.create(state.doc, to, to);
+        // this._editorView?.dispatch(state.tr.setSelection(updated).insertText('\n', to));
+        // this._editorView?.dispatch(this._editorView.state.tr.setStoredMarks(marks).insertText('\nTesting').setStoredMarks(marks));
+        // console.log('After ', this._editorView?.state.storedMarks);
         try {
             let res = await gptAPICall((this.dataDoc.text as RichTextField)?.Text, GPTCallType.COMPLETION);
-            if (res) {
-                this.gptRes = res;
-                this.animateRes(0);
+            if (!res) {
+                console.error('GPT call failed');
+                this.animateRes(0, 'Something went wrong.');
+            } else {
+                this.animateRes(0, res);
             }
         } catch (err) {
-            console.log(err);
-            this.dataDoc.text = (this.dataDoc.text as RichTextField)?.Text + 'Something went wrong';
+            console.error('GPT call failed');
+            this.animateRes(0, 'Something went wrong.');
         }
     });
 
     generateImage = async () => {
         console.log('Generate image from text: ', (this.dataDoc.text as RichTextField)?.Text);
+        GPTPopup.Instance?.setImgTargetDoc(this.rootDoc);
+        GPTPopup.Instance.setImgUrls([]);
+        GPTPopup.Instance.setMode(GPTPopupMode.IMAGE);
+        GPTPopup.Instance.setVisible(true);
+        GPTPopup.Instance.addToCollection = this.props.addDocument;
+        GPTPopup.Instance.setLoading(true);
+
         try {
-            let image_url = await gptImageCall((this.dataDoc.text as RichTextField)?.Text);
-            if (image_url) {
-                const [result] = await Networking.PostToServer('/uploadRemoteImage', { sources: [image_url] });
+            // make this support multiple images
+            let image_urls = await gptImageCall((this.dataDoc.text as RichTextField)?.Text);
+            console.log(image_urls);
+            if (image_urls) {
+                const [result] = await Networking.PostToServer('/uploadRemoteImage', { sources: [image_urls[0]] });
                 const source = Utils.prepend(result.accessPaths.agnostic.client);
-                const newDoc = Docs.Create.ImageDocument(source, {
-                    x: NumCast(this.rootDoc.x) + NumCast(this.layoutDoc._width) + 10,
-                    y: NumCast(this.rootDoc.y),
-                    _height: 200,
-                    _width: 200,
-                    data_nativeWidth: result.nativeWidth,
-                    data_nativeHeight: result.nativeHeight,
-                });
-                if (Doc.IsInMyOverlay(this.rootDoc)) {
-                    newDoc.overlayX = this.rootDoc.x;
-                    newDoc.overlayY = NumCast(this.rootDoc.y) + NumCast(this.rootDoc._height);
-                    Doc.AddToMyOverlay(newDoc);
-                } else {
-                    this.props.addDocument?.(newDoc);
-                }
-                // Create link between prompt and image
-                DocUtils.MakeLink(this.rootDoc, newDoc, { link_relationship: 'Image Prompt' });
+                GPTPopup.Instance.setImgUrls([source]);
+
+                // const newDoc = Docs.Create.ImageDocument(source, {
+                //     x: NumCast(this.rootDoc.x) + NumCast(this.layoutDoc._width) + 10,
+                //     y: NumCast(this.rootDoc.y),
+                //     _height: 200,
+                //     _width: 200,
+                //     data_nativeWidth: result.nativeWidth,
+                //     data_nativeHeight: result.nativeHeight,
+                // });
+                // if (Doc.IsInMyOverlay(this.rootDoc)) {
+                //     newDoc.overlayX = this.rootDoc.x;
+                //     newDoc.overlayY = NumCast(this.rootDoc.y) + NumCast(this.rootDoc._height);
+                //     Doc.AddToMyOverlay(newDoc);
+                // } else {
+                //     this.props.addDocument?.(newDoc);
+                // }
+                // // Create link between prompt and image
+                // DocUtils.MakeLink(this.rootDoc, newDoc, { link_relationship: 'Image Prompt' });
             }
         } catch (err) {
             console.log(err);
             return '';
         }
+        GPTPopup.Instance.setLoading(false);
    };
 
     breakupDictation = () => {
@@ -1249,8 +1272,10 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<FieldViewProps
                 }
                 // Accessing editor and text doc for gpt assisted text edits
                 if (this._editorView && selected) {
-                    AnchorMenu.Instance?.setEditorView(this._editorView);
-                    AnchorMenu.Instance?.setTextDoc(this.dataDoc);
+                    console.log('Setting');
+                    GPTPopup.Instance?.setTextAnchor(this.getAnchor(false));
+                    // AnchorMenu.Instance?.setEditorView(this._editorView);
+                    // AnchorMenu.Instance?.setTextDoc(this.dataDoc);
                 }
             }),
             { fireImmediately: true }
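The rewritten `animateRes` types the GPT response into the ProseMirror editor one character per 20 ms tick, re-applying the stored marks around each `insertText` so the formatting active at the cursor survives every single-character transaction. A minimal standalone sketch of that technique, assuming only a live `EditorView` (the names `typeText` and `delayMs` are illustrative, not part of the diff):

```typescript
import { EditorView } from 'prosemirror-view';

// Typewriter effect: one character per transaction. Stored marks are captured
// and re-applied on both sides of insertText, since inserting text otherwise
// clears them and would drop bold/italic/color mid-animation.
function typeText(view: EditorView, text: string, index = 0, delayMs = 20): void {
    if (index >= text.length) return;
    const marks = view.state.storedMarks ?? [];
    view.dispatch(view.state.tr.setStoredMarks(marks).insertText(text[index]).setStoredMarks(marks));
    setTimeout(() => typeText(view, text, index + 1, delayMs), delayMs);
}
```

Compared with the old implementation, which appended to `dataDoc.text` by re-serializing the whole `RichTextField`, dispatching small transactions keeps the animation inside the editor state and leaves the user's cursor and marks intact.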
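The commented-out block at the top of `askGPT` sketches a related refinement: collapsing the selection to its end and inserting a newline there, so the streamed completion starts on a fresh line instead of replacing the selected prompt text. Roughly, following the `TextSelection` usage in those comments (the `startOnNewLine` wrapper is a hypothetical name):

```typescript
import { TextSelection } from 'prosemirror-state';
import { EditorView } from 'prosemirror-view';

// Collapse the selection to its end, then insert a newline at that point so
// subsequent insertText calls append below the prompt rather than over it.
function startOnNewLine(view: EditorView): void {
    const { state } = view;
    const to = state.selection.to;
    view.dispatch(state.tr.setSelection(TextSelection.create(state.doc, to, to)).insertText('\n', to));
}
```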
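The `generateImage` rewrite routes the result through the `GPTPopup` singleton instead of building an `ImageDocument` inline (that code is now commented out). Condensed, the new flow looks like the sketch below; it assumes only the `GPTPopup` setters that appear in the diff, elides the Dash-specific imports, and wraps the work in `finally` as a hedged improvement, since in the diff the `catch` branch returns before `setLoading(false)` and would leave the spinner visible after a failure:

```typescript
// Sketch of the popup-driven flow. GPTPopup, GPTPopupMode, Networking, Utils,
// Doc, and gptImageCall come from the Dash codebase, as in the diff's import hunk.
async function generateImageViaPopup(prompt: string, targetDoc: Doc): Promise<void> {
    const popup = GPTPopup.Instance;
    popup.setImgTargetDoc(targetDoc);      // document the generated image belongs to
    popup.setImgUrls([]);                  // clear any previous results
    popup.setMode(GPTPopupMode.IMAGE);
    popup.setVisible(true);
    popup.setLoading(true);
    try {
        const urls = await gptImageCall(prompt); // TODO in diff: support multiple images
        if (urls?.length) {
            // Re-host the remote image so it outlives the provider's expiring URL.
            const [result] = await Networking.PostToServer('/uploadRemoteImage', { sources: [urls[0]] });
            popup.setImgUrls([Utils.prepend(result.accessPaths.agnostic.client)]);
        }
    } finally {
        popup.setLoading(false);           // runs on the error path too, unlike the diff
    }
}
```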