Diffstat (limited to 'src/client/views/nodes')
-rw-r--r--  src/client/views/nodes/ImageBox.tsx                         |  2
-rw-r--r--  src/client/views/nodes/PDFBox.tsx                           |  1
-rw-r--r--  src/client/views/nodes/WebBox.tsx                           | 23
-rw-r--r--  src/client/views/nodes/formattedText/FormattedTextBox.scss  |  4
-rw-r--r--  src/client/views/nodes/formattedText/FormattedTextBox.tsx   | 79
5 files changed, 107 insertions(+), 2 deletions(-)
diff --git a/src/client/views/nodes/ImageBox.tsx b/src/client/views/nodes/ImageBox.tsx
index c6d4cb694..60f799463 100644
--- a/src/client/views/nodes/ImageBox.tsx
+++ b/src/client/views/nodes/ImageBox.tsx
@@ -344,7 +344,7 @@ export class ImageBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProp
@computed get nativeSize() {
TraceMobx();
const nativeWidth = NumCast(this.dataDoc[this.fieldKey + '-nativeWidth'], NumCast(this.layoutDoc[this.fieldKey + '-nativeWidth'], 500));
- const nativeHeight = NumCast(this.dataDoc[this.fieldKey + '-nativeHeight'], NumCast(this.layoutDoc[this.fieldKey + '-nativeHeight'], 1));
+ const nativeHeight = NumCast(this.dataDoc[this.fieldKey + '-nativeHeight'], NumCast(this.layoutDoc[this.fieldKey + '-nativeHeight'], 500));
const nativeOrientation = NumCast(this.dataDoc[this.fieldKey + '-nativeOrientation'], 1);
return { nativeWidth, nativeHeight, nativeOrientation };
}
diff --git a/src/client/views/nodes/PDFBox.tsx b/src/client/views/nodes/PDFBox.tsx
index 5f207cc0d..7e0b5c4d3 100644
--- a/src/client/views/nodes/PDFBox.tsx
+++ b/src/client/views/nodes/PDFBox.tsx
@@ -492,6 +492,7 @@ export class PDFBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProps
}}>
<PDFViewer
{...this.props}
+ sidebarAddDoc={this.sidebarAddDocument}
rootDoc={this.rootDoc}
addDocTab={this.sidebarAddDocTab}
layoutDoc={this.layoutDoc}
diff --git a/src/client/views/nodes/WebBox.tsx b/src/client/views/nodes/WebBox.tsx
index b1acef44a..5f9420a54 100644
--- a/src/client/views/nodes/WebBox.tsx
+++ b/src/client/views/nodes/WebBox.tsx
@@ -14,7 +14,6 @@ import { TraceMobx } from '../../../fields/util';
import { emptyFunction, getWordAtPoint, OmitKeys, returnFalse, returnOne, setupMoveUpEvents, smoothScroll, StopEvent, Utils } from '../../../Utils';
import { Docs, DocUtils } from '../../documents/Documents';
import { DocumentManager } from '../../util/DocumentManager';
-import { DragManager } from '../../util/DragManager';
import { ScriptingGlobals } from '../../util/ScriptingGlobals';
import { SnappingManager } from '../../util/SnappingManager';
import { undoBatch, UndoManager } from '../../util/UndoManager';
@@ -36,6 +35,9 @@ import { LinkDocPreview } from './LinkDocPreview';
import { PinProps, PresBox } from './trails';
import './WebBox.scss';
import React = require('react');
+import { DragManager } from '../../util/DragManager';
+import { gptAPICall, GPTCallType } from '../../apis/gpt/Summarization';
+import { GPTPopup } from '../pdf/GPTPopup';
const { CreateImage } = require('./WebBoxRenderer');
const _global = (window /* browser */ || global) /* node */ as any;
const htmlToText = require('html-to-text');
@@ -58,6 +60,19 @@ export class WebBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProps
private _searchRef = React.createRef<HTMLInputElement>();
private _searchString = '';
+ // GPT Additions
+ private _summaryText: string = '';
+ setSummaryText = async () => {
+ try {
+ const summary = await gptAPICall(this.selectionText(), GPTCallType.SUMMARY);
+ this._summaryText = `Summary: ${summary}`;
+ } catch (err) {
+ console.log(err);
+ this._summaryText = 'Failed to fetch summary.';
+ }
+ };
+ summaryText = () => this._summaryText;
+
private get _getAnchor() {
return AnchorMenu.Instance?.GetAnchor;
}
@@ -359,8 +374,13 @@ export class WebBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProps
const scale = (this.props.NativeDimScaling?.() || 1) * mainContBounds.scale;
const sel = this._iframe.contentWindow.getSelection();
if (sel) {
+ this._selectionText = sel.toString();
+ AnchorMenu.Instance.setSelectedText(sel.toString());
this._textAnnotationCreator = () => this.createTextAnnotation(sel, !sel.isCollapsed ? sel.getRangeAt(0) : undefined);
AnchorMenu.Instance.jumpTo(e.clientX * scale + mainContBounds.translateX, e.clientY * scale + mainContBounds.translateY - NumCast(this.layoutDoc._scrollTop) * scale);
+ // Changing which document to add the annotation to (the currently selected WebBox)
+ GPTPopup.Instance.setSidebarId(`${this.props.fieldKey}-${this._urlHash}-sidebar`);
+ GPTPopup.Instance.addDoc = this.sidebarAddDocument;
}
}
};
@@ -778,6 +798,7 @@ export class WebBox extends ViewBoxAnnotatableComponent<ViewBoxAnnotatableProps
}
addDocumentWrapper = (doc: Doc | Doc[], annotationKey?: string) => {
+ console.log(annotationKey);
(doc instanceof Doc ? [doc] : doc).forEach(doc => (doc.webUrl = this._url));
return this.addDocument(doc, annotationKey);
};
diff --git a/src/client/views/nodes/formattedText/FormattedTextBox.scss b/src/client/views/nodes/formattedText/FormattedTextBox.scss
index cbe0a465d..fd7fbb333 100644
--- a/src/client/views/nodes/formattedText/FormattedTextBox.scss
+++ b/src/client/views/nodes/formattedText/FormattedTextBox.scss
@@ -149,6 +149,10 @@ audiotag:hover {
}
}
+.gpt-typing-wrapper {
+ padding: 10px;
+}
+
// .menuicon {
// display: inline-block;
// border-right: 1px solid rgba(0, 0, 0, 0.2);
diff --git a/src/client/views/nodes/formattedText/FormattedTextBox.tsx b/src/client/views/nodes/formattedText/FormattedTextBox.tsx
index d857b150c..60be9775c 100644
--- a/src/client/views/nodes/formattedText/FormattedTextBox.tsx
+++ b/src/client/views/nodes/formattedText/FormattedTextBox.tsx
@@ -63,6 +63,9 @@ import { schema } from './schema_rts';
import { SummaryView } from './SummaryView';
import applyDevTools = require('prosemirror-dev-tools');
import React = require('react');
+import { Configuration, OpenAIApi } from 'openai';
+import { Networking } from '../../../Network';
+import { gptAPICall, GPTCallType } from '../../../apis/gpt/Summarization';
import { PinProps, PresBox } from '../trails';
const translateGoogleApi = require('translate-google-api');
@@ -170,6 +173,10 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<FieldViewProps
};
}
+ // State for GPT
+ @observable
+ private gptRes: string = '';
+
public static PasteOnLoad: ClipboardEvent | undefined;
public static SelectOnLoad = '';
public static DontSelectInitialText = false; // whether initial text should be selected or not
@@ -844,12 +851,84 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<FieldViewProps
const options = cm.findByDescription('Options...');
const optionItems = options && 'subitems' in options ? options.subitems : [];
+ optionItems.push({ description: `Generate Dall-E Image`, event: () => this.generateImage(), icon: 'star' });
+ optionItems.push({ description: `Ask GPT-3`, event: () => this.askGPT(), icon: 'lightbulb' });
optionItems.push({ description: !this.Document._singleLine ? 'Make Single Line' : 'Make Multi Line', event: () => (this.layoutDoc._singleLine = !this.layoutDoc._singleLine), icon: !this.Document._singleLine ? 'grip-lines' : 'bars' });
optionItems.push({ description: `${this.Document._autoHeight ? 'Lock' : 'Auto'} Height`, event: () => (this.layoutDoc._autoHeight = !this.layoutDoc._autoHeight), icon: this.Document._autoHeight ? 'lock' : 'unlock' });
!options && cm.addItem({ description: 'Options...', subitems: optionItems, icon: 'eye' });
this._downX = this._downY = Number.NaN;
};
+ mockGPT = async (): Promise<string> => {
+ return new Promise((resolve, reject) => {
+ setTimeout(() => {
+ resolve('Mock GPT Call.');
+ }, 2000);
+ });
+ };
+
+ animateRes = (resIndex: number) => {
+ if (resIndex < this.gptRes.length) {
+ this.dataDoc.text = (this.dataDoc.text as RichTextField)?.Text + this.gptRes[resIndex];
+ setTimeout(() => {
+ this.animateRes(resIndex + 1);
+ }, 20);
+ }
+ };
+
+ askGPT = action(async () => {
+ try {
+ let res = await gptAPICall((this.dataDoc.text as RichTextField)?.Text, GPTCallType.COMPLETION);
+ // let res = await this.mockGPT();
+ if (res) {
+ this.gptRes = res;
+ this.animateRes(0);
+ }
+ } catch (err) {
+ console.log(err);
+ this.dataDoc.text = (this.dataDoc.text as RichTextField)?.Text + ' Something went wrong.';
+ }
+ });
+
+ generateImage = async () => {
+ console.log('Generate image from text: ', (this.dataDoc.text as RichTextField)?.Text);
+ try {
+ const configuration = new Configuration({
+ apiKey: process.env.OPENAI_KEY,
+ });
+ const openai = new OpenAIApi(configuration);
+ const response = await openai.createImage({
+ prompt: (this.dataDoc.text as RichTextField)?.Text,
+ n: 1,
+ size: '1024x1024',
+ });
+ let image_url = response.data.data[0].url;
+ console.log(image_url);
+ if (image_url) {
+ const [{ accessPaths }] = await Networking.PostToServer('/uploadRemoteImage', { sources: [image_url] });
+ const source = Utils.prepend(accessPaths.agnostic.client);
+ const newDoc = Docs.Create.ImageDocument(source, {
+ x: NumCast(this.rootDoc.x) + NumCast(this.layoutDoc._width) + 10,
+ y: NumCast(this.rootDoc.y),
+ _height: 200,
+ _width: 200,
+ });
+ if (DocListCast(Doc.MyOverlayDocs?.data).includes(this.rootDoc)) {
+ newDoc.overlayX = this.rootDoc.x;
+ newDoc.overlayY = NumCast(this.rootDoc.y) + NumCast(this.rootDoc._height);
+ Doc.AddDocToList(Doc.MyOverlayDocs, undefined, newDoc);
+ } else {
+ this.props.addDocument?.(newDoc);
+ }
+ // Create link between prompt and image
+ DocUtils.MakeLink({ doc: this.rootDoc }, { doc: newDoc }, 'Image Prompt');
+ }
+ } catch (err) {
+ console.log(err);
+ return '';
+ }
+ };
+
breakupDictation = () => {
if (this._editorView && this._recording) {
this.stopDictation(true);