diff options
Diffstat (limited to 'src')
-rw-r--r-- | src/client/apis/gpt/GPT.ts | 9
-rw-r--r-- | src/client/views/nodes/ComparisonBox.tsx | 505
-rw-r--r-- | src/client/views/nodes/ImageBox.tsx | 2
-rw-r--r-- | src/client/views/nodes/formattedText/FormattedTextBox.tsx | 48
-rw-r--r-- | src/client/views/pdf/AnchorMenu.tsx | 2
-rw-r--r-- | src/client/views/pdf/PDFViewer.tsx | 138
6 files changed, 559 insertions(+), 145 deletions(-)
diff --git a/src/client/apis/gpt/GPT.ts b/src/client/apis/gpt/GPT.ts index 7bcd541c7..caddc41a2 100644 --- a/src/client/apis/gpt/GPT.ts +++ b/src/client/apis/gpt/GPT.ts @@ -12,6 +12,7 @@ enum GPTCallType { DESCRIBE = 'describe', MERMAID = 'mermaid', DATA = 'data', + STACK = 'stack', } type GPTCallOpts = { @@ -26,6 +27,12 @@ const callTypeMap: { [type: string]: GPTCallOpts } = { summary: { model: 'gpt-4-turbo', maxTokens: 256, temp: 0.5, prompt: 'Summarize the text given in simpler terms.' }, edit: { model: 'gpt-4-turbo', maxTokens: 256, temp: 0.5, prompt: 'Reword the text.' }, flashcard: { model: 'gpt-4-turbo', maxTokens: 512, temp: 0.5, prompt: 'Make flashcards out of this text with each question and answer labeled as question and answer. Do not label each flashcard and do not include asterisks: ' }, + stack: { + model: 'gpt-4o', + maxTokens: 2048, + temp: 0.7, + prompt: 'Create a stack of flashcards out of this text with each question and answer labeled as question and answer. For some questions, ask "what is this image of" and write a keyword that represents the image and label it "keyword". Otherwise, write none. Do not label each flashcard and do not include asterisks.', + }, completion: { model: 'gpt-4-turbo', maxTokens: 256, temp: 0.5, prompt: "You are a helpful assistant. Answer the user's prompt." }, mermaid: { model: 'gpt-4-turbo', @@ -64,7 +71,7 @@ let lastResp = ''; * @returns AI Output */ const gptAPICall = async (inputTextIn: string, callType: GPTCallType, prompt?: any) => { - const inputText = [GPTCallType.SUMMARY, GPTCallType.FLASHCARD, GPTCallType.QUIZ].includes(callType) ? inputTextIn + '.' : inputTextIn; + const inputText = [GPTCallType.SUMMARY, GPTCallType.FLASHCARD, GPTCallType.QUIZ, GPTCallType.STACK].includes(callType) ? inputTextIn + '.' 
: inputTextIn; const opts: GPTCallOpts = callTypeMap[callType]; if (lastCall === inputText) return lastResp; try { diff --git a/src/client/views/nodes/ComparisonBox.tsx b/src/client/views/nodes/ComparisonBox.tsx index 3d33ff862..06ecf8893 100644 --- a/src/client/views/nodes/ComparisonBox.tsx +++ b/src/client/views/nodes/ComparisonBox.tsx @@ -9,7 +9,7 @@ import { Doc, Opt } from '../../../fields/Doc'; import { DocData } from '../../../fields/DocSymbols'; import { RichTextField } from '../../../fields/RichTextField'; import { DocCast, NumCast, RTFCast, StrCast, toList } from '../../../fields/Types'; -import { GPTCallType, gptAPICall } from '../../apis/gpt/GPT'; +import { GPTCallType, gptAPICall, gptImageLabel } from '../../apis/gpt/GPT'; import '../pdf/GPTPopup/GPTPopup.scss'; import { DocUtils } from '../../documents/DocUtils'; import { DocumentType } from '../../documents/DocumentTypes'; @@ -29,7 +29,12 @@ import { ContextMenu } from '../ContextMenu'; import { ContextMenuProps } from '../ContextMenuItem'; import { tickStep } from 'd3'; import { CollectionCarouselView } from '../collections/CollectionCarouselView'; +import { FollowLinkScript } from '../../documents/DocUtils'; +import { schema } from '../nodes/formattedText/schema_rts'; +import { Id } from '../../../fields/FieldSymbols'; +import axios from 'axios'; +const API_URL = 'https://api.unsplash.com/search/photos'; @observer export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>() { public static LayoutString(fieldKey: string) { @@ -39,36 +44,110 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>() constructor(props: FieldViewProps) { super(props); makeObservable(this); + this.setListening(); } @observable private _inputValue = ''; @observable private _outputValue = ''; @observable private _loading = false; @observable private _isEmpty = false; + @observable childActive = false; @observable _yRelativeToTop: boolean = true; - - @action handleInputChange = 
(e: React.ChangeEvent<HTMLTextAreaElement>) => { - this._inputValue = e.target.value; - console.log(this._inputValue); - }; - @observable _animating = ''; + private _ref = React.createRef<HTMLDivElement>(); - @computed get clipWidth() { - return NumCast(this.layoutDoc[this.clipWidthKey], 50); + get revealOp() { + return this.layoutDoc[`_${this._props.fieldKey}_revealOp`]; + } + get clipHeightKey() { + return '_' + this._props.fieldKey + '_clipHeight'; } + get clipWidthKey() { return '_' + this._props.fieldKey + '_clipWidth'; } + @computed get clipWidth() { + return NumCast(this.layoutDoc[this.clipWidthKey], 50); + } + @computed get clipHeight() { return NumCast(this.layoutDoc[this.clipHeightKey], 200); } - get revealOp() { - return this.layoutDoc[`_${this._props.fieldKey}_revealOp`]; + + @computed get overlayAlternateIcon() { + const usepath = this.layoutDoc[`_${this._props.fieldKey}_usePath`]; + return ( + <Tooltip title={<div className="dash-tooltip">flip</div>}> + <div + className="formattedTextBox-alternateButton" + onPointerDown={e => + setupMoveUpEvents(e.target, e, returnFalse, emptyFunction, () => { + if (!this.layoutDoc[`_${this._props.fieldKey}_revealOp`] || this.layoutDoc[`_${this._props.fieldKey}_revealOp`] === 'flip') { + this.flipFlashcard(); + + // console.log('Print Front of cards: ' + (RTFCast(DocCast(this.dataDoc[this.fieldKey + '_0']).text)?.Text ?? '')); + // console.log('Print Back of cards: ' + (RTFCast(DocCast(this.dataDoc[this.fieldKey + '_1']).text)?.Text ?? '')); + } + }) + } + style={{ + background: this.revealOp === 'hover' ? 'gray' : usepath === 'alternate' ? 'white' : 'black', + color: this.revealOp === 'hover' ? 'black' : usepath === 'alternate' ? 
'black' : 'white', + display: 'inline-block', + }}> + <div key="alternate" className="formattedTextBox-flip"> + <FontAwesomeIcon icon="turn-up" size="1x" /> + </div> + </div> + </Tooltip> + ); } - get clipHeightKey() { - return '_' + this._props.fieldKey + '_clipHeight'; + + @computed get flashcardMenu() { + return ( + <div> + <Tooltip + title={ + this.layoutDoc[`_${this._props.fieldKey}_usePath`] === 'alternate' ? ( + <div className="dash-tooltip">Flip to front side to use GPT</div> + ) : ( + <div className="dash-tooltip">Ask GPT to create an answer on the back side of the flashcard based on your question on the front</div> + ) + }> + <div style={{ position: 'absolute', bottom: '3px', right: '50px', cursor: 'pointer' }} onPointerDown={e => (!this.layoutDoc[`_${this._props.fieldKey}_usePath`] ? this.findImageTags() : null)}> + <FontAwesomeIcon icon="lightbulb" size="xl" /> + </div> + </Tooltip> + {DocCast(this.Document.embedContainer).type_collection === 'carousel' ? null : ( + <div> + <Tooltip title={<div>Create a flashcard pile</div>}> + <div style={{ position: 'absolute', bottom: '3px', right: '74px', cursor: 'pointer' }} onPointerDown={e => this.createFlashcardPile([this.Document], false)}> + <FontAwesomeIcon icon="folder-plus" size="xl" /> + </div> + </Tooltip> + <Tooltip title={<div className="dash-tooltip">Create new flashcard stack based on text</div>}> + <div style={{ position: 'absolute', bottom: '3px', right: '104px', cursor: 'pointer' }} onClick={e => this.gptFlashcardPile()}> + <FontAwesomeIcon icon="layer-group" size="xl" /> + </div> + </Tooltip> + </div> + )} + <Tooltip title={<div className="dash-tooltip">Hover to reveal</div>}> + <div style={{ position: 'absolute', bottom: '3px', right: '25px', cursor: 'pointer' }} onClick={e => this.handleHover()}> + <FontAwesomeIcon color={this.revealOp === 'hover' ? 
'blue' : 'black'} icon="hand-point-up" size="xl" /> + </div> + </Tooltip> + {/* <Tooltip title={<div className="dash-tooltip">Remove this side of the flashcard</div>}> + <div + style={{ position: 'absolute', bottom: '3px', right: '80px', cursor: 'pointer' }} + onPointerDown={e => this.closeDown(e, this.layoutDoc[`_${this._props.fieldKey}_usePath`] === 'alternate' ? this._props.fieldKey + '_1' : this._props.fieldKey + '_0')}> + <FontAwesomeIcon color={this.revealOp === 'hover' ? 'blue' : 'black'} icon="trash-can" size="xl" /> + </div> + </Tooltip> */} + {/* {this.overlayAlternateIcon} */} + </div> + ); } componentDidMount() { @@ -80,6 +159,7 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>() } protected createDropTarget = (ele: HTMLDivElement | null, fieldKey: string, disposerId: number) => { this._disposers[disposerId]?.(); + // this.childActive = true; if (ele) { this._disposers[disposerId] = DragManager.MakeDropTarget(ele, (e, dropEvent) => this.internalDrop(e, dropEvent, fieldKey), this.layoutDoc); } @@ -147,6 +227,39 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>() return false; }; + @action handleInputChange = (e: React.ChangeEvent<HTMLTextAreaElement>) => { + this._inputValue = e.target.value; + console.log(this._inputValue); + }; + + // this.closeDown(e, this.layoutDoc[`_${this._props.fieldKey}_usePath`] === 'alternate' ? 
this.fieldKey + '_0' : this.fieldKey + '_1')} + @action activateContent = () => { + this.childActive = true; + }; + + @action handleRenderGPTClick = () => { + // Call the GPT model and get the output + this.layoutDoc[`_${this._props.fieldKey}_usePath`] = 'alternate'; + this._outputValue = ''; + if (this._inputValue) this.askGPT(GPTCallType.QUIZ); + }; + + @action handleHover = () => { + if (this.revealOp === 'hover') { + this.layoutDoc[`_${this._props.fieldKey}_revealOp`] = 'flip'; + this.Document.forceActive = false; + } else { + this.layoutDoc[`_${this._props.fieldKey}_revealOp`] = 'hover'; + this.Document.forceActive = true; + } + //this.revealOp === 'hover' ? (this.layoutDoc[`_${this._props.fieldKey}_revealOp`] = 'flip') : (this.layoutDoc[`_${this._props.fieldKey}_revealOp`] = 'hover'); + }; + + @action handleRenderClick = () => { + // Call the GPT model and get the output + this.layoutDoc[`_${this._props.fieldKey}_usePath`] = undefined; + }; + getAnchor = (addAsAnnotation: boolean, pinProps?: PinProps) => { const anchor = Docs.Create.ConfigDocument({ title: 'CompareAnchor:' + this.Document.title, @@ -271,13 +384,29 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>() } gptFlashcardPile = async () => { - var text = await this.askGPT(GPTCallType.FLASHCARD); + var text = await this.askGPT(GPTCallType.STACK); + console.log(text); var senArr = text?.split('Question: '); var collectionArr: Doc[] = []; for (let i = 1; i < senArr?.length!; i++) { const newDoc = Docs.Create.ComparisonDocument(senArr![i], { _layout_isFlashcard: true, _width: 300, _height: 300 }); - newDoc.text = senArr![i]; + // newDoc.text = senArr![i]; + // const dataSplit = StrCast(this.dataDoc.data).includes('Keyword: ') ? 
StrCast(this.dataDoc.data).split('Keyword: ') : StrCast(this.dataDoc.data).split('Answer: '); + // newDoc[this.fieldKey + '_0'] = Docs.Create.TextDocument(dataSplit[1]); + // newDoc[this.fieldKey + '_1'] = Docs.Create.TextDocument(dataSplit[0]); + // newDoc['backgroundColor'] = 'lightgray'; + // newDoc.image = + if (StrCast(senArr![i]).includes('Keyword: ')) { + console.log('Here'); + const img = await this.fetchImages(StrCast(senArr![i]).split('Keyword: ')[1]); + console.log(img); + DocCast(newDoc).image = img; + // DocCast(DocCast(newDoc).dataDoc)['image'] = img; + Doc.AddToMyOverlay(img); + } + + console.log('ARR' + i + senArr![i]); collectionArr.push(newDoc); } this.createFlashcardPile(collectionArr, true); @@ -297,110 +426,6 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>() hoverFlip = (side: string | undefined) => { if (this.layoutDoc[`_${this._props.fieldKey}_revealOp`] === 'hover') this.layoutDoc[`_${this._props.fieldKey}_usePath`] = side; }; - /** - * Creates the button used to flip the flashcards. - */ - @computed get overlayAlternateIcon() { - const usepath = this.layoutDoc[`_${this._props.fieldKey}_usePath`]; - return ( - <Tooltip title={<div className="dash-tooltip">flip</div>}> - <div - className="formattedTextBox-alternateButton" - onPointerDown={e => - setupMoveUpEvents(e.target, e, returnFalse, emptyFunction, () => { - if (!this.layoutDoc[`_${this._props.fieldKey}_revealOp`] || this.layoutDoc[`_${this._props.fieldKey}_revealOp`] === 'flip') { - this.flipFlashcard(); - - // console.log('Print Front of cards: ' + (RTFCast(DocCast(this.dataDoc[this.fieldKey + '_0']).text)?.Text ?? '')); - // console.log('Print Back of cards: ' + (RTFCast(DocCast(this.dataDoc[this.fieldKey + '_1']).text)?.Text ?? '')); - } - }) - } - style={{ - background: this.revealOp === 'hover' ? 'gray' : usepath === 'alternate' ? 'white' : 'black', - color: this.revealOp === 'hover' ? 'black' : usepath === 'alternate' ? 
'black' : 'white', - display: 'inline-block', - }}> - <div key="alternate" className="formattedTextBox-flip"> - <FontAwesomeIcon icon="turn-up" size="1x" /> - </div> - </div> - </Tooltip> - ); - } - - @computed get flashcardMenu() { - return ( - <div> - <Tooltip - title={ - this.layoutDoc[`_${this._props.fieldKey}_usePath`] === 'alternate' ? ( - <div className="dash-tooltip">Flip to front side to use GPT</div> - ) : ( - <div className="dash-tooltip">Ask GPT to create an answer on the back side of the flashcard based on your question on the front</div> - ) - }> - <div style={{ position: 'absolute', bottom: '3px', right: '50px', cursor: 'pointer' }} onPointerDown={e => (!this.layoutDoc[`_${this._props.fieldKey}_usePath`] ? this.askGPT(GPTCallType.CHATCARD) : null)}> - <FontAwesomeIcon icon="lightbulb" size="xl" /> - </div> - </Tooltip> - {DocCast(this.Document.embedContainer).type_collection === 'carousel' ? null : ( - <div> - <Tooltip title={<div>Create a flashcard pile</div>}> - <div style={{ position: 'absolute', bottom: '3px', right: '74px', cursor: 'pointer' }} onPointerDown={e => this.createFlashcardPile([this.Document], false)}> - <FontAwesomeIcon icon="folder-plus" size="xl" /> - </div> - </Tooltip> - <Tooltip title={<div className="dash-tooltip">Create new flashcard stack based on text</div>}> - <div style={{ position: 'absolute', bottom: '3px', right: '104px', cursor: 'pointer' }} onClick={e => this.gptFlashcardPile()}> - <FontAwesomeIcon icon="layer-group" size="xl" /> - </div> - </Tooltip> - </div> - )} - <Tooltip title={<div className="dash-tooltip">Hover to reveal</div>}> - <div style={{ position: 'absolute', bottom: '3px', right: '25px', cursor: 'pointer' }} onClick={e => this.handleHover()}> - <FontAwesomeIcon color={this.revealOp === 'hover' ? 
'blue' : 'black'} icon="hand-point-up" size="xl" /> - </div> - </Tooltip> - {/* <Tooltip title={<div className="dash-tooltip">Remove this side of the flashcard</div>}> - <div - style={{ position: 'absolute', bottom: '3px', right: '80px', cursor: 'pointer' }} - onPointerDown={e => this.closeDown(e, this.layoutDoc[`_${this._props.fieldKey}_usePath`] === 'alternate' ? this._props.fieldKey + '_1' : this._props.fieldKey + '_0')}> - <FontAwesomeIcon color={this.revealOp === 'hover' ? 'blue' : 'black'} icon="trash-can" size="xl" /> - </div> - </Tooltip> */} - {/* {this.overlayAlternateIcon} */} - </div> - ); - } - // this.closeDown(e, this.layoutDoc[`_${this._props.fieldKey}_usePath`] === 'alternate' ? this.fieldKey + '_0' : this.fieldKey + '_1')} - @action activateContent = () => { - this.childActive = true; - }; - - @action handleRenderGPTClick = () => { - // Call the GPT model and get the output - this.layoutDoc[`_${this._props.fieldKey}_usePath`] = 'alternate'; - this._outputValue = ''; - if (this._inputValue) this.askGPT(GPTCallType.QUIZ); - }; - - @action handleHover = () => { - if (this.revealOp === 'hover') { - this.layoutDoc[`_${this._props.fieldKey}_revealOp`] = 'flip'; - this.Document.forceActive = false; - } else { - this.layoutDoc[`_${this._props.fieldKey}_revealOp`] = 'hover'; - this.Document.forceActive = true; - } - //this.revealOp === 'hover' ? 
(this.layoutDoc[`_${this._props.fieldKey}_revealOp`] = 'flip') : (this.layoutDoc[`_${this._props.fieldKey}_revealOp`] = 'hover'); - }; - - @action handleRenderClick = () => { - // Call the GPT model and get the output - this.layoutDoc[`_${this._props.fieldKey}_usePath`] = undefined; - }; animateRes = (resIndex: number, newText: string, callType: GPTCallType) => { if (resIndex < newText.length) { @@ -448,15 +473,17 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>() if (callType == GPTCallType.CHATCARD) { DocCast(this.dataDoc[this.props.fieldKey + '_0'])[DocData].text = res; // this.flipFlashcard(); - } - if (callType == GPTCallType.QUIZ) this._outputValue = res; + } else if (callType == GPTCallType.QUIZ) this._outputValue = res; // DocCast(this.dataDoc[this.props.fieldKey + '_0'])[DocData].text = res; // this._outputValue = res; else if (callType === GPTCallType.FLASHCARD) { // console.log(res); this._loading = false; return res; + } else if (callType === GPTCallType.STACK) { } + this._loading = false; + return res; // console.log(res); } catch (err) { console.error('GPT call failed'); @@ -470,7 +497,160 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>() // const cm = ContextMenu.Instance; // cm.addItem({ description: 'Create an Answer on the Back', event: () => this.askGPT(GPTCallType.CHATCARD), icon: 'pencil' }); // }; - @observable childActive = false; + + findImageTags = async () => { + // const d = DocCast(this.dataDoc[this.props.fieldKey + '_0']); + // const copy = Doc.MakeCopy(this.Document, true); + const c = this.DocumentView?.().ContentDiv!.getElementsByTagName('img'); + // this.ProseRef?.getElementsByTagName('img'); + if (c?.length === 0) await this.askGPT(GPTCallType.CHATCARD); + if (c) { + this._loading = true; + for (let i of c) { + console.log(i); + if (i.className !== 'ProseMirror-separator') await this.getImageDesc(i.src); + } + this._loading = false; + this.flipFlashcard(); + } + // 
console.log('HI' + this.ProseRef?.getElementsByTagName('img')); + }; + + static imageUrlToBase64 = async (imageUrl: string): Promise<string> => { + try { + const response = await fetch(imageUrl); + const blob = await response.blob(); + + return new Promise((resolve, reject) => { + const reader = new FileReader(); + reader.readAsDataURL(blob); + reader.onloadend = () => resolve(reader.result as string); + reader.onerror = error => reject(error); + }); + } catch (error) { + console.error('Error:', error); + throw error; + } + }; + + getImageDesc = async (u: string) => { + try { + const hrefBase64 = await ComparisonBox.imageUrlToBase64(u); + const response = await gptImageLabel(hrefBase64, 'Answer the following question as a short flashcard response. Do not include a label.' + (this.dataDoc.text as RichTextField)?.Text); + + DocCast(this.dataDoc[this.props.fieldKey + '_0'])[DocData].text = response; + } catch (error) { + console.log('Error'); + } + }; + + fetchImages = async (selection: string) => { + try { + const { data } = await axios.get(`${API_URL}?query=${selection}&page=1&per_page=${1}&client_id=Q4zruu6k6lum2kExiGhLNBJIgXDxD6NNj0SRHH_XXU0`); + console.log(data.results); + const imageSnapshot = Docs.Create.ImageDocument(data.results[0].urls.small, { + _nativeWidth: Doc.NativeWidth(this.layoutDoc), + _nativeHeight: Doc.NativeHeight(this.layoutDoc), + x: NumCast(this.layoutDoc.x), + y: NumCast(this.layoutDoc.y), + onClick: FollowLinkScript(), + _width: 150, + _height: 150, + title: '--snapshot' + NumCast(this.layoutDoc._layout_currentTimecode) + ' image-', + }); + // return imageSnapshot; + imageSnapshot['x'] = this.layoutDoc['x']; + imageSnapshot['y'] = this.layoutDoc['y']; + + // const newDoc = Docs.Create.TextDocument(selection); + // newDoc.text = selection; + // newDoc['backgroundColor'] = 'lightgray'; + + // Doc.AddToMyOverlay(imageSnapshot); + return imageSnapshot; + return data.results[0].urls.small; + // Doc.AddEmbedding(newDoc, imageSnapshot); + // 
Doc.MakeEmbedding(imageSnapshot); + // return imageSnapshot; + // imageSnapshot['zIndex'] = 20000; + // this._props.DocumentView?.()._props.addDocument?.(newDoc); + } catch (error) { + console.log(error); + } + }; + + // handleSelection = async (selection: string, newDoc: Doc) => { + // const images = await this.fetchImages(selection); + // return images; + // // Doc.AddDocToList(Doc.MyRecentlyClosed, 'data', dashDoc, undefined, true, true); + // images!.embedContainer = newDoc; + // Doc.AddEmbedding(newDoc, images!); + // const c = this.DocumentView?.().ContentDiv!.getElementsByClassName('afterBox-cont'); + // for (let i in c) { + // console.log('HERE' + i); + // } + // this.addDoc(images!, this.fieldKey + '_0'); + // Doc.AddEmbedding(newDoc, images!); + // this._props. + // Doc.AddToMyOverlay(images!); + // const node = schema.nodes.dashDoc.create({ + // width: NumCast(images?._width), + // height: NumCast(images?._height), + // title: 'dashDoc', + // docId: images![Id], + // float: 'unset', + // }); + // }; + + @observable listening = false; + @observable transcriptElement = ''; + SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition; + recognition = new this.SpeechRecognition(); + + handleResult = (e: SpeechRecognitionEvent) => { + let interimTranscript = ''; + let finalTranscript = ''; + for (let i = e.resultIndex; i < e.results.length; i++) { + const transcript = e.results[i][0].transcript; + if (e.results[i].isFinal) { + finalTranscript += transcript; + } else { + interimTranscript += transcript; + } + } + console.log(interimTranscript); + this._inputValue += finalTranscript; + }; + + setListening = () => { + const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition; + if (SpeechRecognition) { + console.log('here'); + // const recognition = new SpeechRecognition(); + this.recognition.continuous = true; // Continue listening even if the user pauses + this.recognition.interimResults = true; // Show interim 
results + this.recognition.lang = 'en-US'; // Set language (optional) + this.recognition.onresult = this.handleResult.bind(this); + // recognition.onend = this.handleEnd.bind(this); + + // this.handleResult; + // recognition.stop(); + } + }; + + setLanguage = (language: string) => { + this.recognition.lang = language; + }; + + startListening = () => { + this.recognition.start(); + this.listening = true; + }; + + stopListening = () => { + this.recognition.stop(); + this.listening = false; + }; render() { const clearButton = (which: string) => ( @@ -542,13 +722,62 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>() // add text box to each side when comparison box is first created // (!this.dataDoc[this.fieldKey + '_0'] && this.dataDoc[this._props.fieldKey + '_0'] !== 'empty') if (!this.dataDoc[this.fieldKey + '_0'] && !this._isEmpty) { - const dataSplit = StrCast(this.dataDoc.data).split('Answer: '); + const dataSplit = StrCast(this.dataDoc.data).includes('Keyword: ') ? 
StrCast(this.dataDoc.data).split('Keyword: ') : StrCast(this.dataDoc.data).split('Answer: '); const newDoc = Docs.Create.TextDocument(dataSplit[1]); + if (this.Document.image) DocCast(newDoc).image = DocCast(this.Document.image); + console.log('D' + this.Document.image); + //if (DocCast(DocCast(newDoc).dataDoc)) DocCast(DocCast(newDoc).dataDoc)['image'] = this.dataDoc['image']; + + // console.log('HI' + this.Document.image); + // const imageSnapshot = Docs.Create.ImageDocument(StrCast(this.Document.image), { + // _nativeWidth: Doc.NativeWidth(this.layoutDoc), + // _nativeHeight: Doc.NativeHeight(this.layoutDoc), + // x: NumCast(this.layoutDoc.x), + // y: NumCast(this.layoutDoc.y), + // onClick: FollowLinkScript(), + // _width: 150, + // _height: 150, + // title: '--snapshot' + NumCast(this.layoutDoc._layout_currentTimecode) + ' image-', + // }); + // // return imageSnapshot; + // imageSnapshot['x'] = this.layoutDoc['x']; + // imageSnapshot['y'] = this.layoutDoc['y']; + + // const newDoc = Docs.Create.TextDocument(selection); + // newDoc.text = selection; + // newDoc['backgroundColor'] = 'lightgray'; + // newDoc.data = imageSnapshot; + // this.createDropTarget(this., this.fieldKey + '_0', 0) + // Doc.AddEmbedding(imageSnapshot, newDoc); + // Doc.SetContainer(imageSnapshot, newDoc); + // Doc.AddToMyOverlay(imageSnapshot); + + // if (StrCast(this.dataDoc.data).includes('Keyword: ')) { + // console.log('HERE' + this.dataDoc.data); + // this.fetchImages(StrCast(this.dataDoc.data).split('Keyword: ')[1]); + // } + // // const node = schema.nodes.dashDoc.create({ + // // width: NumCast(newDoc._width), + // // height: NumCast(newDoc._height), + // // title: 'dashDoc', + // // docId: newDoc[Id], + // // float: 'unset', + // // }); + // // Doc.AddEmbedding(images!, newDoc); + // // Doc.SetContainer(images, this.newDoc); + // } else { + // newDoc.text = dataSplit[1]; + // newDoc['backgroundColor'] = 'lightgray'; + this.addDoc(newDoc, this.fieldKey + '_0'); + // this.addDoc() + 
// } + // newDoc?.addDocument?.(images); + // if there is text from the pdf ai cards, put the question on the front side. // eslint-disable-next-line prefer-destructuring // newDoc.text = dataSplit[1]; - newDoc['backgroundColor'] = 'lightgray'; - this.addDoc(newDoc, this.fieldKey + '_0'); + // newDoc['backgroundColor'] = 'lightgray'; + // this.addDoc(newDoc, this.fieldKey + '_0'); // DocCast(this.dataDoc[this.fieldKey + '_0'])[DocData].text = dataSplit[1]; // DocCast(this.dataDoc[this.fieldKey + '_0']).text = dataSplit[1]; // console.log('HI' + DocCast(this.dataDoc[this.fieldKey + '_0']).text); @@ -556,7 +785,7 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>() } if (!this.dataDoc[this.fieldKey + '_1'] && !this._isEmpty) { - const dataSplit = StrCast(this.dataDoc.data).split('Answer: '); + const dataSplit = StrCast(this.dataDoc.data).includes('Keyword: ') ? StrCast(this.dataDoc.data).split('Keyword: ') : StrCast(this.dataDoc.data).split('Answer: '); const newDoc = Docs.Create.TextDocument(dataSplit[0]); this.addDoc(newDoc, this.fieldKey + '_1'); // if there is text from the pdf ai cards, put the answer on the alternate side. @@ -592,15 +821,21 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>() </div> ) : null} </div> - <div className="submit-button" style={{ overflow: 'hidden', display: this.layoutDoc[`_${this._props.fieldKey}_usePath`] === 'alternate' ? 'none' : 'flex' }}> - <button type="button" onClick={this.handleRenderGPTClick} style={{ borderRadius: '2px', marginBottom: '3px' }}> - Submit - </button> - </div> - <div className="submit-button" style={{ overflow: 'hidden', marginBottom: '2px', display: this.layoutDoc[`_${this._props.fieldKey}_usePath`] === 'alternate' ? 
'flex' : 'none' }}> - <button type="button" onClick={this.handleRenderClick} style={{ borderRadius: '2px' }}> - Redo the Question - </button> + <div style={{ display: 'inline-block' }}> + <div className="submit-button" style={{ overflow: 'hidden' }}> + <button style={{ width: '10px' }} onClick={this.listening ? this.stopListening : this.startListening}> + {<FontAwesomeIcon icon="microphone" size="lg" />} + </button> + {this.layoutDoc[`_${this._props.fieldKey}_usePath`] !== 'alternate' ? ( + <button type="button" onClick={this.handleRenderGPTClick} style={{ borderRadius: '2px', marginBottom: '3px' }}> + Submit + </button> + ) : ( + <button type="button" onClick={this.handleRenderClick} style={{ borderRadius: '2px' }}> + Redo the Question + </button> + )} + </div> </div> </div> ); diff --git a/src/client/views/nodes/ImageBox.tsx b/src/client/views/nodes/ImageBox.tsx index fde9fe582..65a645560 100644 --- a/src/client/views/nodes/ImageBox.tsx +++ b/src/client/views/nodes/ImageBox.tsx @@ -375,7 +375,7 @@ export class ImageBox extends ViewBoxAnnotatableComponent<FieldViewProps>() { this._loading = true; try { const hrefBase64 = await this.createCanvas(); - const response = await gptImageLabel(hrefBase64, 'Make flashcards out of this text with each question and answer labeled as question and answer. Do not label each flashcard and do not include asterisks: '); + const response = await gptImageLabel(hrefBase64, 'Make flashcards out of this image with each question and answer labeled as "question" and "answer". 
Do not label each flashcard and do not include asterisks: '); console.log(response); AnchorMenu.Instance.transferToFlashcard(response); } catch (error) { diff --git a/src/client/views/nodes/formattedText/FormattedTextBox.tsx b/src/client/views/nodes/formattedText/FormattedTextBox.tsx index 2091ee89a..0be51816f 100644 --- a/src/client/views/nodes/formattedText/FormattedTextBox.tsx +++ b/src/client/views/nodes/formattedText/FormattedTextBox.tsx @@ -909,11 +909,11 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<FormattedTextB ); const appearance = cm.findByDescription('Appearance...'); const appearanceItems: ContextMenuProps[] = appearance && 'subitems' in appearance ? appearance.subitems : []; - appearanceItems.push({ - description: 'Find image tags', - event: this.findImageTags, - icon: !this.Document._layout_noSidebar ? 'eye-slash' : 'eye', - }); + // appearanceItems.push({ + // description: 'Find image tags', + // event: this.findImageTags, + // icon: !this.Document._layout_noSidebar ? 'eye-slash' : 'eye', + // }); appearanceItems.push({ description: !this.Document._layout_noSidebar ? 'Hide Sidebar Handle' : 'Show Sidebar Handle', event: () => { @@ -1040,10 +1040,14 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<FormattedTextB const hrefComplete = `${hrefParts[0]}_o.${hrefParts[1]}`; try { const hrefBase64 = await FormattedTextBox.imageUrlToBase64(u); - const response = await gptImageLabel(hrefBase64, 'Tell me what you see in this image'); + const response = await gptImageLabel( + hrefBase64, + 'Make flashcards out of this text and image with each question and answer labeled as question and answer. Do not label each flashcard and do not include asterisks: ' + (this.dataDoc.text as RichTextField)?.Text + ); //const response = await gptImageLabel(u, 'Make flashcards out of this text with each question and answer labeled as question and answer. 
Do not label each flashcard and do not include asterisks: '); - console.log(response); - // AnchorMenu.Instance.transferToFlashcard(response); + // console.log(response); + AnchorMenu.Instance.transferToFlashcard(response || 'Something went wrong'); + // this._props.addto_; // this.Document[DocData].description = response.trim(); // return response; // Return the response } catch (error) { @@ -1399,7 +1403,35 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<FormattedTextB { fireImmediately: true } ); this.tryUpdateScrollHeight(); + + console.log('S' + this.Document.image); + console.log('S' + DocCast(this.Document).image); + if (this.Document.image) { + // const node = schema.nodes.dashDoc.create({ + // width: 200, + // height: 200, + // title: 'dashDoc', + // docId: DocCast(this.Document.image)[Id], + // float: 'unset', + // }); + + // DocCast(this.Document.image)._freeform_fitContentsToBox = true; + // Doc.SetContainer(DocCast(this.Document.image), this.Document); + // const view = this._editorView!; + // try { + // this._inDrop = true; + // const pos = view.posAtCoords({ left: 0, top: 0 })?.pos; + // pos && view.dispatch(view.state.tr.insert(pos, node)); + // } catch (err) { + // console.log('Drop failed', err); + // } + // console.log('LKSDFLJ'); + this.addDocument?.(DocCast(this.Document.image)); + } + + //if (this.Document.image) this.addDocument?.(DocCast(this.Document.image)); setTimeout(this.tryUpdateScrollHeight, 250); + AnchorMenu.Instance.addToCollection = this._props.DocumentView?.()._props.addDocument; } clipboardTextSerializer = (slice: Slice): string => { diff --git a/src/client/views/pdf/AnchorMenu.tsx b/src/client/views/pdf/AnchorMenu.tsx index 87031487f..dcd76979f 100644 --- a/src/client/views/pdf/AnchorMenu.tsx +++ b/src/client/views/pdf/AnchorMenu.tsx @@ -142,6 +142,7 @@ export class AnchorMenu extends AntimodeMenu<AntimodeMenuProps> { console.log('Arr ' + i + ': ' + senArr[i]); const newDoc = 
Docs.Create.ComparisonDocument(senArr[i], { _layout_isFlashcard: true, _width: 300, _height: 300 }); newDoc.text = senArr[i]; + collectionArr.push(newDoc); } // create a new carousel collection of these flashcards @@ -152,6 +153,7 @@ export class AnchorMenu extends AntimodeMenu<AntimodeMenuProps> { _layout_autoHeight: true, }); + console.log(collectionArr); newCol.x = this._x; newCol.y = this._y; newCol.zIndex = 100; diff --git a/src/client/views/pdf/PDFViewer.tsx b/src/client/views/pdf/PDFViewer.tsx index 27a523465..1af7b1fc0 100644 --- a/src/client/views/pdf/PDFViewer.tsx +++ b/src/client/views/pdf/PDFViewer.tsx @@ -34,6 +34,8 @@ import { Docs } from '../../documents/Documents'; import './PDFViewer.scss'; import { GPTCallType, gptAPICall } from '../../apis/gpt/GPT'; import ReactLoading from 'react-loading'; +import html2canvas from 'html2canvas'; +import SpeechRecognition, { useSpeechRecognition } from 'react-speech-recognition'; // pdfjsLib.GlobalWorkerOptions.workerSrc = `/assets/pdf.worker.js`; // The workerSrc property shall be specified. 
@@ -66,6 +68,43 @@ export class PDFViewer extends ObservableReactComponent<IViewerProps> { super(props); makeObservable(this); } + // @observable transcriptRef = React.createRef(); + // @observable startBtnRef = React.createRef(); + // @observable stopBtnRef = React.createRef(); + // @observable transcriptElement = ''; + + // handleResult = (e: SpeechRecognitionEvent) => { + // let interimTranscript = ''; + // let finalTranscript = ''; + // console.log('H'); + // for (let i = e.resultIndex; i < e.results.length; i++) { + // const transcript = e.results[i][0].transcript; + // if (e.results[i].isFinal) { + // finalTranscript += transcript; + // } else { + // interimTranscript += transcript; + // } + // } + // console.log(interimTranscript); + // this.transcriptElement = finalTranscript || interimTranscript; + // }; + + // startListening = () => { + // const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition; + // if (SpeechRecognition) { + // console.log('here'); + // const recognition = new SpeechRecognition(); + // recognition.continuous = true; // Continue listening even if the user pauses + // recognition.interimResults = true; // Show interim results + // recognition.lang = 'en-US'; // Set language (optional) + // recognition.onresult = this.handleResult.bind(this); + // // recognition.onend = this.handleEnd.bind(this); + + // recognition.start(); + // // this.handleResult; + // // recognition.stop(); + // } + // }; @observable _pageSizes: { width: number; height: number }[] = []; @observable _savedAnnotations = new ObservableMap<number, HTMLDivElement[]>(); @@ -410,7 +449,106 @@ export class PDFViewer extends ObservableReactComponent<IViewerProps> { }; gptPDFFlashcards = async () => { + // const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition; + // if (SpeechRecognition) { + // this.recognition = new SpeechRecognition(); + // this.recognition.continuous = true; // Continue listening even if the user 
pauses + // this.recognition.interimResults = true; // Show interim results + // this.recognition.lang = 'en-US'; // Set language (optional) + + // this.recognition.onresult = this.handleResult; + // this.recognition.onerror = this.handleError; + // this.recognition.onend = this.handleEnd; + // } else { + // console.error("Browser doesn't support Speech Recognition API"); + // } + // const Dictaphone = () => { + // const { transcript, resetTranscript } = useSpeechRecognition(); + + // if (!SpeechRecognition.browserSupportsSpeechRecognition()) { + // return null; + // } + + // return ( + // <div> + // <button onClick={e => SpeechRecognition.startListening}>Start</button> + // <button onClick={e => SpeechRecognition.stopListening}>Stop</button> + // <button onClick={resetTranscript}>Reset</button> + // <p>{transcript}</p> + // </div> + // ); + // }; + // const grammar = + // '#JSGF V1.0; grammar colors; public <color> = aqua | azure | beige | bisque | black | blue | brown | chocolate | coral | crimson | cyan | fuchsia | ghostwhite | gold | goldenrod | gray | green | indigo | ivory | khaki | lavender | lime | linen | magenta | maroon | moccasin | navy | olive | orange | orchid | peru | pink | plum | purple | red | salmon | sienna | silver | snow | tan | teal | thistle | tomato | turquoise | violet | white | yellow ;'; + // const recognition = new SpeechRecognition(); + // const speechRecognitionList = new SpeechGrammarList(); + // speechRecognitionList.addFromString(grammar, 1); + // recognition.grammars = speechRecognitionList; + // recognition.continuous = false; + // recognition.lang = 'en-US'; + // recognition.interimResults = false; + // recognition.maxAlternatives = 1; + + // const diagnostic = document.querySelector('.output'); + // const bg = document.querySelector('html'); + + // document.body.onclick = () => { + // recognition.start(); + // console.log('Ready to receive a color command.'); + // }; + + // recognition.onresult = event => { + // const color = 
event.results[0][0].transcript; + // diagnostic!.textContent = `Result received: ${color}`; + // bg!.style.backgroundColor = color; + // }; + + //const SpeechRecognition = SpeechRecognition || webkitSpeechRecognition; + + // recognition.continous = true; + // recognition.interimResults = true; + // recognition.lang = 'en-US'; + const queryText = this._selectionText; + + // const canvas = await html2canvas(); + // const image = canvas.toDataURL("image/png", 1.0); + // (window as any) + // .html2canvas(this._marqueeref, { + // x: 100, + // y: 100, + // width: 100, + // height: 100, + // }) + // .then((canvas: HTMLCanvasElement) => { + // const img = canvas.toDataURL('image/png'); + + // const link = document.createElement('a'); + // link.href = img; + // link.download = 'screenshot.png'; + + // document.body.appendChild(link); + // link.click(); + // link.remove(); + // }); + + // var range = window.getSelection()?.getRangeAt(0); + // var selectionContents = range?.extractContents(); + // var div = document.createElement("div"); + // div.style.color = "yellow"; + // div.appendChild(selectionContents!); + // range!.insertNode(div); + + // const canvas = document.createElement('canvas'); + // const scaling = 1 / (this._props.NativeDimScaling?.() || 1); + // const w = AnchorMenu.Instance.marqueeWidth * scaling; + // const h = AnchorMenu.Instance.marqueeHeight * scaling; + // canvas.width = w; + // canvas.height = h; + // const ctx = canvas.getContext('2d'); // draw image to canvas. scale to target dimensions + // if (ctx) { + // this._marqueeref && ctx.drawImage(div, NumCast(this._marqueeref.current?.left) * scaling, NumCast(this._marqueeref.current?.top) * scaling, w, h, 0, 0, w, h); + // } this._loading = true; try { if (this._selectionText === '') { |