Diffstat (limited to 'src/client/views/nodes/ComparisonBox.tsx')
-rw-r--r-- | src/client/views/nodes/ComparisonBox.tsx | 258
1 file changed, 156 insertions, 102 deletions
diff --git a/src/client/views/nodes/ComparisonBox.tsx b/src/client/views/nodes/ComparisonBox.tsx
index 3d2a1f0a9..ce29a63b4 100644
--- a/src/client/views/nodes/ComparisonBox.tsx
+++ b/src/client/views/nodes/ComparisonBox.tsx
@@ -1,4 +1,5 @@
 import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
+import { MathJax, MathJaxContext } from 'better-react-mathjax';
 import { Tooltip } from '@mui/material';
 import { action, computed, makeObservable, observable, reaction } from 'mobx';
 import { observer } from 'mobx-react';
@@ -33,6 +34,8 @@ import { FollowLinkScript } from '../../documents/DocUtils';
 import { schema } from '../nodes/formattedText/schema_rts';
 import { Id } from '../../../fields/FieldSymbols';
 import axios from 'axios';
+import ReactMarkdown from 'react-markdown';
+import { WebField, nullAudio } from '../../../fields/URLField';
 
 const API_URL = 'https://api.unsplash.com/search/photos';
 @observer
@@ -51,9 +54,13 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>()
     @observable private _outputValue = '';
     @observable private _loading = false;
     @observable private _isEmpty = false;
+    @observable private _audio: Doc = Docs.Create.TextDocument('');
     @observable childActive = false;
     @observable _yRelativeToTop: boolean = true;
     @observable _animating = '';
+    @observable mathJaxConfig = {
+        loader: { load: ['input/asciimath'] },
+    };
     private _ref = React.createRef<HTMLDivElement>();
 
     get revealOp() {
@@ -237,11 +244,71 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>()
         this.childActive = true;
     };
 
-    @action handleRenderGPTClick = () => {
+    @action handleRenderGPTClick = async () => {
         // Call the GPT model and get the output
+        // await this.pushInfo();
+        // console.log('PHONETIC TRANSCRIPTION: ' + DocCast(this._audio)[DocData]);
+        // this.Document.audio = this._audio;
+        console.log('Phonetic transcription: ' + DocCast(this.Document.audio).phoneticTranscription);
+        const phonTrans = DocCast(this.Document.audio).phoneticTranscription;
+        if (phonTrans) {
+            this._inputValue = StrCast(phonTrans);
+            console.log('INPUT:' + this._inputValue);
+            this.askGPTPhonemes(this._inputValue);
+        } else if (this._inputValue) this.askGPT(GPTCallType.QUIZ);
         this.layoutDoc[`_${this._props.fieldKey}_usePath`] = 'alternate';
         this._outputValue = '';
-        if (this._inputValue) this.askGPT(GPTCallType.QUIZ);
+    };
+
+    askGPTPhonemes = async (phonemes: string) => {
+        const question0 = 'These phonemes should match "what is your name": ' + phonemes + 'Use the structure of this response as guidance: "Your pronunciation of the vowel in "what" is not front enough. It should be pronounced like /uh/."';
+        const question =
+            'Match the following phonemes with each word in "what is your name": ' +
+            phonemes +
+            '. Note if a letter is added or missing that changes the meaning. If the mismatches are not allophones of the same phoneme and they are far away from each other on the vowel chart, list the difference. For the mismatches, use the structure of this response as guidance: "Your pronunciation of the vowel in "what" is not front enough. It should be pronounced like /uh/."';
+        const question1 =
+            'Consider all phonetic transcriptions of "what is your name" with different vowel pronunications. Compares these phonemes with that phonetic transcription: ' +
+            phonemes +
+            '. If the differences are not allophones of the same phoneme and they are far away from each other on the vowel chart, list the difference. If it is missing or added a letter, say that.';
+        //Only describe sound changes that will change the meaning drastically. Provide two sentences describing this. Do not list differences that do not change the meaning.';
+        const question2 = 'Is this a valid phonetic transcription of the phrase "what is your name": ' + phonemes + '.';
+        // If the difference found will definitely make the word be not understood and change the meaning, then list it. If the difference is minimal or the sound matches, do not list it.';
+        //These phonemes are supposed to match the pronunciation of ' +
+        //'hello: ' +
+        //phonemes +
+        //'. If there is a difference in sound that would change the meaning of the word or sentence, such as "pen" vs. "pin", describe that. Otherwise say "good job."';
+        // Identify any differences in pronunciation that would change the meaning of the intended word or sentence and only list differences that would change the meaning. If there are no major differences, say "Good job." If there are differences, describe it in terms of sounds in sentences.';
+        // const question =
+        //     'These phonemes are supposed to match the pronunciation of ' +
+        //     StrCast(RTFCast(DocCast(this.dataDoc[this.fieldKey + '_0']).text)?.Text) +
+        //     '. Identify any differences in pronunciation that would change the meaning of the intended word or sentence.';
+        console.log(question);
+        const res = await gptAPICall(question, GPTCallType.PRONUNCIATION);
+        console.log('GPT: ' + res);
+        if (!res) {
+            console.error('GPT call failed');
+            return;
+        }
+        // const questionText = 'Question: ' + StrCast(RTFCast(DocCast(this.dataDoc[this.fieldKey + '_1']).text)?.Text);
+        // const rubricText = ' Rubric: ' + StrCast(RTFCast(DocCast(this.dataDoc[this.fieldKey + '_0']).text)?.Text);
+        // const queryText = questionText + ' UserAnswer: ' + this._inputValue + '. ' + rubricText;
+        // this._loading = true;
+    };
+
+    pushInfo = async () => {
+        const formData = new FormData();
+
+        console.log(DocCast(this._audio).dataDoc);
+        const audio = {
+            file: this._audio.url,
+        };
+        const response = await axios.post('http://localhost:105/recognize/', audio, {
+            headers: {
+                'Content-Type': 'application/json',
+            },
+        });
+        this.Document.phoneticTranscription = response.data['transcription'];
+        console.log('RESPONSE: ' + response.data['transcription']);
     };
 
     @action handleHover = () => {
@@ -391,22 +458,41 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>()
         var collectionArr: Doc[] = [];
         for (let i = 1; i < senArr?.length!; i++) {
             const newDoc = Docs.Create.ComparisonDocument(senArr![i], { _layout_isFlashcard: true, _width: 300, _height: 300 });
-            // newDoc.text = senArr![i];
-            // const dataSplit = StrCast(this.dataDoc.data).includes('Keyword: ') ? StrCast(this.dataDoc.data).split('Keyword: ') : StrCast(this.dataDoc.data).split('Answer: ');
-            // newDoc[this.fieldKey + '_0'] = Docs.Create.TextDocument(dataSplit[1]);
-            // newDoc[this.fieldKey + '_1'] = Docs.Create.TextDocument(dataSplit[0]);
-            // newDoc['backgroundColor'] = 'lightgray';
-            // newDoc.image =
+
             if (StrCast(senArr![i]).includes('Keyword: ')) {
-                console.log('Here');
-                const img = await this.fetchImages(StrCast(senArr![i]).split('Keyword: ')[1]);
-                console.log(img);
-                DocCast(newDoc).image = img;
-                // DocCast(DocCast(newDoc).dataDoc)['image'] = img;
-                Doc.AddToMyOverlay(img);
+                const question = StrCast(senArr![i]).split('Keyword: ');
+                const img = await this.fetchImages(question[1]);
+                // newDoc['image'] = img;
+                // const newDoc = Docs.Create.TextDocument(dataSplit[1]);
+                const textSide1 = question[0].includes('Answer: ') ? question[0].split('Answer: ')[0] : question[0];
+                const textDoc1 = Docs.Create.TextDocument(question[0]);
+                const rtfiel = new RichTextField(
+                    JSON.stringify({
+                        doc: {
+                            type: 'doc',
+                            content: [
+                                {
+                                    type: 'paragraph',
+                                    attrs: { align: null, color: null, id: null, indent: null, inset: null, lineSpacing: null, paddingBottom: null, paddingTop: null },
+                                    content: [
+                                        { type: 'text', text: question[0].includes('Answer: ') ? question[0].split('Answer: ')[0] : question[0] },
+                                        { type: 'dashDoc', attrs: { width: '200px', height: '200px', title: 'dashDoc', float: 'unset', hidden: false, docId: img![Id] } },
+                                    ],
+                                },
+                            ],
+                        },
+                        selection: { type: 'text', anchor: 2, head: 2 },
+                    }),
+                    textSide1
+                );
+
+                textDoc1[DocData].text = rtfiel;
+                DocCast(newDoc)[DocData][this.fieldKey + '_1'] = textDoc1;
+
+                DocCast(newDoc)[DocData][this.fieldKey + '_0'] = Docs.Create.TextDocument(question[0].includes('Answer: ') ? question[0].split('Answer: ')[1] : question[1]);
+                // Doc.AddToMyOverlay(img!);
             }
-            console.log('ARR' + i + senArr![i]);
             collectionArr.push(newDoc);
         }
 
         this.createFlashcardPile(collectionArr, true);
@@ -473,7 +559,10 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>()
         if (callType == GPTCallType.CHATCARD) {
             DocCast(this.dataDoc[this.props.fieldKey + '_0'])[DocData].text = res;
             // this.flipFlashcard();
-        } else if (callType == GPTCallType.QUIZ) this._outputValue = res;
+        } else if (callType == GPTCallType.QUIZ) {
+            console.log(this._inputValue);
+            this._outputValue = res.replace(/UserAnswer/g, "user's answer").replace(/Rubric/g, 'rubric');
+        }
         // DocCast(this.dataDoc[this.props.fieldKey + '_0'])[DocData].text = res;
         // this._outputValue = res;
         else if (callType === GPTCallType.FLASHCARD) {
@@ -511,7 +600,7 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>()
                 if (i.className !== 'ProseMirror-separator') await this.getImageDesc(i.src);
             }
             this._loading = false;
-            this.flipFlashcard();
+            // this.flipFlashcard();
         }
         // console.log('HI' + this.ProseRef?.getElementsByTagName('img'));
     };
@@ -558,22 +647,9 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>()
                 _height: 150,
                 title: '--snapshot' + NumCast(this.layoutDoc._layout_currentTimecode) + ' image-',
             });
-            // return imageSnapshot;
            imageSnapshot['x'] = this.layoutDoc['x'];
            imageSnapshot['y'] = this.layoutDoc['y'];
-
-            // const newDoc = Docs.Create.TextDocument(selection);
-            // newDoc.text = selection;
-            // newDoc['backgroundColor'] = 'lightgray';
-
-            // Doc.AddToMyOverlay(imageSnapshot);
            return imageSnapshot;
-            return data.results[0].urls.small;
-            // Doc.AddEmbedding(newDoc, imageSnapshot);
-            // Doc.MakeEmbedding(imageSnapshot);
-            // return imageSnapshot;
-            // imageSnapshot['zIndex'] = 20000;
-            // this._props.DocumentView?.()._props.addDocument?.(newDoc);
         } catch (error) {
             console.log(error);
         }
@@ -660,6 +736,14 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>()
         ContextMenu.Instance.displayMenu(x, y);
     };
 
+    evaluatePronunciation = () => {
+        const newAudio = Docs.Create.AudioDocument(nullAudio, { _width: 200, _height: 100 });
+        this.Document.audio = newAudio[DocData];
+        // DocCast(this.Document.embedContainer)()._props.addDocument?.(newAudio);
+        this._props.DocumentView?.()._props.addDocument?.(newAudio);
+        // Doc.AddToMyOverlay(newAudio);
+    };
+
     render() {
         const clearButton = (which: string) => (
             <Tooltip title={<div className="dash-tooltip">remove</div>}>
@@ -680,6 +764,8 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>()
            // whichDoc['backgroundColor'] = this.layoutDoc['backgroundColor'];
 
            return targetDoc || layoutString ? (
+                // <MathJaxContext config={this.mathJaxConfig}>
+                // <MathJax>
                 <>
                     <DocumentView
                         // eslint-disable-next-line react/jsx-props-no-spreading
@@ -701,8 +787,10 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>()
                         hideLinkButton
                         pointerEvents={this.childActive ? undefined : returnNone}
                     />
+                    {/* </MathJax> */}
                     {/* <div style={{ position: 'absolute', top: '-5px', left: '2px' }}>{layoutString ? null : clearButton(whichSlot)}</div> */}
-                </> // placeholder image if doc is missingleft: `${NumCast(this.layoutDoc.width, 200) - 33}px`
+                    {/* </MathJaxContext> // placeholder image if doc is missingleft: `${NumCast(this.layoutDoc.width, 200) - 33}px` */}
+                </>
             ) : (
                 <div className="placeholder">
                     <FontAwesomeIcon className="upload-icon" icon="cloud-upload-alt" size="lg" />
@@ -732,79 +820,36 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>()
         if (!this.dataDoc[this.fieldKey + '_0'] && !this._isEmpty) {
            const dataSplit = StrCast(this.dataDoc.data).includes('Keyword: ') ? StrCast(this.dataDoc.data).split('Keyword: ') : StrCast(this.dataDoc.data).split('Answer: ');
            const newDoc = Docs.Create.TextDocument(dataSplit[1]);
-            if (this.Document.image) DocCast(newDoc).image = DocCast(this.Document.image);
-            // console.log('D' + this.Document.image);
-            //if (DocCast(DocCast(newDoc).dataDoc)) DocCast(DocCast(newDoc).dataDoc)['image'] = this.dataDoc['image'];
-
-            // console.log('HI' + this.Document.image);
-            // const imageSnapshot = Docs.Create.ImageDocument(StrCast(this.Document.image), {
-            //     _nativeWidth: Doc.NativeWidth(this.layoutDoc),
-            //     _nativeHeight: Doc.NativeHeight(this.layoutDoc),
-            //     x: NumCast(this.layoutDoc.x),
-            //     y: NumCast(this.layoutDoc.y),
-            //     onClick: FollowLinkScript(),
-            //     _width: 150,
-            //     _height: 150,
-            //     title: '--snapshot' + NumCast(this.layoutDoc._layout_currentTimecode) + ' image-',
-            // });
-            // // return imageSnapshot;
-            // imageSnapshot['x'] = this.layoutDoc['x'];
-            // imageSnapshot['y'] = this.layoutDoc['y'];
-
-            // const newDoc = Docs.Create.TextDocument(selection);
-            // newDoc.text = selection;
-            // newDoc['backgroundColor'] = 'lightgray';
-            // newDoc.data = imageSnapshot;
-            // this.createDropTarget(this., this.fieldKey + '_0', 0)
-            // Doc.AddEmbedding(imageSnapshot, newDoc);
-            // Doc.SetContainer(imageSnapshot, newDoc);
-            // Doc.AddToMyOverlay(imageSnapshot);
-
-            // if (StrCast(this.dataDoc.data).includes('Keyword: ')) {
-            //     console.log('HERE' + this.dataDoc.data);
-            //     this.fetchImages(StrCast(this.dataDoc.data).split('Keyword: ')[1]);
-            // }
-            // // const node = schema.nodes.dashDoc.create({
-            // //     width: NumCast(newDoc._width),
-            // //     height: NumCast(newDoc._height),
-            // //     title: 'dashDoc',
-            // //     docId: newDoc[Id],
-            // //     float: 'unset',
-            // // });
-            // // Doc.AddEmbedding(images!, newDoc);
-            // // Doc.SetContainer(images, this.newDoc);
-            // } else {
-            // newDoc.text = dataSplit[1];
-            // newDoc['backgroundColor'] = 'lightgray';
            this.addDoc(newDoc, this.fieldKey + '_0');
-            // this.addDoc()
-            // }
-            // newDoc?.addDocument?.(images);
-
-            // if there is text from the pdf ai cards, put the question on the front side.
-            // eslint-disable-next-line prefer-destructuring
-            // newDoc.text = dataSplit[1];
-            // newDoc['backgroundColor'] = 'lightgray';
-            // this.addDoc(newDoc, this.fieldKey + '_0');
-            // DocCast(this.dataDoc[this.fieldKey + '_0'])[DocData].text = dataSplit[1];
-            // DocCast(this.dataDoc[this.fieldKey + '_0']).text = dataSplit[1];
-            // console.log('HI' + DocCast(this.dataDoc[this.fieldKey + '_0']).text);
-            //console.log('HEREEE' + StrCast(RTFCast(DocCast(this.dataDoc[this.fieldKey + '_0']).text)?.Text));
         }
         if (!this.dataDoc[this.fieldKey + '_1'] && !this._isEmpty) {
            const dataSplit = StrCast(this.dataDoc.data).includes('Keyword: ') ? StrCast(this.dataDoc.data).split('Keyword: ') : StrCast(this.dataDoc.data).split('Answer: ');
            const newDoc = Docs.Create.TextDocument(dataSplit[0]);
            this.addDoc(newDoc, this.fieldKey + '_1');
-            // if there is text from the pdf ai cards, put the answer on the alternate side.
-            // eslint-disable-next-line prefer-destructuring
-
-            // newDoc[DocData].text = dataSplit[0];
-            // console.log('HEREEE' + StrCast(RTFCast(DocCast(this.dataDoc[this.fieldKey + '_1']).text)?.Text));
-            // console.log('HI' + DocCast(this.dataDoc[this.fieldKey + '_1']).text);
-            // DocCast(this.dataDoc[this.props.fieldKey + '_1'])[DocData].text = dataSplit[0];
-            // console.log('HEREEE' + StrCast(RTFCast(DocCast(this.dataDoc[this.fieldKey + '_0']).text)?.Text));
-            // DocCast(this.dataDoc[this.fieldKey + '_1'])[DocData].text = dataSplit[0];
+            // if (this.Document.image) {
+            //     console.log('ID: ' + DocCast(this.Document.image)[Id]);
+            //     const rtfiel = new RichTextField(
+            //         JSON.stringify({
+            //             doc: {
+            //                 type: 'doc',
+            //                 content: [
+            //                     {
+            //                         type: 'paragraph',
+            //                         attrs: { align: null, color: null, id: null, indent: null, inset: null, lineSpacing: null, paddingBottom: null, paddingTop: null },
+            //                         content: [
+            //                             { type: 'text', text: dataSplit[0] },
+            //                             { type: 'dashDoc', attrs: { width: '200px', height: '200px', title: 'dashDoc', float: 'unset', hidden: false, docId: DocCast(this.Document.image)[Id] } },
+            //                         ],
+            //                     },
+            //                 ],
+            //             },
+            //             selection: { type: 'text', anchor: 2, head: 2 },
+            //         })
+            //     );
+
+            //     newDoc[DocData].text = rtfiel;
+            // }
         }
 
         // render the QuizCards
@@ -819,7 +864,10 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>()
                    <textarea
                        value={this.layoutDoc[`_${this._props.fieldKey}_usePath`] === 'alternate' ? this._outputValue : this._inputValue}
                        onChange={this.handleInputChange}
-                        onScroll={e => e.stopPropagation()}
+                        onScroll={e => {
+                            e.stopPropagation();
+                            e.preventDefault();
+                        }}
                        placeholder={!this.layoutDoc[`_${this._props.fieldKey}_usePath`] ? 'Enter a response for GPT to evaluate.' : ''}
                        readOnly={this.layoutDoc[`_${this._props.fieldKey}_usePath`] === 'alternate'}></textarea>
@@ -834,19 +882,25 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>()
                    <div
                        className="submit-buttonschema-header-button"
                        onPointerDown={e => this.openContextMenu(e.clientX, e.clientY)}
-                        style={{ position: 'absolute', top: '1px', left: '11px', zIndex: '100', width: '5px', height: '5px', cursor: 'pointer' }}>
+                        style={{ position: 'absolute', top: '5px', left: '11px', zIndex: '100', width: '5px', height: '5px', cursor: 'pointer' }}>
                        <FontAwesomeIcon color={'white'} icon="caret-down" />
                    </div>
                    <button className="submit-buttonrecord" onClick={this._listening ? this.stopListening : this.startListening} style={{ background: this._listening ? 'lightgray' : '', borderRadius: '2px' }}>
                        {<FontAwesomeIcon icon="microphone" size="lg" />}
                    </button>
+                    <button
+                        className="submit-buttonpronunciation"
+                        onClick={this.evaluatePronunciation}
+                        style={{ display: 'inline-flex', alignItems: 'center', background: this._listening ? 'lightgray' : '', borderRadius: '2px', width: '100%' }}>
+                        Evaluate Pronunciation
+                    </button>
                    {this.layoutDoc[`_${this._props.fieldKey}_usePath`] !== 'alternate' ? (
-                        <button className="submit-buttonsubmit" type="button" onClick={this.handleRenderGPTClick} style={{ borderRadius: '2px', marginBottom: '3px', width: '300%' }}>
+                        <button className="submit-buttonsubmit" type="button" onClick={this.handleRenderGPTClick} style={{ borderRadius: '2px', marginBottom: '3px', width: '100%' }}>
                            Submit
                        </button>
                    ) : (
-                        <button className="submit-buttonsubmit" type="button" onClick={this.handleRenderClick} style={{ borderRadius: '2px' }}>
+                        <button className="submit-buttonsubmit" type="button" onClick={this.handleRenderClick} style={{ borderRadius: '2px', marginBottom: '3px', width: '100%' }}>
                            Redo the Question
                        </button>
                    )}
@@ -871,7 +925,7 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>()
                }}
                // onPointerUp={() => (this._isAnyChildContentActive = true)}
            >
-                {!this.layoutDoc[`_${this._props.fieldKey}_usePath`] && StrCast(RTFCast(DocCast(this.dataDoc[this.fieldKey + '_1']).text)?.Text) === '' && !this.childActive ? <p className="explain">Enter text in the flashcard. </p> : null}
+                {/* {!this.layoutDoc[`_${this._props.fieldKey}_usePath`] && StrCast(RTFCast(DocCast(this.dataDoc[this.fieldKey + '_1']).text)?.Text) === '' && !this.childActive ? <p className="explain">Enter text in the flashcard. </p> : null} */}
                {displayBox(`${this.fieldKey}_${side === 0 ? 1 : 0}`, side, this._props.PanelWidth() - 3)}
                {this._loading ? (
                    <div className="loading-spinner" style={{ position: 'absolute' }}>