Diffstat (limited to 'src/client')
-rw-r--r-- | src/client/apis/gpt/GPT.ts                                 |    2 |
-rw-r--r-- | src/client/views/nodes/ComparisonBox.tsx                   | 1629 |
-rw-r--r-- | src/client/views/nodes/ComparisonBoxNew.tsx                |  848 |
-rw-r--r-- | src/client/views/nodes/ImageBox.tsx                        |   93 |
-rw-r--r-- | src/client/views/nodes/LabelBigText.js                     |  270 |
-rw-r--r-- | src/client/views/nodes/LabelBox.scss                       |   35 |
-rw-r--r-- | src/client/views/nodes/LabelBox.tsx                        |   67 |
-rw-r--r-- | src/client/views/nodes/formattedText/FormattedTextBox.scss |   31 |
-rw-r--r-- | src/client/views/nodes/formattedText/FormattedTextBox.tsx  |   45 |
-rw-r--r-- | src/client/views/webcam/WebCamLogic.js                     |  292 |
10 files changed, 2763 insertions, 549 deletions
diff --git a/src/client/apis/gpt/GPT.ts b/src/client/apis/gpt/GPT.ts index 3550b6216..2e4dbe9a0 100644 --- a/src/client/apis/gpt/GPT.ts +++ b/src/client/apis/gpt/GPT.ts @@ -64,7 +64,7 @@ const callTypeMap: { [type: string]: GPTCallOpts } = { pronunciation: { model: 'gpt-4-turbo', maxTokens: 1024, - temp: 0.3, //0.3 + temp: 0.1, //0.3 prompt: '', }, }; diff --git a/src/client/views/nodes/ComparisonBox.tsx b/src/client/views/nodes/ComparisonBox.tsx index 139978a13..4a0820c95 100644 --- a/src/client/views/nodes/ComparisonBox.tsx +++ b/src/client/views/nodes/ComparisonBox.tsx @@ -26,7 +26,6 @@ import { DocumentView } from './DocumentView'; import { FieldView, FieldViewProps } from './FieldView'; import { FormattedTextBox } from './formattedText/FormattedTextBox'; import ReactLoading from 'react-loading'; - import { ContextMenu } from '../ContextMenu'; import { ContextMenuProps } from '../ContextMenuItem'; import { tickStep } from 'd3'; @@ -39,6 +38,7 @@ import ReactMarkdown from 'react-markdown'; import { WebField, nullAudio } from '../../../fields/URLField'; const API_URL = 'https://api.unsplash.com/search/photos'; + @observer export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>() { public static LayoutString(fieldKey: string) { @@ -62,7 +62,12 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>() @observable mathJaxConfig = { loader: { load: ['input/asciimath'] }, }; - private _ref = React.createRef<HTMLDivElement>(); + @observable private _listening = false; + @observable transcriptElement = ''; + @observable private frontSide = false; + SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition; + @observable recognition = new this.SpeechRecognition(); + _closeRef = React.createRef<HTMLDivElement>(); get revealOp() { return this.layoutDoc[`_${this._props.fieldKey}_revealOp`]; @@ -84,24 +89,20 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>() } @computed get overlayAlternateIcon() { - const usepath = this.layoutDoc[`_${this._props.fieldKey}_usePath`]; return ( <Tooltip title={<div className="dash-tooltip">flip</div>}> <div className="formattedTextBox-alternateButton" onPointerDown={e => setupMoveUpEvents(e.target, e, returnFalse, emptyFunction, () => { - if (!this.layoutDoc[`_${this._props.fieldKey}_revealOp`] || this.layoutDoc[`_${this._props.fieldKey}_revealOp`] === 'flip') { + if (!this.revealOp || this.revealOp === 'flip') { this.flipFlashcard(); - - // console.log('Print Front of cards: ' + (RTFCast(DocCast(this.dataDoc[this.fieldKey + '_0']).text)?.Text ?? '')); - // console.log('Print Back of cards: ' + (RTFCast(DocCast(this.dataDoc[this.fieldKey + '_1']).text)?.Text ?? '')); } }) } style={{ - background: this.revealOp === 'hover' ? 'gray' : usepath === 'alternate' ? 'white' : 'black', - color: this.revealOp === 'hover' ? 'black' : usepath === 'alternate' ? 'black' : 'white', + background: this.revealOp === 'hover' ? 'gray' : this.frontSide ? 'white' : 'black', + color: this.revealOp === 'hover' ? 'black' : this.frontSide ? 'black' : 'white', display: 'inline-block', }}> <div key="alternate" className="formattedTextBox-flip"> @@ -116,14 +117,8 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>() return ( <div> <Tooltip - title={ - this.layoutDoc[`_${this._props.fieldKey}_usePath`] === 'alternate' ? 
( - <div className="dash-tooltip">Flip to front side to use GPT</div> - ) : ( - <div className="dash-tooltip">Ask GPT to create an answer on the back side of the flashcard based on your question on the front</div> - ) - }> - <div style={{ position: 'absolute', bottom: '3px', right: '50px', cursor: 'pointer' }} onPointerDown={e => (!this.layoutDoc[`_${this._props.fieldKey}_usePath`] ? this.findImageTags() : null)}> + title={this.frontSide ? <div className="dash-tooltip">Flip to front side to use GPT</div> : <div className="dash-tooltip">Ask GPT to create an answer on the back side of the flashcard based on your question on the front</div>}> + <div style={{ position: 'absolute', bottom: '3px', right: '50px', cursor: 'pointer' }} onPointerDown={e => (!this.frontSide ? this.findImageTags() : null)}> <FontAwesomeIcon icon="lightbulb" size="xl" /> </div> </Tooltip> @@ -146,93 +141,18 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>() <FontAwesomeIcon color={this.revealOp === 'hover' ? 'blue' : 'black'} icon="hand-point-up" size="xl" /> </div> </Tooltip> - {/* <Tooltip title={<div className="dash-tooltip">Remove this side of the flashcard</div>}> - <div - style={{ position: 'absolute', bottom: '3px', right: '80px', cursor: 'pointer' }} - onPointerDown={e => this.closeDown(e, this.layoutDoc[`_${this._props.fieldKey}_usePath`] === 'alternate' ? this._props.fieldKey + '_1' : this._props.fieldKey + '_0')}> - <FontAwesomeIcon color={this.revealOp === 'hover' ? 'blue' : 'black'} icon="trash-can" size="xl" /> - </div> - </Tooltip> */} - {/* {this.overlayAlternateIcon} */} </div> ); } - componentDidMount() { - this._props.setContentViewBox?.(this); - reaction( - () => this._props.isSelected(), // when this reaction should update - selected => !selected && (this.childActive = false) // what it should update to - ); - } - protected createDropTarget = (ele: HTMLDivElement | null, fieldKey: string, disposerId: number) => { - this._disposers[disposerId]?.(); - // this.childActive = true; - if (ele) { - this._disposers[disposerId] = DragManager.MakeDropTarget(ele, (e, dropEvent) => this.internalDrop(e, dropEvent, fieldKey), this.layoutDoc); - } - }; - - private internalDrop = undoable((e: Event, dropEvent: DragManager.DropEvent, fieldKey: string) => { - if (dropEvent.complete.docDragData) { - const { droppedDocuments } = dropEvent.complete.docDragData; - const added = dropEvent.complete.docDragData.moveDocument?.(droppedDocuments, this.Document, (doc: Doc | Doc[]) => this.addDoc(toList(doc).lastElement(), fieldKey)); - Doc.SetContainer(droppedDocuments.lastElement(), this.dataDoc); - !added && e.preventDefault(); - e.stopPropagation(); // prevent parent Doc from registering new position so that it snaps back into place - // this.childActive = false; - return added; - } - return undefined; - }, 'internal drop'); - - private registerSliding = (e: React.PointerEvent<HTMLDivElement>, targetWidth: number) => { - if (e.button !== 2) { - setupMoveUpEvents( - this, - e, - this.onPointerMove, - emptyFunction, - action((moveEv, doubleTap) => { - if (doubleTap) { - this.childActive = true; - if (!this.dataDoc[this.fieldKey + '_1'] && !this.dataDoc[this.fieldKey]) this.dataDoc[this.fieldKey + '_1'] = DocUtils.copyDragFactory(Doc.UserDoc().emptyNote as Doc); - if (!this.dataDoc[this.fieldKey + '_2'] && !this.dataDoc[this.fieldKey + '_alternate']) this.dataDoc[this.fieldKey + '_2'] = DocUtils.copyDragFactory(Doc.UserDoc().emptyNote as Doc); - } - }), - false, - undefined, - action(() => { - 
if (this.childActive) return; - this._animating = 'all 200ms'; - // on click, animate slider movement to the targetWidth - this.layoutDoc[this.clipWidthKey] = (targetWidth * 100) / this._props.PanelWidth(); - // this.layoutDoc[this.clipHeightKey] = (targetWidth * 100) / this._props.PanelHeight(); - - setTimeout( - action(() => { - this._animating = ''; - }), - 200 - ); - }) - ); - } - }; - - // private onClick(e: React.PointerEvent<HTMLDivElement>) { - // setupMoveUpEvents( - // this, e, this.onPointerMOve, emptyFunction(), () => {this._isAnyChildContentActive = true;}, emptyFunction(), emptyFunction() - // ) - // } - - @action - private onPointerMove = ({ movementX }: PointerEvent) => { - const width = movementX * this.ScreenToLocalBoxXf().Scale + (this.clipWidth / 100) * this._props.PanelWidth(); - if (width && width > 5 && width < this._props.PanelWidth()) { - this.layoutDoc[this.clipWidthKey] = (width * 100) / this._props.PanelWidth(); + @action handleHover = () => { + if (this.revealOp === 'hover') { + this.layoutDoc[`_${this._props.fieldKey}_revealOp`] = 'flip'; + this.Document.forceActive = false; + } else { + this.layoutDoc[`_${this._props.fieldKey}_revealOp`] = 'hover'; + this.Document.forceActive = true; } - return false; }; @action handleInputChange = (e: React.ChangeEvent<HTMLTextAreaElement>) => { @@ -240,107 +160,62 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>() console.log(this._inputValue); }; - // this.closeDown(e, this.layoutDoc[`_${this._props.fieldKey}_usePath`] === 'alternate' ? this.fieldKey + '_0' : this.fieldKey + '_1')} @action activateContent = () => { this.childActive = true; }; + @action handleRenderClick = () => { + this.frontSide = !this.frontSide; + }; + @action handleRenderGPTClick = async () => { - // Call the GPT model and get the output - // await this.pushInfo(); - // console.log('PHONETIC TRANSCRIPTION: ' + DocCast(this._audio)[DocData]); - // this.Document.audio = this._audio; console.log('Phonetic transcription: ' + DocCast(this.Document.audio).phoneticTranscription); const phonTrans = DocCast(this.Document.audio).phoneticTranscription; - // const phonTrans = 's'; if (phonTrans) { - // console.log(phonTrans.toString()); this._inputValue = StrCast(phonTrans); console.log('INPUT:' + this._inputValue); this.askGPTPhonemes(this._inputValue); } else if (this._inputValue) this.askGPT(GPTCallType.QUIZ); - this.layoutDoc[`_${this._props.fieldKey}_usePath`] = 'alternate'; + this.frontSide = false; this._outputValue = ''; }; - askGPTPhonemes = async (phonemes: string) => { - const sentence = StrCast(RTFCast(DocCast(this.dataDoc[this.fieldKey + '_1']).text)?.Text); - const phon = 'w ʌ ɪ z j ɔː ɹ n e ɪ m '; - const phon2 = 'h ʌ ɛ r j ʌ t ʌ d eɪ'; - const phon3 = 'ʃ eɪ oʊ s i ʃ oʊ z b aɪ ð ə s iː ʃ oʊ'; - const phon4 = 'kamo estas hɔi'; - const phon5 = 'la s e n a l'; - console.log('REG' + this.recognition.lang); - const promptEng = - 'Consider all possible phonetic transcriptions of the intended sentence "' + - sentence + - '" that is standard in American speech without showing the user. Compare each word in the following phonemes with those phonetic transcriptions without displaying anything to the user: "' + - phonemes + - '". Steps to do this: Align the words with each word in the intended sentence by combining the phonemes to get a pronunciation that resembles the word in order. Do not describe phonetic corrections with the phonetic alphabet - describe it by providing other examples of how it should sound. 
Note if a word or sound missing, including missing vowels and consonants. If there is an additional word that does not match with the provided sentence, say so. For each word, if any letters mismatch and would sound weird in American speech and they are not allophones of the same phoneme and they are far away from each on the ipa vowel chat and that pronunciation is not normal for the meaning of the word, note this difference and explain how it is supposed to sound. Only note the difference if they are not allophones of the same phoneme and if they are far away on the vowel chart. The goal is to be understood, not sound like a native speaker. Just so you know, "i" sounds like "ee" as in "bee", not "ih" as an "lick". Interpret "ɹ" as the same as "r". Interpret "ʌ" as the same as "ə". If "ɚ", "ɔː", and "ɔ" are options for pronunciation, do not choose "ɚ". Ignore differences with colons. Ignore redundant letters and words and sounds and the splitting of words; do not mention this since there could be repeated words in the sentence. Provide a response like this: "Lets work on improving the pronunciation of "coffee." You said "ceeffee," which is close, but we need to adjust the vowel sound. In American English, "coffee" is pronounced /ˈkɔːfi/, with a long "aw" sound. Try saying "kah-fee." Your intonation is good, but try putting a bit more stress on "like" in the sentence "I would like a coffee with milk." This will make your speech sound more natural. Keep practicing, and lets try saying the whole sentence again!"'; - const promptSpa = - 'Consider all possible phonetic transcriptions of the intended sentence "' + - 'como estás hoy' + - '" that is standard in Spanish speech without showing the user. Compare each word in the following phonemes with those phonetic transcriptions without displaying anything to the user: "' + - phon4 + - '". Steps to do this: Align the words with each word in the intended sentence by combining the phonemes to get a pronunciation that resembles the word in order. Do not describe phonetic corrections with the phonetic alphabet - describe it by providing other examples of how it should sound. Note if a word or sound missing, including missing vowels and consonants. If there is an additional word that does not match with the provided sentence, say so. For each word, if any letters mismatch and would sound weird in Spanish speech and they are not allophones of the same phoneme and they are far away from each on the ipa vowel chat and that pronunciation is not normal for the meaning of the word, note this difference and explain how it is supposed to sound. Only note the difference if they are not allophones of the same phoneme and if they are far away on the vowel chart; say good job if it would be understood by a native Spanish speaker. Just so you know, "i" sounds like "ee" as in "bee", not "ih" as an "lick". Interpret "ɹ" as the same as "r". Interpret "ʌ" as the same as "ə". Do not make "θ" and "f" interchangable. Do not make "n" and "ɲ" interchangable. Do not make "e" and "i" interchangable. If "ɚ", "ɔː", and "ɔ" are options for pronunciation, do not choose "ɚ". Ignore differences with colons. Ignore redundant letters and words and sounds and the splitting of words; do not mention this since there could be repeated words in the sentence. Identify "ɔi" sounds like "oy". 
Ignore accents and do not say anything to the user about this.'; - const promptAll = - 'Consider all possible phonetic transcriptions of the intended sentence "' + - sentence + - '" that is standard in ' + - this.convertAbr() + - ' speech without showing the user. Compare each word in the following phonemes with those phonetic transcriptions without displaying anything to the user: "' + - phonemes + - '". Steps to do this: Align the words with each word in the intended sentence by combining the phonemes to get a pronunciation that resembles the word in order. Do not describe phonetic corrections with the phonetic alphabet - describe it by providing other examples of how it should sound. Note if a word or sound missing, including missing vowels and consonants. If there is an additional word that does not match with the provided sentence, say so. For each word, if any letters mismatch and would sound weird in ' + - this.convertAbr() + - ' speech and they are not allophones of the same phoneme and they are far away from each on the ipa vowel chat and that pronunciation is not normal for the meaning of the word, note this difference and explain how it is supposed to sound. Just so you know, "i" sounds like "ee" as in "bee", not "ih" as an "lick". Interpret "ɹ" as the same as "r". Interpret "ʌ" as the same as "ə". Do not make "θ" and "f" interchangable. Do not make "n" and "ɲ" interchangable. Do not make "e" and "i" interchangable. If "ɚ", "ɔː", and "ɔ" are options for pronunciation, do not choose "ɚ". Ignore differences with colons. Ignore redundant letters and words and sounds and the splitting of words; do not mention this since there could be repeated words in the sentence. Provide a response like this: "Lets work on improving the pronunciation of "coffee." You said "cawffee," which is close, but we need to adjust the vowel sound. In American English, "coffee" is pronounced /ˈkɔːfi/, with a long "aw" sound. Try saying "kah-fee." Your intonation is good, but try putting a bit more stress on "like" in the sentence "I would like a coffee with milk." This will make your speech sound more natural. 
Keep practicing, and lets try saying the whole sentence again!"'; - - switch (this.recognition.lang) { - case 'en-US': - console.log('English'); - this._outputValue = await gptAPICall(promptEng, GPTCallType.PRONUNCIATION); - break; - case 'es-ES': - console.log('Spanish'); - this._outputValue = await gptAPICall(promptSpa, GPTCallType.PRONUNCIATION); - break; - default: - console.log('All'); - this._outputValue = await gptAPICall(promptAll, GPTCallType.PRONUNCIATION); - break; + @action + private onPointerMove = ({ movementX }: PointerEvent) => { + const width = movementX * this.ScreenToLocalBoxXf().Scale + (this.clipWidth / 100) * this._props.PanelWidth(); + if (width && width > 5 && width < this._props.PanelWidth()) { + this.layoutDoc[this.clipWidthKey] = (width * 100) / this._props.PanelWidth(); } + return false; }; - pushInfo = async () => { - const formData = new FormData(); - - console.log(DocCast(this._audio).dataDoc); - const audio = { - file: this._audio.url, - }; - const response = await axios.post('http://localhost:105/recognize/', audio, { - headers: { - 'Content-Type': 'application/json', - }, - }); - this.Document.phoneticTranscription = response.data['transcription']; - console.log('RESPONSE: ' + response.data['transcription']); - }; + componentDidMount() { + this._props.setContentViewBox?.(this); + reaction( + () => this._props.isSelected(), // when this reaction should update + selected => !selected && (this.childActive = false) // what it should update to + ); + } - @action handleHover = () => { - if (this.revealOp === 'hover') { - this.layoutDoc[`_${this._props.fieldKey}_revealOp`] = 'flip'; - this.Document.forceActive = false; - } else { - this.layoutDoc[`_${this._props.fieldKey}_revealOp`] = 'hover'; - this.Document.forceActive = true; + protected createDropTarget = (ele: HTMLDivElement | null, fieldKey: string, disposerId: number) => { + this._disposers[disposerId]?.(); + if (ele) { + this._disposers[disposerId] = DragManager.MakeDropTarget(ele, (e, dropEvent) => this.internalDrop(e, dropEvent, fieldKey), this.layoutDoc); } - //this.revealOp === 'hover' ? 
(this.layoutDoc[`_${this._props.fieldKey}_revealOp`] = 'flip') : (this.layoutDoc[`_${this._props.fieldKey}_revealOp`] = 'hover'); }; - @action handleRenderClick = () => { - // Call the GPT model and get the output - this.layoutDoc[`_${this._props.fieldKey}_usePath`] = undefined; - }; + private internalDrop = undoable((e: Event, dropEvent: DragManager.DropEvent, fieldKey: string) => { + if (dropEvent.complete.docDragData) { + const { droppedDocuments } = dropEvent.complete.docDragData; + const added = dropEvent.complete.docDragData.moveDocument?.(droppedDocuments, this.Document, (doc: Doc | Doc[]) => this.addDoc(toList(doc).lastElement(), fieldKey)); + Doc.SetContainer(droppedDocuments.lastElement(), this.dataDoc); + !added && e.preventDefault(); + e.stopPropagation(); // prevent parent Doc from registering new position so that it snaps back into place + // this.childActive = false; + return added; + } + return undefined; + }, 'internal drop'); getAnchor = (addAsAnnotation: boolean, pinProps?: PinProps) => { const anchor = Docs.Create.ConfigDocument({ @@ -359,13 +234,9 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>() }; clearDoc = undoable((fieldKey: string) => { - // delete this.dataDoc[fieldKey]; this.dataDoc[fieldKey] = undefined; this._isEmpty = true; - // this.dataDoc[fieldKey] = 'empty'; - console.log('HERE' + fieldKey + ';'); }, 'clear doc'); - // clearDoc = (fieldKey: string) => delete this.dataDoc[fieldKey]; moveDoc = (doc: Doc, addDocument: (document: Doc | Doc[]) => boolean, which: string) => this.remDoc(doc, which) && addDocument(doc); addDoc = (doc: Doc, which: string) => { @@ -409,39 +280,108 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>() remDoc1 = (docs: Doc | Doc[]) => toList(docs).reduce((res, doc) => res && this.remDoc(doc, this.fieldKey + '_1'), true); remDoc2 = (docs: Doc | Doc[]) => toList(docs).reduce((res, doc) => res && this.remDoc(doc, this.fieldKey + '_2'), true); - /** - * Tests for whether a comparison box slot (ie, before or after) has renderable text content. - * If it does, render a FormattedTextBox for that slot that references the comparisonBox's slot field - * @param whichSlot field key for start or end slot - * @returns a JSX layout string if a text field is found, othwerise undefined - */ - testForTextFields = (whichSlot: string) => { - const slotData = Doc.Get(this.dataDoc, whichSlot, true); - const slotHasText = slotData instanceof RichTextField || typeof slotData === 'string'; - const subjectText = RTFCast(this.Document[this.fieldKey])?.Text.trim(); - const altText = RTFCast(this.Document[this.fieldKey + '_alternate'])?.Text.trim(); - const layoutTemplateString = - slotHasText ? FormattedTextBox.LayoutString(whichSlot): - whichSlot.endsWith('1') ? (subjectText !== undefined ? FormattedTextBox.LayoutString(this.fieldKey) : undefined) : - altText !== undefined ? 
FormattedTextBox.LayoutString(this.fieldKey + '_alternate'): undefined; // prettier-ignore + private registerSliding = (e: React.PointerEvent<HTMLDivElement>, targetWidth: number) => { + if (e.button !== 2) { + setupMoveUpEvents( + this, + e, + this.onPointerMove, + emptyFunction, + action((moveEv, doubleTap) => { + if (doubleTap) { + this.childActive = true; + if (!this.dataDoc[this.fieldKey + '_1'] && !this.dataDoc[this.fieldKey]) this.dataDoc[this.fieldKey + '_1'] = DocUtils.copyDragFactory(Doc.UserDoc().emptyNote as Doc); + if (!this.dataDoc[this.fieldKey + '_2'] && !this.dataDoc[this.fieldKey + '_alternate']) this.dataDoc[this.fieldKey + '_2'] = DocUtils.copyDragFactory(Doc.UserDoc().emptyNote as Doc); + } + }), + false, + undefined, + action(() => { + if (this.childActive) return; + this._animating = 'all 200ms'; + // on click, animate slider movement to the targetWidth + this.layoutDoc[this.clipWidthKey] = (targetWidth * 100) / this._props.PanelWidth(); + // this.layoutDoc[this.clipHeightKey] = (targetWidth * 100) / this._props.PanelHeight(); - // A bit hacky to try out the concept of using GPT to fill in flashcards - // If the second slot doesn't have anything in it, but the fieldKey slot has text (e.g., this.text is a string) - // and the fieldKey + "_alternate" has text that includes a GPT query (indicated by (( && )) ) that is parameterized (optionally) by the fieldKey text (this) or other metadata (this.<field>). - // eg., this.text_alternate is - // "((Provide a one sentence definition for (this) that doesn't use any word in (this.excludeWords) ))" - // where (this) is replaced by the text in the fieldKey slot abd this.excludeWords is repalced by the conetnts of the excludeWords field - // The GPT call will put the "answer" in the second slot of the comparison (eg., text_2) - if (whichSlot.endsWith('2') && !layoutTemplateString?.includes(whichSlot)) { - const queryText = altText?.replace('(this)', subjectText); // TODO: this should be done in Doc.setField but it doesn't know about the fieldKey ... 
- if (queryText?.match(/\(\(.*\)\)/)) { - Doc.SetField(this.Document, whichSlot, ':=' + queryText, false); // make the second slot be a computed field on the data doc that calls ChatGpt - } + setTimeout( + action(() => { + this._animating = ''; + }), + 200 + ); + }) + ); } - return layoutTemplateString; }; - _closeRef = React.createRef<HTMLDivElement>(); + setListening = () => { + const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition; + if (SpeechRecognition) { + this.recognition.continuous = true; + this.recognition.interimResults = true; + this.recognition.lang = 'en-US'; + this.recognition.onresult = this.handleResult.bind(this); + } + ContextMenu.Instance.setLangIndex(0); + }; + + startListening = () => { + this.recognition.start(); + this._listening = true; + }; + + stopListening = () => { + this.recognition.stop(); + this._listening = false; + }; + + setLanguage = (e: React.MouseEvent, language: string, ind: number) => { + this.recognition.lang = language; + ContextMenu.Instance.setLangIndex(ind); + }; + + convertAbr = () => { + switch (this.recognition.lang) { + case 'en-US': return 'English'; //prettier-ignore + case 'es-ES': return 'Spanish'; //prettier-ignore + case 'fr-FR': return 'French'; //prettier-ignore + case 'it-IT': return 'Italian'; //prettier-ignore + case 'zh-CH': return 'Mandarin Chinese'; //prettier-ignore + case 'ja': return 'Japanese'; //prettier-ignore + default: return 'Korean'; //prettier-ignore + } + }; + + openContextMenu = (x: number, y: number, evalu: boolean) => { + ContextMenu.Instance.clearItems(); + ContextMenu.Instance.addItem({ description: 'English', event: e => this.setLanguage(e, 'en-US', 0) }); //prettier-ignore + ContextMenu.Instance.addItem({ description: 'Spanish', event: e => this.setLanguage(e, 'es-ES', 1 )}); //prettier-ignore + ContextMenu.Instance.addItem({ description: 'French', event: e => this.setLanguage(e, 'fr-FR', 2) }); //prettier-ignore + ContextMenu.Instance.addItem({ description: 'Italian', event: e => this.setLanguage(e, 'it-IT', 3) }); //prettier-ignore + if (!evalu) ContextMenu.Instance.addItem({ description: 'Mandarin Chinese', event: e => this.setLanguage(e, 'zh-CH', 4) }); //prettier-ignore + ContextMenu.Instance.addItem({ description: 'Japanese', event: e => this.setLanguage(e, 'ja', 5) }); //prettier-ignore + ContextMenu.Instance.addItem({ description: 'Korean', event: e => this.setLanguage(e, 'ko', 6) }); //prettier-ignore + ContextMenu.Instance.displayMenu(x, y); + }; + + evaluatePronunciation = () => { + const newAudio = Docs.Create.AudioDocument(nullAudio, { _width: 200, _height: 100 }); + this.Document.audio = newAudio[DocData]; + this._props.DocumentView?.()._props.addDocument?.(newAudio); + }; + + pushInfo = async () => { + const audio = { + file: this._audio.url, + }; + const response = await axios.post('http://localhost:105/recognize/', audio, { + headers: { + 'Content-Type': 'application/json', + }, + }); + this.Document.phoneticTranscription = response.data['transcription']; + console.log('RESPONSE: ' + response.data['transcription']); + }; createFlashcardPile(collectionArr: Doc[], gpt: boolean) { const newCol = Docs.Create.CarouselDocument(collectionArr, { @@ -453,7 +393,6 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>() newCol['x'] = this.layoutDoc['x']; newCol['y'] = NumCast(this.layoutDoc['y']) + 50; newCol.type_collection = 'carousel'; - // console.log(newCol.data); if (gpt) { this._props.DocumentView?.()._props.addDocument?.(newCol); @@ 
-467,8 +406,6 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>() gptFlashcardPile = async () => { var text = await this.askGPT(GPTCallType.STACK); - console.log(text); - var senArr = text?.split('Question: '); var collectionArr: Doc[] = []; for (let i = 1; i < senArr?.length!; i++) { @@ -477,8 +414,6 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>() if (StrCast(senArr![i]).includes('Keyword: ')) { const question = StrCast(senArr![i]).split('Keyword: '); const img = await this.fetchImages(question[1]); - // newDoc['image'] = img; - // const newDoc = Docs.Create.TextDocument(dataSplit[1]); const textSide1 = question[0].includes('Answer: ') ? question[0].split('Answer: ')[0] : question[0]; const textDoc1 = Docs.Create.TextDocument(question[0]); const rtfiel = new RichTextField( @@ -503,54 +438,13 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>() textDoc1[DocData].text = rtfiel; DocCast(newDoc)[DocData][this.fieldKey + '_1'] = textDoc1; - DocCast(newDoc)[DocData][this.fieldKey + '_0'] = Docs.Create.TextDocument(question[0].includes('Answer: ') ? question[0].split('Answer: ')[1] : question[1]); - // Doc.AddToMyOverlay(img!); } - collectionArr.push(newDoc); } this.createFlashcardPile(collectionArr, true); }; - /** - * Flips a flashcard to the alternate side for the user to view. - */ - flipFlashcard = () => { - const usePath = this.layoutDoc[`_${this._props.fieldKey}_usePath`]; - this.layoutDoc[`_${this._props.fieldKey}_usePath`] = usePath === undefined ? 'alternate' : undefined; - }; - - /** - * Changes the view option to hover for a flashcard. - */ - hoverFlip = (side: string | undefined) => { - if (this.layoutDoc[`_${this._props.fieldKey}_revealOp`] === 'hover') this.layoutDoc[`_${this._props.fieldKey}_usePath`] = side; - }; - - animateRes = (resIndex: number, newText: string, callType: GPTCallType) => { - if (resIndex < newText.length) { - // const marks = this._editorView?.state.storedMarks ?? []; - switch (callType) { - case GPTCallType.CHATCARD: - DocCast(this.dataDoc[this.props.fieldKey + '_0'])[DocData].text += newText[resIndex]; - break; - case GPTCallType.QUIZ: - this._outputValue += newText[resIndex]; - break; - default: - return; - } - - // this._editorView?.dispatch(this._editorView?.state.tr.insertText(newText[resIndex]).setStoredMarks(this._outputValue)); - setTimeout(() => this.animateRes(resIndex + 1, newText, callType), 20); - } - }; - - /** - * Calls the GPT model to create QuizCards. Evaluates how similar the user's response is to the alternate - * side of the flashcard. - */ askGPT = async (callType: GPTCallType): Promise<string | undefined> => { const questionText = 'Question: ' + StrCast(RTFCast(DocCast(this.dataDoc[this.fieldKey + '_1']).text)?.Text); const rubricText = ' Rubric: ' + StrCast(RTFCast(DocCast(this.dataDoc[this.fieldKey + '_0']).text)?.Text); @@ -565,30 +459,24 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>() this.flipFlashcard(); } try { + console.log(queryText); const res = await gptAPICall(callType == GPTCallType.QUIZ ? 
queryText : questionText, callType); if (!res) { console.error('GPT call failed'); return; } - // this.animateRes(0, res, callType); if (callType == GPTCallType.CHATCARD) { DocCast(this.dataDoc[this.props.fieldKey + '_0'])[DocData].text = res; - // this.flipFlashcard(); } else if (callType == GPTCallType.QUIZ) { console.log(this._inputValue); this._outputValue = res.replace(/UserAnswer/g, "user's answer").replace(/Rubric/g, 'rubric'); - } - // DocCast(this.dataDoc[this.props.fieldKey + '_0'])[DocData].text = res; - // this._outputValue = res; - else if (callType === GPTCallType.FLASHCARD) { - // console.log(res); + } else if (callType === GPTCallType.FLASHCARD) { this._loading = false; return res; } else if (callType === GPTCallType.STACK) { } this._loading = false; return res; - // console.log(res); } catch (err) { console.error('GPT call failed'); } @@ -597,16 +485,8 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>() layoutWidth = () => NumCast(this.layoutDoc.width, 200); layoutHeight = () => NumCast(this.layoutDoc.height, 200); - // specificMenu = (): void => { - // const cm = ContextMenu.Instance; - // cm.addItem({ description: 'Create an Answer on the Back', event: () => this.askGPT(GPTCallType.CHATCARD), icon: 'pencil' }); - // }; - findImageTags = async () => { - // const d = DocCast(this.dataDoc[this.props.fieldKey + '_0']); - // const copy = Doc.MakeCopy(this.Document, true); const c = this.DocumentView?.().ContentDiv!.getElementsByTagName('img'); - // this.ProseRef?.getElementsByTagName('img'); if (c?.length === 0) await this.askGPT(GPTCallType.CHATCARD); if (c) { this._loading = true; @@ -615,37 +495,65 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>() if (i.className !== 'ProseMirror-separator') await this.getImageDesc(i.src); } this._loading = false; - // this.flipFlashcard(); } - // console.log('HI' + this.ProseRef?.getElementsByTagName('img')); }; - static imageUrlToBase64 = async (imageUrl: string): Promise<string> => { - try { - const response = await fetch(imageUrl); - const blob = await response.blob(); + askGPTPhonemes = async (phonemes: string) => { + const sentence = StrCast(RTFCast(DocCast(this.dataDoc[this.fieldKey + '_1']).text)?.Text); + const phon = 'w ʌ ɪ z j ɔː ɹ n e ɪ m '; + const phon2 = 'h ʌ ɛ r j ʌ t ʌ d eɪ'; + const phon6 = 'huː ɑɹ juː tədeɪ'; + const phon3 = 'ʃ eɪ oʊ s i ʃ oʊ z b aɪ ð ə s iː ʃ oʊ'; + const phon4 = 'kamo estas hɔi'; + const phon5 = 'la s e n a l'; + const promptEng = + 'Consider all possible phonetic transcriptions of the intended sentence "' + + sentence + + '" that is standard in American speech without showing the user. Compare each word in the following phonemes with those phonetic transcriptions without displaying anything to the user: "' + + phon6 + + '". Steps to do this: Align the words with each word in the intended sentence by combining the phonemes to get a pronunciation that resembles the word in order. Do not describe phonetic corrections with the phonetic alphabet - describe it by providing other examples of how it should sound. Note if a word or sound missing, including missing vowels and consonants. If there is an additional word that does not match with the provided sentence, say so. 
For each word, if any letters mismatch and would sound weird in American speech and they are not allophones of the same phoneme and they are far away from each on the ipa vowel chat and that pronunciation is not normal for the meaning of the word, note this difference and explain how it is supposed to sound. Only note the difference if they are not allophones of the same phoneme and if they are far away on the vowel chart. The goal is to be understood, not sound like a native speaker. Just so you know, "i" sounds like "ee" as in "bee", not "ih" as an "lick". Interpret "ɹ" as the same as "r". Interpret "ʌ" as the same as "ə". If "ɚ", "ɔː", and "ɔ" are options for pronunciation, do not choose "ɚ". Ignore differences with colons. Ignore redundant letters and words and sounds and the splitting of words; do not mention this since there could be repeated words in the sentence. Provide a response like this: "Lets work on improving the pronunciation of "coffee." You said "ceeffee," which is close, but we need to adjust the vowel sound. In American English, "coffee" is pronounced /ˈkɔːfi/, with a long "aw" sound. Try saying "kah-fee." Your intonation is good, but try putting a bit more stress on "like" in the sentence "I would like a coffee with milk." This will make your speech sound more natural. Keep practicing, and lets try saying the whole sentence again!"'; + const promptSpa = + 'Consider all possible phonetic transcriptions of the intended sentence "' + + 'como estás hoy' + + '" that is standard in Spanish speech without showing the user. Compare each word in the following phonemes with those phonetic transcriptions without displaying anything to the user: "' + + phon4 + + '". Steps to do this: Align the words with each word in the intended sentence by combining the phonemes to get a pronunciation that resembles the word in order. Do not describe phonetic corrections with the phonetic alphabet - describe it by providing other examples of how it should sound. Note if a word or sound missing, including missing vowels and consonants. If there is an additional word that does not match with the provided sentence, say so. For each word, if any letters mismatch and would sound weird in Spanish speech and they are not allophones of the same phoneme and they are far away from each on the ipa vowel chat and that pronunciation is not normal for the meaning of the word, note this difference and explain how it is supposed to sound. Only note the difference if they are not allophones of the same phoneme and if they are far away on the vowel chart; say good job if it would be understood by a native Spanish speaker. Just so you know, "i" sounds like "ee" as in "bee", not "ih" as an "lick". Interpret "ɹ" as the same as "r". Interpret "ʌ" as the same as "ə". Do not make "θ" and "f" interchangable. Do not make "n" and "ɲ" interchangable. Do not make "e" and "i" interchangable. If "ɚ", "ɔː", and "ɔ" are options for pronunciation, do not choose "ɚ". Ignore differences with colons. Ignore redundant letters and words and sounds and the splitting of words; do not mention this since there could be repeated words in the sentence. Identify "ɔi" sounds like "oy". Ignore accents and do not say anything to the user about this.'; + const promptAll = + 'Consider all possible phonetic transcriptions of the intended sentence "' + + sentence + + '" that is standard in ' + + this.convertAbr() + + ' speech without showing the user. 
Compare each word in the following phonemes with those phonetic transcriptions without displaying anything to the user: "' + + phonemes + + '". Steps to do this: Align the words with each word in the intended sentence by combining the phonemes to get a pronunciation that resembles the word in order. Do not describe phonetic corrections with the phonetic alphabet - describe it by providing other examples of how it should sound. Note if a word or sound missing, including missing vowels and consonants. If there is an additional word that does not match with the provided sentence, say so. For each word, if any letters mismatch and would sound weird in ' + + this.convertAbr() + + ' speech and they are not allophones of the same phoneme and they are far away from each on the ipa vowel chat and that pronunciation is not normal for the meaning of the word, note this difference and explain how it is supposed to sound. Just so you know, "i" sounds like "ee" as in "bee", not "ih" as an "lick". Interpret "ɹ" as the same as "r". Interpret "ʌ" as the same as "ə". Do not make "θ" and "f" interchangable. Do not make "n" and "ɲ" interchangable. Do not make "e" and "i" interchangable. If "ɚ", "ɔː", and "ɔ" are options for pronunciation, do not choose "ɚ". Ignore differences with colons. Ignore redundant letters and words and sounds and the splitting of words; do not mention this since there could be repeated words in the sentence. Provide a response like this: "Lets work on improving the pronunciation of "coffee." You said "cawffee," which is close, but we need to adjust the vowel sound. In American English, "coffee" is pronounced /ˈkɔːfi/, with a long "aw" sound. Try saying "kah-fee." Your intonation is good, but try putting a bit more stress on "like" in the sentence "I would like a coffee with milk." This will make your speech sound more natural. Keep practicing, and lets try saying the whole sentence again!"'; - return new Promise((resolve, reject) => { - const reader = new FileReader(); - reader.readAsDataURL(blob); - reader.onloadend = () => resolve(reader.result as string); - reader.onerror = error => reject(error); - }); - } catch (error) { - console.error('Error:', error); - throw error; + switch (this.recognition.lang) { + case 'en-US': + this._outputValue = await gptAPICall(promptEng, GPTCallType.PRONUNCIATION); + break; + case 'es-ES': + this._outputValue = await gptAPICall(promptSpa, GPTCallType.PRONUNCIATION); + break; + default: + this._outputValue = await gptAPICall(promptAll, GPTCallType.PRONUNCIATION); + break; } }; - getImageDesc = async (u: string) => { - try { - const hrefBase64 = await ComparisonBox.imageUrlToBase64(u); - const response = await gptImageLabel(hrefBase64, 'Answer the following question as a short flashcard response. Do not include a label.' 
+ (this.dataDoc.text as RichTextField)?.Text); - - DocCast(this.dataDoc[this.props.fieldKey + '_0'])[DocData].text = response; - } catch (error) { - console.log('Error'); + handleResult = (e: SpeechRecognitionEvent) => { + let interimTranscript = ''; + let finalTranscript = ''; + for (let i = e.resultIndex; i < e.results.length; i++) { + const transcript = e.results[i][0].transcript; + if (e.results[i].isFinal) { + finalTranscript += transcript; + } else { + interimTranscript += transcript; + } } + this._inputValue += finalTranscript; }; fetchImages = async (selection: string) => { @@ -670,112 +578,67 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>() } }; - // handleSelection = async (selection: string, newDoc: Doc) => { - // const images = await this.fetchImages(selection); - // return images; - // // Doc.AddDocToList(Doc.MyRecentlyClosed, 'data', dashDoc, undefined, true, true); - // images!.embedContainer = newDoc; - // Doc.AddEmbedding(newDoc, images!); - // const c = this.DocumentView?.().ContentDiv!.getElementsByClassName('afterBox-cont'); - // for (let i in c) { - // console.log('HERE' + i); - // } - // this.addDoc(images!, this.fieldKey + '_0'); - // Doc.AddEmbedding(newDoc, images!); - // this._props. - // Doc.AddToMyOverlay(images!); - // const node = schema.nodes.dashDoc.create({ - // width: NumCast(images?._width), - // height: NumCast(images?._height), - // title: 'dashDoc', - // docId: images![Id], - // float: 'unset', - // }); - // }; - - @observable private _listening = false; - @observable transcriptElement = ''; - SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition; - @observable recognition = new this.SpeechRecognition(); + static imageUrlToBase64 = async (imageUrl: string): Promise<string> => { + try { + const response = await fetch(imageUrl); + const blob = await response.blob(); - handleResult = (e: SpeechRecognitionEvent) => { - let interimTranscript = ''; - let finalTranscript = ''; - for (let i = e.resultIndex; i < e.results.length; i++) { - const transcript = e.results[i][0].transcript; - if (e.results[i].isFinal) { - finalTranscript += transcript; - } else { - interimTranscript += transcript; - } + return new Promise((resolve, reject) => { + const reader = new FileReader(); + reader.readAsDataURL(blob); + reader.onloadend = () => resolve(reader.result as string); + reader.onerror = error => reject(error); + }); + } catch (error) { + console.error('Error:', error); + throw error; } - console.log(interimTranscript); - this._inputValue += finalTranscript; }; - setListening = () => { - const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition; - if (SpeechRecognition) { - this.recognition.continuous = true; - this.recognition.interimResults = true; - this.recognition.lang = 'en-US'; - this.recognition.onresult = this.handleResult.bind(this); + getImageDesc = async (u: string) => { + try { + const hrefBase64 = await ComparisonBox.imageUrlToBase64(u); + const response = await gptImageLabel(hrefBase64, 'Answer the following question as a short flashcard response. Do not include a label.' 
+ (this.dataDoc.text as RichTextField)?.Text); + + DocCast(this.dataDoc[this.props.fieldKey + '_0'])[DocData].text = response; + } catch (error) { + console.log('Error'); } - ContextMenu.Instance.setLangIndex(0); }; - setLanguage = (e: React.MouseEvent, language: string, ind: number) => { - this.recognition.lang = language; - ContextMenu.Instance.setLangIndex(ind); + @action + flipFlashcard = () => { + this.frontSide = !this.frontSide; }; - startListening = () => { - this.recognition.start(); - this._listening = true; + hoverFlip = (side: boolean) => { + if (this.revealOp === 'hover') this.layoutDoc[`_${this._props.fieldKey}_usePath`] = side; }; - stopListening = () => { - this.recognition.stop(); - this._listening = false; - }; + testForTextFields = (whichSlot: string) => { + const slotData = Doc.Get(this.dataDoc, whichSlot, true); + const slotHasText = slotData instanceof RichTextField || typeof slotData === 'string'; + const subjectText = RTFCast(this.Document[this.fieldKey])?.Text.trim(); + const altText = RTFCast(this.Document[this.fieldKey + '_alternate'])?.Text.trim(); + const layoutTemplateString = + slotHasText ? FormattedTextBox.LayoutString(whichSlot): + whichSlot.endsWith('1') ? (subjectText !== undefined ? FormattedTextBox.LayoutString(this.fieldKey) : undefined) : + altText !== undefined ? FormattedTextBox.LayoutString(this.fieldKey + '_alternate'): undefined; // prettier-ignore - convertAbr = () => { - switch (this.recognition.lang) { - case 'en-US': - return 'English'; - case 'es-ES': - return 'Spanish'; - case 'fr-FR': - return 'French'; - case 'it-IT': - return 'Italian'; - case 'zh-CH': - return 'Mandarin Chinese'; - case 'ja': - return 'Japanese'; - default: - return 'Korean'; + // A bit hacky to try out the concept of using GPT to fill in flashcards + // If the second slot doesn't have anything in it, but the fieldKey slot has text (e.g., this.text is a string) + // and the fieldKey + "_alternate" has text that includes a GPT query (indicated by (( && )) ) that is parameterized (optionally) by the fieldKey text (this) or other metadata (this.<field>). + // eg., this.text_alternate is + // "((Provide a one sentence definition for (this) that doesn't use any word in (this.excludeWords) ))" + // where (this) is replaced by the text in the fieldKey slot abd this.excludeWords is repalced by the conetnts of the excludeWords field + // The GPT call will put the "answer" in the second slot of the comparison (eg., text_2) + if (whichSlot.endsWith('2') && !layoutTemplateString?.includes(whichSlot)) { + const queryText = altText?.replace('(this)', subjectText); // TODO: this should be done in Doc.setField but it doesn't know about the fieldKey ... 
+ if (queryText?.match(/\(\(.*\)\)/)) { + Doc.SetField(this.Document, whichSlot, ':=' + queryText, false); // make the second slot be a computed field on the data doc that calls ChatGpt + } } - }; - - openContextMenu = (x: number, y: number, evalu: boolean) => { - ContextMenu.Instance.clearItems(); - ContextMenu.Instance.addItem({ description: 'English', event: e => this.setLanguage(e, 'en-US', 0) }); //prettier-ignore - ContextMenu.Instance.addItem({ description: 'Spanish', event: e => this.setLanguage(e, 'es-ES', 1 )}); //prettier-ignore - ContextMenu.Instance.addItem({ description: 'French', event: e => this.setLanguage(e, 'fr-FR', 2) }); //prettier-ignore - ContextMenu.Instance.addItem({ description: 'Italian', event: e => this.setLanguage(e, 'it-IT', 3) }); //prettier-ignore - if (!evalu) ContextMenu.Instance.addItem({ description: 'Mandarin Chinese', event: e => this.setLanguage(e, 'zh-CH', 4) }); //prettier-ignore - ContextMenu.Instance.addItem({ description: 'Japanese', event: e => this.setLanguage(e, 'ja', 5) }); //prettier-ignore - ContextMenu.Instance.addItem({ description: 'Korean', event: e => this.setLanguage(e, 'ko', 6) }); //prettier-ignore - ContextMenu.Instance.displayMenu(x, y); - }; - - evaluatePronunciation = () => { - const newAudio = Docs.Create.AudioDocument(nullAudio, { _width: 200, _height: 100 }); - this.Document.audio = newAudio[DocData]; - // DocCast(this.Document.embedContainer)()._props.addDocument?.(newAudio); - this._props.DocumentView?.()._props.addDocument?.(newAudio); - // Doc.AddToMyOverlay(newAudio); + return layoutTemplateString; }; render() { @@ -847,7 +710,7 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>() ); if (this.Document._layout_isFlashcard) { - const side = this.layoutDoc[`_${this._props.fieldKey}_usePath`] === 'alternate' ? 1 : 0; + const side = this.frontSide ? 1 : 0; // add text box to each side when comparison box is first created // (!this.dataDoc[this.fieldKey + '_0'] && this.dataDoc[this._props.fieldKey + '_0'] !== 'empty') @@ -861,33 +724,8 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>() const dataSplit = StrCast(this.dataDoc.data).includes('Keyword: ') ? 
StrCast(this.dataDoc.data).split('Keyword: ') : StrCast(this.dataDoc.data).split('Answer: '); const newDoc = Docs.Create.TextDocument(dataSplit[0]); this.addDoc(newDoc, this.fieldKey + '_1'); - // if (this.Document.image) { - // console.log('ID: ' + DocCast(this.Document.image)[Id]); - // const rtfiel = new RichTextField( - // JSON.stringify({ - // doc: { - // type: 'doc', - // content: [ - // { - // type: 'paragraph', - // attrs: { align: null, color: null, id: null, indent: null, inset: null, lineSpacing: null, paddingBottom: null, paddingTop: null }, - // content: [ - // { type: 'text', text: dataSplit[0] }, - // { type: 'dashDoc', attrs: { width: '200px', height: '200px', title: 'dashDoc', float: 'unset', hidden: false, docId: DocCast(this.Document.image)[Id] } }, - // ], - // }, - // ], - // }, - // selection: { type: 'text', anchor: 2, head: 2 }, - // }) - // ); - - // newDoc[DocData].text = rtfiel; - // } } - // render the QuizCards - // console.log('GERE' + DocCast(this.Document.embedContainer).filterOp); if (DocCast(this.Document.embedContainer) && DocCast(this.Document.embedContainer).practiceMode === 'quiz') { const text = StrCast(RTFCast(DocCast(this.dataDoc[this.fieldKey + '_1']).text)?.Text); return ( @@ -896,14 +734,14 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>() <p style={{ display: text === '' ? 'flex' : 'none', color: 'white', marginLeft: '10px' }}>Return to all flashcards and add text to both sides. </p> <div className="input-box"> <textarea - value={this.layoutDoc[`_${this._props.fieldKey}_usePath`] === 'alternate' ? this._outputValue : this._inputValue} + value={this.frontSide ? this._outputValue : this._inputValue} onChange={this.handleInputChange} onScroll={e => { e.stopPropagation(); e.preventDefault(); }} placeholder={!this.layoutDoc[`_${this._props.fieldKey}_usePath`] ? 'Enter a response for GPT to evaluate.' : ''} - readOnly={this.layoutDoc[`_${this._props.fieldKey}_usePath`] === 'alternate'}></textarea> + readOnly={this.frontSide}></textarea> {this._loading ? ( <div className="loading-spinner" style={{ position: 'absolute' }}> @@ -935,7 +773,7 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>() Evaluate Pronunciation </button> - {this.layoutDoc[`_${this._props.fieldKey}_usePath`] !== 'alternate' ? ( + {this.frontSide ? ( <button className="submit-buttonsubmit" type="button" onClick={this.handleRenderGPTClick} style={{ borderRadius: '2px', marginBottom: '3px', width: '100%' }}> Submit </button> @@ -950,22 +788,17 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>() ); } - //console.log('HEREEE2' + StrCast(RTFCast(DocCast(this.dataDoc[this.fieldKey + '_1']).text)?.Text)); // render a normal flashcard when not a QuizCard return ( <div className={`comparisonBox${this._props.isContentActive() ? '-interactive' : ''}`} /* change className to easily disable/enable pointer events in CSS */ - // onContextMenu={this.specificMenu} style={{ display: 'flex', flexDirection: 'column', overflow: 'hidden' }} onMouseEnter={() => { - this.hoverFlip('alternate'); + this.hoverFlip(false); }} onMouseLeave={() => { - this.hoverFlip(undefined); - }} - // onPointerUp={() => (this._isAnyChildContentActive = true)} - > - {/* {!this.layoutDoc[`_${this._props.fieldKey}_usePath`] && StrCast(RTFCast(DocCast(this.dataDoc[this.fieldKey + '_1']).text)?.Text) === '' && !this.childActive ? <p className="explain">Enter text in the flashcard. 
</p> : null} */} + this.hoverFlip(true); + }}> {displayBox(`${this.fieldKey}_${side === 0 ? 1 : 0}`, side, this._props.PanelWidth() - 3)} {this._loading ? ( <div className="loading-spinner" style={{ position: 'absolute' }}>
diff --git a/src/client/views/nodes/ComparisonBoxNew.tsx b/src/client/views/nodes/ComparisonBoxNew.tsx new file mode 100644 index 000000000..0fcf2d3c7 --- /dev/null +++ b/src/client/views/nodes/ComparisonBoxNew.tsx @@ -0,0 +1,848 @@ +import { FontAwesomeIcon } from '@fortawesome/react-fontawesome'; +import { MathJax, MathJaxContext } from 'better-react-mathjax'; +import { Tooltip } from '@mui/material'; +import { action, computed, makeObservable, observable, reaction } from 'mobx'; +import { observer } from 'mobx-react'; +import * as React from 'react'; +import { returnFalse, returnNone, returnTrue, setupMoveUpEvents } from '../../../ClientUtils'; +import { emptyFunction } from '../../../Utils'; +import { Doc, Opt } from '../../../fields/Doc'; +import { DocData } from '../../../fields/DocSymbols'; +import { RichTextField } from '../../../fields/RichTextField'; +import { DocCast, NumCast, RTFCast, StrCast, toList } from '../../../fields/Types'; +import { GPTCallType, gptAPICall, gptImageLabel } from '../../apis/gpt/GPT'; +import '../pdf/GPTPopup/GPTPopup.scss'; +import { DocUtils } from '../../documents/DocUtils'; +import { DocumentType } from '../../documents/DocumentTypes'; +import { Docs } from '../../documents/Documents'; +import { DragManager } from '../../util/DragManager'; +import { dropActionType } from '../../util/DropActionTypes'; +import { undoable } from 
'../../util/UndoManager'; +import { ViewBoxAnnotatableComponent } from '../DocComponent'; +import { PinDocView, PinProps } from '../PinFuncs'; +import { StyleProp } from '../StyleProp'; +import './ComparisonBox.scss'; +import { DocumentView } from './DocumentView'; +import { FieldView, FieldViewProps } from './FieldView'; +import { FormattedTextBox } from './formattedText/FormattedTextBox'; +import ReactLoading from 'react-loading'; +import { ContextMenu } from '../ContextMenu'; +import { ContextMenuProps } from '../ContextMenuItem'; +import { tickStep } from 'd3'; +import { CollectionCarouselView } from '../collections/CollectionCarouselView'; +import { FollowLinkScript } from '../../documents/DocUtils'; +import { schema } from '../nodes/formattedText/schema_rts'; +import { Id } from '../../../fields/FieldSymbols'; +import axios from 'axios'; +import ReactMarkdown from 'react-markdown'; +import { WebField, nullAudio } from '../../../fields/URLField'; + +const API_URL = 'https://api.unsplash.com/search/photos'; + +@observer +export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>() { + public static LayoutString(fieldKey: string) { + return FieldView.LayoutString(ComparisonBox, fieldKey); + } + private _disposers: (DragManager.DragDropDisposer | undefined)[] = [undefined, undefined]; + constructor(props: FieldViewProps) { + super(props); + makeObservable(this); + this.setListening(); + } + + @observable private _inputValue = ''; + @observable private _outputValue = ''; + @observable private _loading = false; + @observable private _isEmpty = false; + @observable private _audio: Doc = Docs.Create.TextDocument(''); + @observable childActive = false; + @observable _yRelativeToTop: boolean = true; + @observable _animating = ''; + @observable mathJaxConfig = { + loader: { load: ['input/asciimath'] }, + }; + @observable private _listening = false; + @observable transcriptElement = ''; + @observable private frontSide = false; + SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition; + @observable recognition = new this.SpeechRecognition(); + _closeRef = React.createRef<HTMLDivElement>(); + + get revealOp() { + return this.layoutDoc[`_${this._props.fieldKey}_revealOp`]; + } + get clipHeightKey() { + return '_' + this._props.fieldKey + '_clipHeight'; + } + + get clipWidthKey() { + return '_' + this._props.fieldKey + '_clipWidth'; + } + + @computed get clipWidth() { + return NumCast(this.layoutDoc[this.clipWidthKey], 50); + } + + @computed get clipHeight() { + return NumCast(this.layoutDoc[this.clipHeightKey], 200); + } + + @computed get overlayAlternateIcon() { + return ( + <Tooltip title={<div className="dash-tooltip">flip</div>}> + <div + className="formattedTextBox-alternateButton" + onPointerDown={e => + setupMoveUpEvents(e.target, e, returnFalse, emptyFunction, () => { + if (!this.revealOp || this.revealOp === 'flip') { + this.flipFlashcard(); + } + }) + } + style={{ + background: this.revealOp === 'hover' ? 'gray' : this.frontSide ? 'white' : 'black', + color: this.revealOp === 'hover' ? 'black' : this.frontSide ? 'black' : 'white', + display: 'inline-block', + }}> + <div key="alternate" className="formattedTextBox-flip"> + <FontAwesomeIcon icon="turn-up" size="1x" /> + </div> + </div> + </Tooltip> + ); + } + + @computed get flashcardMenu() { + return ( + <div> + <Tooltip + title={this.frontSide ? 
<div className="dash-tooltip">Flip to front side to use GPT</div> : <div className="dash-tooltip">Ask GPT to create an answer on the back side of the flashcard based on your question on the front</div>}> + <div style={{ position: 'absolute', bottom: '3px', right: '50px', cursor: 'pointer' }} onPointerDown={e => (!this.frontSide ? this.findImageTags() : null)}> + <FontAwesomeIcon icon="lightbulb" size="xl" /> + </div> + </Tooltip> + {DocCast(this.Document.embedContainer).type_collection === 'carousel' ? null : ( + <div> + <Tooltip title={<div>Create a flashcard pile</div>}> + <div style={{ position: 'absolute', bottom: '3px', right: '74px', cursor: 'pointer' }} onPointerDown={e => this.createFlashcardPile([this.Document], false)}> + <FontAwesomeIcon icon="folder-plus" size="xl" /> + </div> + </Tooltip> + <Tooltip title={<div className="dash-tooltip">Create new flashcard stack based on text</div>}> + <div style={{ position: 'absolute', bottom: '3px', right: '104px', cursor: 'pointer' }} onClick={e => this.gptFlashcardPile()}> + <FontAwesomeIcon icon="layer-group" size="xl" /> + </div> + </Tooltip> + </div> + )} + <Tooltip title={<div className="dash-tooltip">Hover to reveal</div>}> + <div style={{ position: 'absolute', bottom: '3px', right: '25px', cursor: 'pointer' }} onClick={e => this.handleHover()}> + <FontAwesomeIcon color={this.revealOp === 'hover' ? 'blue' : 'black'} icon="hand-point-up" size="xl" /> + </div> + </Tooltip> + </div> + ); + } + + @action handleHover = () => { + if (this.revealOp === 'hover') { + this.layoutDoc[`_${this._props.fieldKey}_revealOp`] = 'flip'; + this.Document.forceActive = false; + } else { + this.layoutDoc[`_${this._props.fieldKey}_revealOp`] = 'hover'; + this.Document.forceActive = true; + } + }; + + @action handleInputChange = (e: React.ChangeEvent<HTMLTextAreaElement>) => { + this._inputValue = e.target.value; + console.log(this._inputValue); + }; + + @action activateContent = () => { + this.childActive = true; + }; + + @action handleRenderClick = () => { + this.frontSide = !this.frontSide; + }; + + @action handleRenderGPTClick = async () => { + console.log('Phonetic transcription: ' + DocCast(this.Document.audio).phoneticTranscription); + const phonTrans = DocCast(this.Document.audio).phoneticTranscription; + if (phonTrans) { + this._inputValue = StrCast(phonTrans); + console.log('INPUT:' + this._inputValue); + this.askGPTPhonemes(this._inputValue); + } else if (this._inputValue) this.askGPT(GPTCallType.QUIZ); + this.frontSide = false; + this._outputValue = ''; + }; + + @action + private onPointerMove = ({ movementX }: PointerEvent) => { + const width = movementX * this.ScreenToLocalBoxXf().Scale + (this.clipWidth / 100) * this._props.PanelWidth(); + if (width && width > 5 && width < this._props.PanelWidth()) { + this.layoutDoc[this.clipWidthKey] = (width * 100) / this._props.PanelWidth(); + } + return false; + }; + + componentDidMount() { + this._props.setContentViewBox?.(this); + reaction( + () => this._props.isSelected(), // when this reaction should update + selected => !selected && (this.childActive = false) // what it should update to + ); + } + + protected createDropTarget = (ele: HTMLDivElement | null, fieldKey: string, disposerId: number) => { + this._disposers[disposerId]?.(); + if (ele) { + this._disposers[disposerId] = DragManager.MakeDropTarget(ele, (e, dropEvent) => this.internalDrop(e, dropEvent, fieldKey), this.layoutDoc); + } + }; + + private internalDrop = undoable((e: Event, dropEvent: DragManager.DropEvent, fieldKey: string) => { + 
if (dropEvent.complete.docDragData) { + const { droppedDocuments } = dropEvent.complete.docDragData; + const added = dropEvent.complete.docDragData.moveDocument?.(droppedDocuments, this.Document, (doc: Doc | Doc[]) => this.addDoc(toList(doc).lastElement(), fieldKey)); + Doc.SetContainer(droppedDocuments.lastElement(), this.dataDoc); + !added && e.preventDefault(); + e.stopPropagation(); // prevent parent Doc from registering new position so that it snaps back into place + // this.childActive = false; + return added; + } + return undefined; + }, 'internal drop'); + + getAnchor = (addAsAnnotation: boolean, pinProps?: PinProps) => { + const anchor = Docs.Create.ConfigDocument({ + title: 'CompareAnchor:' + this.Document.title, + // set presentation timing properties for restoring view + presentation_transition: 1000, + annotationOn: this.Document, + }); + if (anchor) { + if (!addAsAnnotation) anchor.backgroundColor = 'transparent'; + /* addAsAnnotation && */ this.addDocument(anchor); + PinDocView(anchor, { pinDocLayout: pinProps?.pinDocLayout, pinData: { ...(pinProps?.pinData ?? {}), clippable: true } }, this.Document); + return anchor; + } + return this.Document; + }; + + clearDoc = undoable((fieldKey: string) => { + this.dataDoc[fieldKey] = undefined; + this._isEmpty = true; + }, 'clear doc'); + + moveDoc = (doc: Doc, addDocument: (document: Doc | Doc[]) => boolean, which: string) => this.remDoc(doc, which) && addDocument(doc); + addDoc = (doc: Doc, which: string) => { + if (this.dataDoc[which] && !this._isEmpty) return false; + this.dataDoc[which] = doc; + return true; + }; + remDoc = (doc: Doc, which: string) => { + if (this.dataDoc[which] === doc) { + this._isEmpty = true; + // this.dataDoc[which] = 'empty'; + console.log('HEREEEE'); + this.dataDoc[which] = undefined; + return true; + } + console.log('FALSE'); + return false; + }; + + closeDown = (e: React.PointerEvent, which: string) => { + setupMoveUpEvents( + this, + e, + moveEv => { + const de = new DragManager.DocumentDragData([DocCast(this.dataDoc[which])], dropActionType.move); + de.moveDocument = (doc: Doc | Doc[], targetCollection: Doc | undefined, addDocument: (doc: Doc | Doc[]) => boolean): boolean => addDocument(doc); + de.canEmbed = true; + DragManager.StartDocumentDrag([this._closeRef.current!], de, moveEv.clientX, moveEv.clientY); + return true; + }, + emptyFunction, + () => this.clearDoc(which) + ); + }; + docStyleProvider = (doc: Opt<Doc>, props: Opt<FieldViewProps>, property: string): any => { + if (property === StyleProp.PointerEvents) return 'none'; + return this._props.styleProvider?.(doc, props, property); + }; + moveDoc1 = (docs: Doc | Doc[], targetCol: Doc | undefined, addDoc: any) => toList(docs).reduce((res, doc: Doc) => res && this.moveDoc(doc, addDoc, this.fieldKey + '_1'), true); + moveDoc2 = (docs: Doc | Doc[], targetCol: Doc | undefined, addDoc: any) => toList(docs).reduce((res, doc: Doc) => res && this.moveDoc(doc, addDoc, this.fieldKey + '_2'), true); + remDoc1 = (docs: Doc | Doc[]) => toList(docs).reduce((res, doc) => res && this.remDoc(doc, this.fieldKey + '_1'), true); + remDoc2 = (docs: Doc | Doc[]) => toList(docs).reduce((res, doc) => res && this.remDoc(doc, this.fieldKey + '_2'), true); + + private registerSliding = (e: React.PointerEvent<HTMLDivElement>, targetWidth: number) => { + if (e.button !== 2) { + setupMoveUpEvents( + this, + e, + this.onPointerMove, + emptyFunction, + action((moveEv, doubleTap) => { + if (doubleTap) { + this.childActive = true; + if (!this.dataDoc[this.fieldKey + '_1'] && 
!this.dataDoc[this.fieldKey]) this.dataDoc[this.fieldKey + '_1'] = DocUtils.copyDragFactory(Doc.UserDoc().emptyNote as Doc); + if (!this.dataDoc[this.fieldKey + '_2'] && !this.dataDoc[this.fieldKey + '_alternate']) this.dataDoc[this.fieldKey + '_2'] = DocUtils.copyDragFactory(Doc.UserDoc().emptyNote as Doc); + } + }), + false, + undefined, + action(() => { + if (this.childActive) return; + this._animating = 'all 200ms'; + // on click, animate slider movement to the targetWidth + this.layoutDoc[this.clipWidthKey] = (targetWidth * 100) / this._props.PanelWidth(); + // this.layoutDoc[this.clipHeightKey] = (targetWidth * 100) / this._props.PanelHeight(); + + setTimeout( + action(() => { + this._animating = ''; + }), + 200 + ); + }) + ); + } + }; + + setListening = () => { + const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition; + if (SpeechRecognition) { + this.recognition.continuous = true; + this.recognition.interimResults = true; + this.recognition.lang = 'en-US'; + this.recognition.onresult = this.handleResult.bind(this); + } + ContextMenu.Instance.setLangIndex(0); + }; + + startListening = () => { + this.recognition.start(); + this._listening = true; + }; + + stopListening = () => { + this.recognition.stop(); + this._listening = false; + }; + + setLanguage = (e: React.MouseEvent, language: string, ind: number) => { + this.recognition.lang = language; + ContextMenu.Instance.setLangIndex(ind); + }; + + convertAbr = () => { + switch (this.recognition.lang) { + case 'en-US': return 'English'; //prettier-ignore + case 'es-ES': return 'Spanish'; //prettier-ignore + case 'fr-FR': return 'French'; //prettier-ignore + case 'it-IT': return 'Italian'; //prettier-ignore + case 'zh-CH': return 'Mandarin Chinese'; //prettier-ignore + case 'ja': return 'Japanese'; //prettier-ignore + default: return 'Korean'; //prettier-ignore + } + }; + + openContextMenu = (x: number, y: number, evalu: boolean) => { + ContextMenu.Instance.clearItems(); + ContextMenu.Instance.addItem({ description: 'English', event: e => this.setLanguage(e, 'en-US', 0) }); //prettier-ignore + ContextMenu.Instance.addItem({ description: 'Spanish', event: e => this.setLanguage(e, 'es-ES', 1 )}); //prettier-ignore + ContextMenu.Instance.addItem({ description: 'French', event: e => this.setLanguage(e, 'fr-FR', 2) }); //prettier-ignore + ContextMenu.Instance.addItem({ description: 'Italian', event: e => this.setLanguage(e, 'it-IT', 3) }); //prettier-ignore + if (!evalu) ContextMenu.Instance.addItem({ description: 'Mandarin Chinese', event: e => this.setLanguage(e, 'zh-CH', 4) }); //prettier-ignore + ContextMenu.Instance.addItem({ description: 'Japanese', event: e => this.setLanguage(e, 'ja', 5) }); //prettier-ignore + ContextMenu.Instance.addItem({ description: 'Korean', event: e => this.setLanguage(e, 'ko', 6) }); //prettier-ignore + ContextMenu.Instance.displayMenu(x, y); + }; + + evaluatePronunciation = () => { + const newAudio = Docs.Create.AudioDocument(nullAudio, { _width: 200, _height: 100 }); + this.Document.audio = newAudio[DocData]; + this._props.DocumentView?.()._props.addDocument?.(newAudio); + }; + + pushInfo = async () => { + const audio = { + file: this._audio.url, + }; + const response = await axios.post('http://localhost:105/recognize/', audio, { + headers: { + 'Content-Type': 'application/json', + }, + }); + this.Document.phoneticTranscription = response.data['transcription']; + console.log('RESPONSE: ' + response.data['transcription']); + }; + + createFlashcardPile(collectionArr: Doc[], gpt: 
boolean) { + const newCol = Docs.Create.CarouselDocument(collectionArr, { + _width: NumCast(this.layoutDoc['_' + this._props.fieldKey + '_width'], 250) + 50, + _height: NumCast(this.layoutDoc['_' + this._props.fieldKey + '_width'], 200) + 50, + _layout_fitWidth: false, + _layout_autoHeight: true, + }); + newCol['x'] = this.layoutDoc['x']; + newCol['y'] = NumCast(this.layoutDoc['y']) + 50; + newCol.type_collection = 'carousel'; + + if (gpt) { + this._props.DocumentView?.()._props.addDocument?.(newCol); + this._props.removeDocument?.(this.Document); + } else { + this._props.addDocument?.(newCol); + this._props.removeDocument?.(this.Document); + this.Document.embedContainer = newCol; + } + } + + gptFlashcardPile = async () => { + var text = await this.askGPT(GPTCallType.STACK); + var senArr = text?.split('Question: '); + var collectionArr: Doc[] = []; + for (let i = 1; i < senArr?.length!; i++) { + const newDoc = Docs.Create.ComparisonDocument(senArr![i], { _layout_isFlashcard: true, _width: 300, _height: 300 }); + + if (StrCast(senArr![i]).includes('Keyword: ')) { + const question = StrCast(senArr![i]).split('Keyword: '); + const img = await this.fetchImages(question[1]); + const textSide1 = question[0].includes('Answer: ') ? question[0].split('Answer: ')[0] : question[0]; + const textDoc1 = Docs.Create.TextDocument(question[0]); + const rtfiel = new RichTextField( + JSON.stringify({ + doc: { + type: 'doc', + content: [ + { + type: 'paragraph', + attrs: { align: null, color: null, id: null, indent: null, inset: null, lineSpacing: null, paddingBottom: null, paddingTop: null }, + content: [ + { type: 'text', text: question[0].includes('Answer: ') ? question[0].split('Answer: ')[0] : question[0] }, + { type: 'dashDoc', attrs: { width: '200px', height: '200px', title: 'dashDoc', float: 'unset', hidden: false, docId: img![Id] } }, + ], + }, + ], + }, + selection: { type: 'text', anchor: 2, head: 2 }, + }), + textSide1 + ); + + textDoc1[DocData].text = rtfiel; + DocCast(newDoc)[DocData][this.fieldKey + '_1'] = textDoc1; + DocCast(newDoc)[DocData][this.fieldKey + '_0'] = Docs.Create.TextDocument(question[0].includes('Answer: ') ? question[0].split('Answer: ')[1] : question[1]); + } + collectionArr.push(newDoc); + } + this.createFlashcardPile(collectionArr, true); + }; + + askGPT = async (callType: GPTCallType): Promise<string | undefined> => { + const questionText = 'Question: ' + StrCast(RTFCast(DocCast(this.dataDoc[this.fieldKey + '_1']).text)?.Text); + const rubricText = ' Rubric: ' + StrCast(RTFCast(DocCast(this.dataDoc[this.fieldKey + '_0']).text)?.Text); + const queryText = questionText + ' UserAnswer: ' + this._inputValue + '. ' + rubricText; + this._loading = true; + const doc = DocCast(this.dataDoc[this.props.fieldKey + '_0']); + if (callType == GPTCallType.CHATCARD) { + if (StrCast(RTFCast(DocCast(this.dataDoc[this.fieldKey + '_1']).text)?.Text) === '') { + this._loading = false; + return; + } + this.flipFlashcard(); + } + try { + console.log(queryText); + const res = await gptAPICall(callType == GPTCallType.QUIZ ? 
queryText : questionText, callType); + if (!res) { + console.error('GPT call failed'); + return; + } + if (callType == GPTCallType.CHATCARD) { + DocCast(this.dataDoc[this.props.fieldKey + '_0'])[DocData].text = res; + } else if (callType == GPTCallType.QUIZ) { + console.log(this._inputValue); + this._outputValue = res.replace(/UserAnswer/g, "user's answer").replace(/Rubric/g, 'rubric'); + } else if (callType === GPTCallType.FLASHCARD) { + this._loading = false; + return res; + } else if (callType === GPTCallType.STACK) { + } + this._loading = false; + return res; + } catch (err) { + console.error('GPT call failed'); + } + this._loading = false; + }; + layoutWidth = () => NumCast(this.layoutDoc.width, 200); + layoutHeight = () => NumCast(this.layoutDoc.height, 200); + + findImageTags = async () => { + const c = this.DocumentView?.().ContentDiv!.getElementsByTagName('img'); + if (c?.length === 0) await this.askGPT(GPTCallType.CHATCARD); + if (c) { + this._loading = true; + for (let i of c) { + console.log(i); + if (i.className !== 'ProseMirror-separator') await this.getImageDesc(i.src); + } + this._loading = false; + } + }; + + askGPTPhonemes = async (phonemes: string) => { + const sentence = StrCast(RTFCast(DocCast(this.dataDoc[this.fieldKey + '_1']).text)?.Text); + const phon = 'w ʌ ɪ z j ɔː ɹ n e ɪ m '; + const phon2 = 'h ʌ ɛ r j ʌ t ʌ d eɪ'; + const phon6 = 'huː ɑɹ juː tədeɪ'; + const phon3 = 'ʃ eɪ oʊ s i ʃ oʊ z b aɪ ð ə s iː ʃ oʊ'; + const phon4 = 'kamo estas hɔi'; + const phon5 = 'la s e n a l'; + const promptEng = + 'Consider all possible phonetic transcriptions of the intended sentence "' + + sentence + + '" that is standard in American speech without showing the user. Compare each word in the following phonemes with those phonetic transcriptions without displaying anything to the user: "' + + phon6 + + '". Steps to do this: Align the words with each word in the intended sentence by combining the phonemes to get a pronunciation that resembles the word in order. Do not describe phonetic corrections with the phonetic alphabet - describe it by providing other examples of how it should sound. Note if a word or sound is missing, including missing vowels and consonants. If there is an additional word that does not match with the provided sentence, say so. For each word, if any letters mismatch and would sound weird in American speech and they are not allophones of the same phoneme and they are far away from each other on the IPA vowel chart and that pronunciation is not normal for the meaning of the word, note this difference and explain how it is supposed to sound. Only note the difference if they are not allophones of the same phoneme and if they are far away on the vowel chart. The goal is to be understood, not sound like a native speaker. Just so you know, "i" sounds like "ee" as in "bee", not "ih" as in "lick". Interpret "ɹ" as the same as "r". Interpret "ʌ" as the same as "ə". If "ɚ", "ɔː", and "ɔ" are options for pronunciation, do not choose "ɚ". Ignore differences with colons. Ignore redundant letters and words and sounds and the splitting of words; do not mention this since there could be repeated words in the sentence. Provide a response like this: "Lets work on improving the pronunciation of "coffee." You said "ceeffee," which is close, but we need to adjust the vowel sound. In American English, "coffee" is pronounced /ˈkɔːfi/, with a long "aw" sound. Try saying "kah-fee." Your intonation is good, but try putting a bit more stress on "like" in the sentence "I would like a coffee with milk." This will make your speech sound more natural. Keep practicing, and lets try saying the whole sentence again!"'; + const promptSpa = + 'Consider all possible phonetic transcriptions of the intended sentence "' + + 'como estás hoy' + + '" that is standard in Spanish speech without showing the user. Compare each word in the following phonemes with those phonetic transcriptions without displaying anything to the user: "' + + phon4 + + '". Steps to do this: Align the words with each word in the intended sentence by combining the phonemes to get a pronunciation that resembles the word in order. Do not describe phonetic corrections with the phonetic alphabet - describe it by providing other examples of how it should sound. Note if a word or sound is missing, including missing vowels and consonants. If there is an additional word that does not match with the provided sentence, say so. For each word, if any letters mismatch and would sound weird in Spanish speech and they are not allophones of the same phoneme and they are far away from each other on the IPA vowel chart and that pronunciation is not normal for the meaning of the word, note this difference and explain how it is supposed to sound. Only note the difference if they are not allophones of the same phoneme and if they are far away on the vowel chart; say good job if it would be understood by a native Spanish speaker. Just so you know, "i" sounds like "ee" as in "bee", not "ih" as in "lick". Interpret "ɹ" as the same as "r". Interpret "ʌ" as the same as "ə". Do not make "θ" and "f" interchangeable. Do not make "n" and "ɲ" interchangeable. Do not make "e" and "i" interchangeable. If "ɚ", "ɔː", and "ɔ" are options for pronunciation, do not choose "ɚ". Ignore differences with colons. Ignore redundant letters and words and sounds and the splitting of words; do not mention this since there could be repeated words in the sentence. Identify that "ɔi" sounds like "oy". Ignore accents and do not say anything to the user about this.'; + const promptAll = + 'Consider all possible phonetic transcriptions of the intended sentence "' + + sentence + + '" that is standard in ' + + this.convertAbr() + + ' speech without showing the user. Compare each word in the following phonemes with those phonetic transcriptions without displaying anything to the user: "' + + phonemes + + '". Steps to do this: Align the words with each word in the intended sentence by combining the phonemes to get a pronunciation that resembles the word in order. Do not describe phonetic corrections with the phonetic alphabet - describe it by providing other examples of how it should sound. Note if a word or sound is missing, including missing vowels and consonants. If there is an additional word that does not match with the provided sentence, say so. For each word, if any letters mismatch and would sound weird in ' + + this.convertAbr() + + ' speech and they are not allophones of the same phoneme and they are far away from each other on the IPA vowel chart and that pronunciation is not normal for the meaning of the word, note this difference and explain how it is supposed to sound. Just so you know, "i" sounds like "ee" as in "bee", not "ih" as in "lick". Interpret "ɹ" as the same as "r". Interpret "ʌ" as the same as "ə". Do not make "θ" and "f" interchangeable. Do not make "n" and "ɲ" interchangeable. Do not make "e" and "i" interchangeable. 
If "ɚ", "ɔː", and "ɔ" are options for pronunciation, do not choose "ɚ". Ignore differences with colons. Ignore redundant letters and words and sounds and the splitting of words; do not mention this since there could be repeated words in the sentence. Provide a response like this: "Lets work on improving the pronunciation of "coffee." You said "cawffee," which is close, but we need to adjust the vowel sound. In American English, "coffee" is pronounced /ˈkɔːfi/, with a long "aw" sound. Try saying "kah-fee." Your intonation is good, but try putting a bit more stress on "like" in the sentence "I would like a coffee with milk." This will make your speech sound more natural. Keep practicing, and lets try saying the whole sentence again!"'; + + switch (this.recognition.lang) { + case 'en-US': + this._outputValue = await gptAPICall(promptEng, GPTCallType.PRONUNCIATION); + break; + case 'es-ES': + this._outputValue = await gptAPICall(promptSpa, GPTCallType.PRONUNCIATION); + break; + default: + this._outputValue = await gptAPICall(promptAll, GPTCallType.PRONUNCIATION); + break; + } + }; + + handleResult = (e: SpeechRecognitionEvent) => { + let interimTranscript = ''; + let finalTranscript = ''; + for (let i = e.resultIndex; i < e.results.length; i++) { + const transcript = e.results[i][0].transcript; + if (e.results[i].isFinal) { + finalTranscript += transcript; + } else { + interimTranscript += transcript; + } + } + this._inputValue += finalTranscript; + }; + + fetchImages = async (selection: string) => { + try { + const { data } = await axios.get(`${API_URL}?query=${selection}&page=1&per_page=${1}&client_id=Q4zruu6k6lum2kExiGhLNBJIgXDxD6NNj0SRHH_XXU0`); + console.log(data.results); + const imageSnapshot = Docs.Create.ImageDocument(data.results[0].urls.small, { + _nativeWidth: Doc.NativeWidth(this.layoutDoc), + _nativeHeight: Doc.NativeHeight(this.layoutDoc), + x: NumCast(this.layoutDoc.x), + y: NumCast(this.layoutDoc.y), + onClick: FollowLinkScript(), + _width: 150, + _height: 150, + title: '--snapshot' + NumCast(this.layoutDoc._layout_currentTimecode) + ' image-', + }); + imageSnapshot['x'] = this.layoutDoc['x']; + imageSnapshot['y'] = this.layoutDoc['y']; + return imageSnapshot; + } catch (error) { + console.log(error); + } + }; + + static imageUrlToBase64 = async (imageUrl: string): Promise<string> => { + try { + const response = await fetch(imageUrl); + const blob = await response.blob(); + + return new Promise((resolve, reject) => { + const reader = new FileReader(); + reader.readAsDataURL(blob); + reader.onloadend = () => resolve(reader.result as string); + reader.onerror = error => reject(error); + }); + } catch (error) { + console.error('Error:', error); + throw error; + } + }; + + getImageDesc = async (u: string) => { + try { + const hrefBase64 = await ComparisonBox.imageUrlToBase64(u); + const response = await gptImageLabel(hrefBase64, 'Answer the following question as a short flashcard response. Do not include a label.' 
+ (this.dataDoc.text as RichTextField)?.Text); + + DocCast(this.dataDoc[this.props.fieldKey + '_0'])[DocData].text = response; + } catch (error) { + console.log('Error'); + } + }; + + flipFlashcard = () => { + this.frontSide = !this.frontSide; + }; + + hoverFlip = (side: boolean) => { + if (this.revealOp === 'hover') this.layoutDoc[`_${this._props.fieldKey}_usePath`] = side; + }; + + testForTextFields = (whichSlot: string) => { + const slotData = Doc.Get(this.dataDoc, whichSlot, true); + const slotHasText = slotData instanceof RichTextField || typeof slotData === 'string'; + const subjectText = RTFCast(this.Document[this.fieldKey])?.Text.trim(); + const altText = RTFCast(this.Document[this.fieldKey + '_alternate'])?.Text.trim(); + const layoutTemplateString = + slotHasText ? FormattedTextBox.LayoutString(whichSlot): + whichSlot.endsWith('1') ? (subjectText !== undefined ? FormattedTextBox.LayoutString(this.fieldKey) : undefined) : + altText !== undefined ? FormattedTextBox.LayoutString(this.fieldKey + '_alternate'): undefined; // prettier-ignore + + // A bit hacky to try out the concept of using GPT to fill in flashcards + // If the second slot doesn't have anything in it, but the fieldKey slot has text (e.g., this.text is a string) + // and the fieldKey + "_alternate" has text that includes a GPT query (indicated by (( && )) ) that is parameterized (optionally) by the fieldKey text (this) or other metadata (this.<field>). + // eg., this.text_alternate is + // "((Provide a one sentence definition for (this) that doesn't use any word in (this.excludeWords) ))" + // where (this) is replaced by the text in the fieldKey slot abd this.excludeWords is repalced by the conetnts of the excludeWords field + // The GPT call will put the "answer" in the second slot of the comparison (eg., text_2) + if (whichSlot.endsWith('2') && !layoutTemplateString?.includes(whichSlot)) { + const queryText = altText?.replace('(this)', subjectText); // TODO: this should be done in Doc.setField but it doesn't know about the fieldKey ... + if (queryText?.match(/\(\(.*\)\)/)) { + Doc.SetField(this.Document, whichSlot, ':=' + queryText, false); // make the second slot be a computed field on the data doc that calls ChatGpt + } + } + return layoutTemplateString; + }; + + render() { + const clearButton = (which: string) => ( + <Tooltip title={<div className="dash-tooltip">remove</div>}> + <div + // style={{ position: 'relative', top: '0px', left: '10px' }} + ref={this._closeRef} + className={`clear-button ${which}`} + onPointerDown={e => this.closeDown(e, which)} // prevent triggering slider movement in registerSliding + > + <FontAwesomeIcon className={`clear-button ${which}`} icon="times" size="xs" /> + </div> + </Tooltip> + ); + const displayDoc = (whichSlot: string) => { + const whichDoc = DocCast(this.dataDoc[whichSlot]); + const targetDoc = DocCast(whichDoc?.annotationOn, whichDoc); + const layoutString = targetDoc ? '' : this.testForTextFields(whichSlot); + // whichDoc['backgroundColor'] = this.layoutDoc['backgroundColor']; + + return targetDoc || layoutString ? ( + // <MathJaxContext config={this.mathJaxConfig}> + // <MathJax> + <> + <DocumentView + // eslint-disable-next-line react/jsx-props-no-spreading + {...this._props} + ignoreUsePath={layoutString ? true : undefined} + renderDepth={this.props.renderDepth + 1} + LayoutTemplateString={layoutString} + Document={layoutString ? this.Document : targetDoc} + containerViewPath={this.DocumentView?.().docViewPath} + moveDocument={whichSlot.endsWith('1') ? 
this.moveDoc1 : this.moveDoc2} + removeDocument={whichSlot.endsWith('1') ? this.remDoc1 : this.remDoc2} + NativeWidth={this.layoutWidth} + NativeHeight={this.layoutHeight} + isContentActive={() => this.childActive} + isDocumentActive={returnFalse} + dontSelect={returnTrue} + whenChildContentsActiveChanged={this.whenChildContentsActiveChanged} + styleProvider={this.childActive ? this._props.styleProvider : this.docStyleProvider} + hideLinkButton + pointerEvents={this.childActive ? undefined : returnNone} + /> + {/* </MathJax> */} + {/* <div style={{ position: 'absolute', top: '-5px', left: '2px' }}>{layoutString ? null : clearButton(whichSlot)}</div> */} + {/* </MathJaxContext> // placeholder image if doc is missingleft: `${NumCast(this.layoutDoc.width, 200) - 33}px` */} + </> + ) : ( + <div className="placeholder"> + <FontAwesomeIcon className="upload-icon" icon="cloud-upload-alt" size="lg" /> + </div> + ); + }; + const displayBox = (which: string, index: number, cover: number) => ( + <div + className={`${index === 0 ? 'before' : 'after'}Box-cont`} + key={which} + style={{ width: this._props.PanelWidth() }} + onPointerDown={e => { + this.registerSliding(e, cover); + this.activateContent(); + }} + ref={ele => this.createDropTarget(ele, which, index)}> + {!this._isEmpty ? displayDoc(which) : null} + {/* {this.dataDoc[this.fieldKey + '_0'] !== 'empty' ? displayDoc(which) : null} */} + </div> + ); + + if (this.Document._layout_isFlashcard) { + const side = this.layoutDoc[`_${this._props.fieldKey}_usePath`] === 'alternate' ? 1 : 0; + + // add text box to each side when comparison box is first created + // (!this.dataDoc[this.fieldKey + '_0'] && this.dataDoc[this._props.fieldKey + '_0'] !== 'empty') + if (!this.dataDoc[this.fieldKey + '_0'] && !this._isEmpty) { + const dataSplit = StrCast(this.dataDoc.data).includes('Keyword: ') ? StrCast(this.dataDoc.data).split('Keyword: ') : StrCast(this.dataDoc.data).split('Answer: '); + const newDoc = Docs.Create.TextDocument(dataSplit[1]); + this.addDoc(newDoc, this.fieldKey + '_0'); + } + + if (!this.dataDoc[this.fieldKey + '_1'] && !this._isEmpty) { + const dataSplit = StrCast(this.dataDoc.data).includes('Keyword: ') ? StrCast(this.dataDoc.data).split('Keyword: ') : StrCast(this.dataDoc.data).split('Answer: '); + const newDoc = Docs.Create.TextDocument(dataSplit[0]); + this.addDoc(newDoc, this.fieldKey + '_1'); + } + + if (DocCast(this.Document.embedContainer) && DocCast(this.Document.embedContainer).practiceMode === 'quiz') { + const text = StrCast(RTFCast(DocCast(this.dataDoc[this.fieldKey + '_1']).text)?.Text); + return ( + <div className={`comparisonBox${this._props.isContentActive() ? '-interactive' : ''}`} style={{ display: 'flex', flexDirection: 'column' }}> + <p style={{ color: 'white', padding: 10 }}>{text}</p> + <p style={{ display: text === '' ? 'flex' : 'none', color: 'white', marginLeft: '10px' }}>Return to all flashcards and add text to both sides. </p> + <div className="input-box"> + <textarea + value={this.layoutDoc[`_${this._props.fieldKey}_usePath`] === 'alternate' ? this._outputValue : this._inputValue} + onChange={this.handleInputChange} + onScroll={e => { + e.stopPropagation(); + e.preventDefault(); + }} + placeholder={!this.layoutDoc[`_${this._props.fieldKey}_usePath`] ? 'Enter a response for GPT to evaluate.' : ''} + readOnly={this.layoutDoc[`_${this._props.fieldKey}_usePath`] === 'alternate'}></textarea> + + {this._loading ? 
( + <div className="loading-spinner" style={{ position: 'absolute' }}> + <ReactLoading type="spin" height={30} width={30} color={'blue'} /> + </div> + ) : null} + </div> + <div> + <div className="submit-button" style={{ overflow: 'hidden', display: 'flex', width: '100%' }}> + <div + className="submit-buttonschema-header-button" + onPointerDown={e => this.openContextMenu(e.clientX, e.clientY, false)} + style={{ position: 'absolute', top: '5px', left: '11px', zIndex: '100', width: '5px', height: '5px', cursor: 'pointer' }}> + <FontAwesomeIcon color={'white'} icon="caret-down" /> + </div> + <button className="submit-buttonrecord" onClick={this._listening ? this.stopListening : this.startListening} style={{ background: this._listening ? 'lightgray' : '', borderRadius: '2px' }}> + {<FontAwesomeIcon icon="microphone" size="lg" />} + </button> + <div + className="submit-buttonschema-header-button" + onPointerDown={e => this.openContextMenu(e.clientX, e.clientY, true)} + style={{ position: 'absolute', top: '5px', left: '50px', zIndex: '100', width: '5px', height: '5px', cursor: 'pointer' }}> + <FontAwesomeIcon color={'white'} icon="caret-down" /> + </div> + <button + className="submit-buttonpronunciation" + onClick={this.evaluatePronunciation} + style={{ display: 'inline-flex', alignItems: 'center', background: this._listening ? 'lightgray' : '', borderRadius: '2px', width: '100%' }}> + Evaluate Pronunciation + </button> + + {this.layoutDoc[`_${this._props.fieldKey}_usePath`] !== 'alternate' ? ( + <button className="submit-buttonsubmit" type="button" onClick={this.handleRenderGPTClick} style={{ borderRadius: '2px', marginBottom: '3px', width: '100%' }}> + Submit + </button> + ) : ( + <button className="submit-buttonsubmit" type="button" onClick={this.handleRenderClick} style={{ display: 'inline-flex', alignItems: 'center', borderRadius: '2px', marginBottom: '3px', width: '100%' }}> + Redo the Question + </button> + )} + </div> + </div> + </div> + ); + } + + // render a normal flashcard when not a QuizCard + return ( + <div + className={`comparisonBox${this._props.isContentActive() ? '-interactive' : ''}`} /* change className to easily disable/enable pointer events in CSS */ + style={{ display: 'flex', flexDirection: 'column', overflow: 'hidden' }} + onMouseEnter={() => { + this.hoverFlip(false); + }} + onMouseLeave={() => { + this.hoverFlip(true); + }}> + {displayBox(`${this.fieldKey}_${side === 0 ? 1 : 0}`, side, this._props.PanelWidth() - 3)} + {this._loading ? ( + <div className="loading-spinner" style={{ position: 'absolute' }}> + <ReactLoading type="spin" height={30} width={30} color={'blue'} /> + </div> + ) : null} + {this._props.isContentActive() ? this.flashcardMenu : null} + {this.overlayAlternateIcon} + </div> + ); + } + // render a comparison box that compares items side by side + return ( + <div className={`comparisonBox${this._props.isContentActive() ? '-interactive' : ''}` /* change className to easily disable/enable pointer events in CSS */}> + {displayBox(`${this.fieldKey}_2`, 1, this._props.PanelWidth() - 3)} + <div className="clip-div" style={{ width: this.clipWidth + '%', transition: this._animating, background: StrCast(this.layoutDoc._backgroundColor, 'gray') }}> + {displayBox(`${this.fieldKey}_1`, 0, 0)} + </div> + + <div + className="slide-bar" + style={{ + left: `calc(${this.clipWidth + '%'} - 0.5px)`, + cursor: this.clipWidth < 5 ? 'e-resize' : this.clipWidth / 100 > (this._props.PanelWidth() - 5) / this._props.PanelWidth() ? 
'w-resize' : undefined, + }} + onPointerDown={e => !this.childActive && this.registerSliding(e, this._props.PanelWidth() / 2)} /* if clicked, return slide-bar to center */ + > + <div className="slide-handle" /> + </div> + </div> + ); + } +} + +Docs.Prototypes.TemplateMap.set(DocumentType.COMPARISON, { + data: '', + layout: { view: ComparisonBox, dataField: 'data' }, + options: { + acl: '', + backgroundColor: 'gray', + dropAction: dropActionType.move, + waitForDoubleClickToClick: 'always', + _layout_reflowHorizontal: true, + _layout_reflowVertical: true, + _layout_nativeDimEditable: true, + systemIcon: 'BsLayoutSplit', + }, +}); diff --git a/src/client/views/nodes/ImageBox.tsx b/src/client/views/nodes/ImageBox.tsx index 86da64e5e..1b1431373 100644 --- a/src/client/views/nodes/ImageBox.tsx +++ b/src/client/views/nodes/ImageBox.tsx @@ -385,7 +385,11 @@ export class ImageBox extends ViewBoxAnnotatableComponent<FieldViewProps>() { console.log('RESPONSE:'); console.log(response.data['boxes']); console.log(response.data['text']); - this.createBoxes(response.data['boxes'], response.data['text']); + if (response.data['boxes'].length != 0) { + this.createBoxes(response.data['boxes'], response.data['text']); + } else { + this._loading = false; + } }; createBoxes = (boxes: [[[number, number]]], texts: [string]) => { @@ -397,12 +401,13 @@ export class ImageBox extends ViewBoxAnnotatableComponent<FieldViewProps>() { const height = coords[2][1] - coords[0][1]; const text = texts[i]; - const newCol = Docs.Create.TextDocument('', { + const newCol = Docs.Create.LabelDocument({ _width: width, //width * NumCast(this.dataDoc[this.fieldKey + '_nativeWidth']), _height: height, //height * NumCast(this.dataDoc[this.fieldKey + '_nativeHeight']), _layout_fitWidth: true, + title: '', // _layout_autoHeight: true, }); const scaling = 1 / (this._props.NativeDimScaling?.() || 1); @@ -414,6 +419,7 @@ export class ImageBox extends ViewBoxAnnotatableComponent<FieldViewProps>() { newCol.forceActive = true; newCol.quiz = text; newCol.showQuiz = false; + newCol[DocData].textTransform = 'none'; this._quizBoxes.push(newCol); this.addDocument(newCol); this._loading = false; @@ -434,7 +440,6 @@ export class ImageBox extends ViewBoxAnnotatableComponent<FieldViewProps>() { }; makeLabels = async () => { - this._loading = true; try { const hrefBase64 = await this.createCanvas(); this.pushInfo(quizMode.NORMAL, hrefBase64); @@ -443,43 +448,51 @@ export class ImageBox extends ViewBoxAnnotatableComponent<FieldViewProps>() { } }; - levenshteinDistance = (a: string, b: string) => { - const an = a.length; - const bn = b.length; - const matrix = []; + levenshteinDistance = (str1: string, str2: string) => { + const len1 = str1.length; + const len2 = str2.length; + const dp = Array.from(Array(len1 + 1), () => Array(len2 + 1).fill(0)); + + if (len1 === 0) return len2; + if (len2 === 0) return len1; + + for (let i = 0; i <= len1; i++) dp[i][0] = i; + for (let j = 0; j <= len2; j++) dp[0][j] = j; + + for (let i = 1; i <= len1; i++) { + for (let j = 1; j <= len2; j++) { + const cost = str1[i - 1] === str2[j - 1] ? 
0 : 1; + dp[i][j] = Math.min( + dp[i - 1][j] + 1, // deletion + dp[i][j - 1] + 1, // insertion + dp[i - 1][j - 1] + cost // substitution + ); + } + } - // Ensure non-zero length strings - if (an === 0) return bn; - if (bn === 0) return an; + return dp[len1][len2]; + }; - // Initialize the matrix - for (let i = 0; i <= an; i++) { - matrix[i] = [i]; - } - for (let j = 0; j <= bn; j++) { - matrix[0][j] = j; - } + jaccardSimilarity = (str1: string, str2: string) => { + const set1 = new Set(str1.split(' ')); + const set2 = new Set(str2.split(' ')); - // Populate the matrix - for (let i = 1; i <= an; i++) { - for (let j = 1; j <= bn; j++) { - if (a[i - 1] === b[j - 1]) { - matrix[i][j] = matrix[i - 1][j - 1]; - } else { - matrix[i][j] = Math.min( - matrix[i - 1][j - 1] + 1, // substitution - Math.min( - matrix[i][j - 1] + 1, // insertion - matrix[i - 1][j] + 1 // deletion - ) - ); - } - } - } + const intersection = new Set([...set1].filter(x => set2.has(x))); + const union = new Set([...set1, ...set2]); - return matrix[an][bn]; + return intersection.size / union.size; }; + stringSimilarity(str1: string, str2: string) { + const levenshteinDist = this.levenshteinDistance(str1, str2); + const levenshteinScore = 1 - levenshteinDist / Math.max(str1.length, str2.length); + + const jaccardScore = this.jaccardSimilarity(str1, str2); + + // Combine the scores with a higher weight on Jaccard similarity + return 0.5 * levenshteinScore + 0.5 * jaccardScore; + } + @computed get checkIcon() { return ( <Tooltip title={<div className="dash-tooltip">Check</div>}> @@ -501,9 +514,9 @@ export class ImageBox extends ViewBoxAnnotatableComponent<FieldViewProps>() { } compareWords = (input: string, target: string) => { - const distance = this.levenshteinDistance(input.toLowerCase(), target.toLowerCase()); - const threshold = Math.max(input.length, target.length) * 0.2; // Allow up to 20% of the length as difference - return distance <= threshold; + const distance = this.stringSimilarity(input.toLowerCase(), target.toLowerCase()); + // const threshold = Math.max(input.length, target.length) * 0.2; // Allow up to 20% of the length as difference + return distance >= 0.7; }; extractHexAndSentences = (inputString: string) => { @@ -523,7 +536,7 @@ export class ImageBox extends ViewBoxAnnotatableComponent<FieldViewProps>() { check = () => { this._loading = true; this._quizBoxes.forEach(async doc => { - const input = StrCast(RTFCast(DocCast(doc).text)?.Text); + const input = StrCast(doc[DocData].title); console.log('INP: ' + StrCast(input) + '; DOC: ' + StrCast(doc.quiz)); if (this._quizMode == quizMode.SMART && input) { const questionText = 'Question: What was labeled in this image?'; @@ -553,7 +566,7 @@ export class ImageBox extends ViewBoxAnnotatableComponent<FieldViewProps>() { redo = () => { this._quizBoxes.forEach(doc => { - DocCast(doc)[DocData].text = ''; + doc[DocData].title = ''; doc.backgroundColor = '#e4e4e4'; doc.showQuiz = false; }); @@ -564,8 +577,10 @@ export class ImageBox extends ViewBoxAnnotatableComponent<FieldViewProps>() { this._quizBoxes.forEach(doc => { // this._props.removeDocument?.(DocCast(doc)); // this._props.DocumentView?.()._props.removeDocument?.(doc); + this.removeDocument?.(doc); }); this._quizBoxes = []; + console.log('remove'); }; @action diff --git a/src/client/views/nodes/LabelBigText.js b/src/client/views/nodes/LabelBigText.js new file mode 100644 index 000000000..290152cd0 --- /dev/null +++ b/src/client/views/nodes/LabelBigText.js @@ -0,0 +1,270 @@ +/* +Brorlandi/big-text.js v1.0.0, 2017 
+Adapted from DanielHoffmann/jquery-bigtext, v1.3.0, May 2014 +And from Jetroid/bigtext.js v1.0.0, September 2016 + +Usage: +BigText("#myElement",{ + rotateText: {Number}, (null) + fontSizeFactor: {Number}, (0.8) + maximumFontSize: {Number}, (null) + limitingDimension: {String}, ("both") + horizontalAlign: {String}, ("center") + verticalAlign: {String}, ("center") + textAlign: {String}, ("center") + whiteSpace: {String}, ("nowrap") +}); + + +Original Projects: +https://github.com/DanielHoffmann/jquery-bigtext +https://github.com/Jetroid/bigtext.js + +Options: + +rotateText: Rotates the text inside the element by X degrees. + +fontSizeFactor: This option is used to give some vertical spacing for letters that overflow the line-height (like 'g', 'Á' and most other accentuated uppercase letters). This does not affect the font-size if the limiting factor is the width of the parent div. The default is 0.8 + +maximumFontSize: maximum font size to use. + +minimumFontSize: minimum font size to use. if font is calculated smaller than this, text will be rendered at this size and wrapped + +limitingDimension: In which dimension the font size should be limited. Possible values: "width", "height" or "both". Defaults to both. Using this option with values different than "both" overwrites the element parent width or height. + +horizontalAlign: Where to align the text horizontally. Possible values: "left", "center", "right". Defaults to "center". + +verticalAlign: Where to align the text vertically. Possible values: "top", "center", "bottom". Defaults to "center". + +textAlign: Sets the text align of the element. Possible values: "left", "center", "right". Defaults to "center". This option is only useful if there are linebreaks (<br> tags) inside the text. + +whiteSpace: Sets whitespace handling. Possible values: "nowrap", "pre". Defaults to "nowrap". (Can also be set to enable wrapping but this doesn't work well.) + +Bruno Orlandi - 2017 + +Copyright (C) 2013 Daniel Hoffmann Bernardes, Ícaro Technologies +Copyright (C) 2016 Jet Holt + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+*/ + +function _calculateInnerDimensions(computedStyle) { + //Calculate the inner width and height + var innerWidth; + var innerHeight; + + var width = parseInt(computedStyle.getPropertyValue("width")); + var height = parseInt(computedStyle.getPropertyValue("height")); + var paddingLeft = parseInt(computedStyle.getPropertyValue("padding-left")); + var paddingRight = parseInt(computedStyle.getPropertyValue("padding-right")); + var paddingTop = parseInt(computedStyle.getPropertyValue("padding-top")); + var paddingBottom = parseInt(computedStyle.getPropertyValue("padding-bottom")); + var borderLeft = parseInt(computedStyle.getPropertyValue("border-left-width")); + var borderRight = parseInt(computedStyle.getPropertyValue("border-right-width")); + var borderTop = parseInt(computedStyle.getPropertyValue("border-top-width")); + var borderBottom = parseInt(computedStyle.getPropertyValue("border-bottom-width")); + + //If box-sizing is border-box, we need to subtract padding and border. + var parentBoxSizing = computedStyle.getPropertyValue("box-sizing"); + if (parentBoxSizing == "border-box") { + innerWidth = width - (paddingLeft + paddingRight + borderLeft + borderRight); + innerHeight = height - (paddingTop + paddingBottom + borderTop + borderBottom); + } else { + innerWidth = width; + innerHeight = height; + } + var obj = {}; + obj["width"] = innerWidth; + obj["height"] = innerHeight; + return obj; +} + +export default function BigText(element, options) { + + if (typeof element === 'string') { + element = document.querySelector(element); + } else if (element.length) { + // Support for array based queries (such as jQuery) + element = element[0]; + } + + var defaultOptions = { + rotateText: null, + fontSizeFactor: 0.8, + maximumFontSize: null, + limitingDimension: "both", + horizontalAlign: "center", + verticalAlign: "center", + textAlign: "center", + whiteSpace: "nowrap", + singleLine: true + }; + + //Merge provided options and default options + options = options || {}; + for (var opt in defaultOptions) + if (defaultOptions.hasOwnProperty(opt) && !options.hasOwnProperty(opt)) + options[opt] = defaultOptions[opt]; + + //Get variables which we will reference frequently + var style = element.style; + var parent = element.parentNode; + var parentStyle = parent.style; + var parentComputedStyle = document.defaultView.getComputedStyle(parent); + + //hides the element to prevent "flashing" + style.visibility = "hidden"; + //Set some properties + style.display = "inline-block"; + style.clear = "both"; + style.float = "left"; + var fontSize = options.maximumFontSize; + if (options.singleLine) { + style.fontSize = (fontSize * options.fontSizeFactor) + "px"; + style.lineHeight = fontSize + "px"; + } else { + for (; fontSize > options.minimumFontSize; fontSize = fontSize - Math.min(fontSize / 2, Math.max(0, fontSize - 48) + 2)) { + style.fontSize = (fontSize * options.fontSizeFactor) + "px"; + style.lineHeight = "1"; + if (element.offsetHeight <= +parentComputedStyle.height.replace("px", "")) { + break; + } + } + } + style.whiteSpace = options.whiteSpace; + style.textAlign = options.textAlign; + style.position = "relative"; + style.padding = 0; + style.margin = 0; + style.left = "50%"; + style.top = "50%"; + var computedStyle = document.defaultView.getComputedStyle(element); + + //Get properties of parent to allow easier referencing later. 
+ var parentPadding = { + top: parseInt(parentComputedStyle.getPropertyValue("padding-top")), + right: parseInt(parentComputedStyle.getPropertyValue("padding-right")), + bottom: parseInt(parentComputedStyle.getPropertyValue("padding-bottom")), + left: parseInt(parentComputedStyle.getPropertyValue("padding-left")), + }; + var parentBorder = { + top: parseInt(parentComputedStyle.getPropertyValue("border-top")), + right: parseInt(parentComputedStyle.getPropertyValue("border-right")), + bottom: parseInt(parentComputedStyle.getPropertyValue("border-bottom")), + left: parseInt(parentComputedStyle.getPropertyValue("border-left")), + }; + + //Calculate the parent inner width and height + var parentInnerDimensions = _calculateInnerDimensions(parentComputedStyle); + var parentInnerWidth = parentInnerDimensions["width"]; + var parentInnerHeight = parentInnerDimensions["height"]; + + var box = { + width: element.offsetWidth, //Note: This is slightly larger than the jQuery version + height: element.offsetHeight, + }; + if (!box.width || !box.height) return element; + + + if (options.rotateText !== null) { + if (typeof options.rotateText !== "number") + throw "bigText error: rotateText value must be a number"; + var rotate = "rotate(" + options.rotateText + "deg)"; + style.webkitTransform = rotate; + style.msTransform = rotate; + style.MozTransform = rotate; + style.OTransform = rotate; + style.transform = rotate; + //calculating bounding box of the rotated element + var sine = Math.abs(Math.sin(options.rotateText * Math.PI / 180)); + var cosine = Math.abs(Math.cos(options.rotateText * Math.PI / 180)); + box.width = element.offsetWidth * cosine + element.offsetHeight * sine; + box.height = element.offsetWidth * sine + element.offsetHeight * cosine; + } + + var parentWidth = (parentInnerWidth - parentPadding.left - parentPadding.right); + var parentHeight = (parentInnerHeight - parentPadding.top - parentPadding.bottom); + var widthFactor = parentWidth / box.width; + var heightFactor = parentHeight / box.height; + var lineHeight; + + if (options.limitingDimension.toLowerCase() === "width") { + lineHeight = Math.floor(widthFactor * fontSize); + } else if (options.limitingDimension.toLowerCase() === "height") { + lineHeight = Math.floor(heightFactor * fontSize); + } else if (widthFactor < heightFactor) + lineHeight = Math.floor(widthFactor * fontSize); + else if (widthFactor >= heightFactor) + lineHeight = Math.floor(heightFactor * fontSize); + + var fontSize = lineHeight * options.fontSizeFactor; + if (fontSize < options.minimumFontSize) { + parentStyle.display = "flex"; + parentStyle.alignItems = "center"; + style.textAlign = "center"; + style.visibility = ""; + style.fontSize = options.minimumFontSize + "px"; + style.lineHeight = ""; + style.overflow = "hidden"; + style.textOverflow = "ellipsis"; + style.top = ""; + style.left = ""; + style.margin = ""; + return element; + } + if (options.maximumFontSize && fontSize > options.maximumFontSize) { + fontSize = options.maximumFontSize; + lineHeight = fontSize / options.fontSizeFactor; + } + + style.fontSize = Math.floor(fontSize) + "px"; + style.lineHeight = Math.ceil(lineHeight) + "px"; + style.marginBottom = "0px"; + style.marginRight = "0px"; + + // if (options.limitingDimension.toLowerCase() === "height") { + // //this option needs the font-size to be set already so computedStyle.getPropertyValue("width") returns the right size + // //this +4 is to compensate the rounding erros that can occur due to the calls to Math.floor in the centering code + // 
parentStyle.width = (parseInt(computedStyle.getPropertyValue("width")) + 4) + "px"; + // } + + //Calculate the inner width and height + var innerDimensions = _calculateInnerDimensions(computedStyle); + var innerWidth = innerDimensions["width"]; + var innerHeight = innerDimensions["height"]; + + switch (options.verticalAlign.toLowerCase()) { + case "top": + style.top = "0%"; + break; + case "bottom": + style.top = "100%"; + style.marginTop = Math.floor(-innerHeight) + "px"; + break; + default: + style.marginTop = Math.ceil((-innerHeight / 2)) + "px"; + break; + } + + switch (options.horizontalAlign.toLowerCase()) { + case "left": + style.left = "0%"; + break; + case "right": + style.left = "100%"; + style.marginLeft = Math.floor(-innerWidth) + "px"; + break; + default: + style.marginLeft = Math.ceil((-innerWidth / 2)) + "px"; + break; + } + + //shows the element after the work is done + style.visibility = "visible"; + + return element; +} diff --git a/src/client/views/nodes/LabelBox.scss b/src/client/views/nodes/LabelBox.scss index 0b195713d..ca4b3d467 100644 --- a/src/client/views/nodes/LabelBox.scss +++ b/src/client/views/nodes/LabelBox.scss @@ -23,6 +23,41 @@ } } +.answer-icon { + position: absolute; + right: 8; + bottom: 5; + color: black; + display: inline-block; + font-size: 10px; + cursor: pointer; + border-radius: 50%; + overflow: hidden; +} + +.q-icon { + position: absolute; + right: 6; + bottom: 5; + color: white; + display: inline-block; + font-size: 10px; + cursor: pointer; + border-radius: 50%; + overflow: hidden; +} + +.edit-icon { + position: absolute; + right: 20; + bottom: 5; + display: inline-block; + font-size: 10px; + cursor: pointer; + border-radius: 50%; + overflow: hidden; +} + .labelBox-params { display: flex; flex-direction: row; diff --git a/src/client/views/nodes/LabelBox.tsx b/src/client/views/nodes/LabelBox.tsx index d33d12603..daf8e3300 100644 --- a/src/client/views/nodes/LabelBox.tsx +++ b/src/client/views/nodes/LabelBox.tsx @@ -1,10 +1,10 @@ import { Property } from 'csstype'; -import { action, computed, makeObservable, trace } from 'mobx'; +import { action, computed, makeObservable, observable, trace } from 'mobx'; import { observer } from 'mobx-react'; import * as React from 'react'; import * as textfit from 'textfit'; import { Field, FieldType } from '../../../fields/Doc'; -import { BoolCast, NumCast, StrCast } from '../../../fields/Types'; +import { BoolCast, NumCast, StrCast, RTFCast } from '../../../fields/Types'; import { DocumentType } from '../../documents/DocumentTypes'; import { Docs } from '../../documents/Documents'; import { DragManager } from '../../util/DragManager'; @@ -12,6 +12,10 @@ import { ViewBoxBaseComponent } from '../DocComponent'; import { PinDocView, PinProps } from '../PinFuncs'; import { StyleProp } from '../StyleProp'; import { FieldView, FieldViewProps } from './FieldView'; +import { Tooltip } from '@mui/material'; +import { emptyFunction } from '../../../Utils'; +import { FontAwesomeIcon } from '@fortawesome/react-fontawesome'; +import { returnFalse, setupMoveUpEvents } from '../../../ClientUtils'; import './LabelBox.scss'; @observer @@ -21,6 +25,7 @@ export class LabelBox extends ViewBoxBaseComponent<FieldViewProps>() { } private dropDisposer?: DragManager.DragDropDisposer; private _timeout: NodeJS.Timeout | undefined; + @observable private _editLabel = false; _divRef: HTMLDivElement | null = null; constructor(props: FieldViewProps) { @@ -43,6 +48,48 @@ export class LabelBox extends ViewBoxBaseComponent<FieldViewProps>() { 
return this._props.styleProvider?.(this.Document, this._props, StyleProp.BackgroundColor) as string; } + @computed get answerIcon() { + return ( + <Tooltip + title={ + <div className="answer-tooltip" style={{ minWidth: '150px' }}> + {StrCast(this.Document.quiz)} + </div> + }> + <div className="answer-tool-tip"> + <FontAwesomeIcon className="q-icon" icon="circle" color="white" /> + <FontAwesomeIcon className="answer-icon" icon="question" /> + </div> + </Tooltip> + ); + } + + @computed get editAnswer() { + return ( + <Tooltip + title={ + <div className="answer-tooltip" style={{ minWidth: '150px' }}> + {this._editLabel ? 'save' : 'edit correct answer'} + </div> + }> + <div className="answer-tool-tip" onPointerDown={e => setupMoveUpEvents(e.target, e, returnFalse, emptyFunction, () => this.editLabelAnswer())}> + <FontAwesomeIcon className="edit-icon" color={this._editLabel ? 'white' : 'black'} icon="pencil" size="sm" /> + </div> + </Tooltip> + ); + } + + editLabelAnswer = () => { + // when click the pencil, set the text to the quiz content. when click off, set the quiz text to that and set textbox to nothing. + if (!this._editLabel) { + this.dataDoc.title = StrCast(this.Document.quiz); + } else { + this.Document.quiz = this.Title; + this.dataDoc.title = ''; + } + this._editLabel = !this._editLabel; + }; + componentDidMount() { this._props.setContentViewBox?.(this); } @@ -121,7 +168,11 @@ export class LabelBox extends ViewBoxBaseComponent<FieldViewProps>() { width: this._props.PanelWidth(), height: this._props.PanelHeight(), whiteSpace: 'multiLine' in boxParams && boxParams.multiLine ? 'pre-wrap' : 'pre', - }}> + }} + // onMouseLeave={() => { + // this.hoverFlip(undefined); + // }} + > <div style={{ width: this._props.PanelWidth() - 2 * NumCast(this.layoutDoc._xPadding), @@ -133,10 +184,10 @@ export class LabelBox extends ViewBoxBaseComponent<FieldViewProps>() { })} onKeyUp={action(e => { e.stopPropagation(); - if (e.key === 'Enter') { - this.dataDoc[this.fieldKey] = this._divRef?.innerText ?? ''; - setTimeout(() => this._props.select(false)); - } + // if (e.key === 'Enter') { + this.dataDoc[this.fieldKey] = this._divRef?.innerText ?? ''; + setTimeout(() => this._props.select(false)); + // } })} onBlur={() => { this.dataDoc[this.fieldKey] = this._divRef?.innerText ?? ''; @@ -157,6 +208,8 @@ export class LabelBox extends ViewBoxBaseComponent<FieldViewProps>() { {label} </div> </div> + {this.Document.showQuiz ? this.answerIcon : null} + {this.Document.showQuiz ? 
this.editAnswer : null} </div> ); } diff --git a/src/client/views/nodes/formattedText/FormattedTextBox.scss b/src/client/views/nodes/formattedText/FormattedTextBox.scss index 71706084f..d6f13d9ee 100644 --- a/src/client/views/nodes/formattedText/FormattedTextBox.scss +++ b/src/client/views/nodes/formattedText/FormattedTextBox.scss @@ -79,37 +79,6 @@ audiotag:hover { right: 8px; position: absolute; } - .answer-icon { - position: absolute; - right: 10; - bottom: 10; - color: black; - display: inline-block; - font-size: 20px; - cursor: pointer; - border-radius: 50%; - } - - .q-icon { - position: absolute; - right: 6; - bottom: 10; - color: white; - display: inline-block; - font-size: 20px; - cursor: pointer; - border-radius: 50%; - } - - .edit-icon { - position: absolute; - right: 35; - bottom: 10; - display: inline-block; - font-size: 20px; - cursor: pointer; - border-radius: 50%; - } } .answer-tooltip { diff --git a/src/client/views/nodes/formattedText/FormattedTextBox.tsx b/src/client/views/nodes/formattedText/FormattedTextBox.tsx index e1ea93c3f..faef78469 100644 --- a/src/client/views/nodes/formattedText/FormattedTextBox.tsx +++ b/src/client/views/nodes/formattedText/FormattedTextBox.tsx @@ -114,7 +114,6 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<FormattedTextB private _rules: RichTextRules | undefined; private _forceUncollapse = true; // if the cursor doesn't move between clicks, then the selection will disappear for some reason. This flags the 2nd click as happening on a selection which allows bullet points to toggle private _break = true; - @observable private _editLabel = false; public ProseRef?: HTMLDivElement; public get EditorView() { return this._editorView; @@ -2092,48 +2091,6 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<FormattedTextB ); } - @computed get answerIcon() { - return ( - <Tooltip - title={ - <div className="answer-tooltip" style={{ minWidth: '150px' }}> - {StrCast(this.Document.quiz)} - </div> - }> - <div className="answer-tool-tip"> - <FontAwesomeIcon className="q-icon" icon="circle" color="white" /> - <FontAwesomeIcon className="answer-icon" icon="question" /> - </div> - </Tooltip> - ); - } - - @computed get editAnswer() { - return ( - <Tooltip - title={ - <div className="answer-tooltip" style={{ minWidth: '150px' }}> - {this._editLabel ? 'save' : 'edit correct answer'} - </div> - }> - <div className="answer-tool-tip" onPointerDown={e => setupMoveUpEvents(e.target, e, returnFalse, emptyFunction, () => this.editLabelAnswer())}> - <FontAwesomeIcon className="edit-icon" color={this._editLabel ? 'white' : 'black'} icon="pencil" size="sm" /> - </div> - </Tooltip> - ); - } - - editLabelAnswer = () => { - // when click the pencil, set the text to the quiz content. when click off, set the quiz text to that and set textbox to nothing. - if (!this._editLabel) { - this.dataDoc.text = StrCast(this.Document.quiz); - } else { - this.Document.quiz = RTFCast(this.dataDoc.text).Text; - this.dataDoc.text = ''; - } - this._editLabel = !this._editLabel; - }; - get fieldKey() { return this._fieldKey; } @@ -2252,8 +2209,6 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<FormattedTextB {this.noSidebar || this.Document._layout_noSidebar || this.Document._createDocOnCR || this.layoutDoc._chromeHidden || this.Document.quiz ? null : this.sidebarHandle} {this.audioHandle} {this.layoutDoc._layout_enableAltContentUI && !this.layoutDoc._chromeHidden ? this.overlayAlternateIcon : null} - {this.Document.showQuiz ? 
this.answerIcon : null} - {this.Document.showQuiz ? this.editAnswer : null} </div> </div> ); diff --git a/src/client/views/webcam/WebCamLogic.js b/src/client/views/webcam/WebCamLogic.js new file mode 100644 index 000000000..5f6202bc8 --- /dev/null +++ b/src/client/views/webcam/WebCamLogic.js @@ -0,0 +1,292 @@ +'use strict'; +import io from "socket.io-client"; + +var socket; +var isChannelReady = false; +var isInitiator = false; +var isStarted = false; +var localStream; +var pc; +var remoteStream; +var turnReady; +var room; + +export function initialize(roomName, handlerUI) { + + var pcConfig = { + 'iceServers': [{ + 'urls': 'stun:stun.l.google.com:19302' + }] + }; + + // Set up audio and video regardless of what devices are present. + var sdpConstraints = { + offerToReceiveAudio: true, + offerToReceiveVideo: true + }; + + ///////////////////////////////////////////// + + room = roomName; + + socket = io.connect(`${window.location.protocol}//${window.location.hostname}:4321`); + + if (room !== '') { + socket.emit('create or join', room); + console.log('Attempted to create or join room', room); + } + + socket.on('created', function (room) { + console.log('Created room ' + room); + isInitiator = true; + }); + + socket.on('full', function (room) { + console.log('Room ' + room + ' is full'); + }); + + socket.on('join', function (room) { + console.log('Another peer made a request to join room ' + room); + console.log('This peer is the initiator of room ' + room + '!'); + isChannelReady = true; + }); + + socket.on('joined', function (room) { + console.log('joined: ' + room); + isChannelReady = true; + }); + + socket.on('log', function (array) { + console.log.apply(console, array); + }); + + //////////////////////////////////////////////// + + + // This client receives a message + socket.on('message', function (message) { + console.log('Client received message:', message); + if (message === 'got user media') { + maybeStart(); + } else if (message.type === 'offer') { + if (!isInitiator && !isStarted) { + maybeStart(); + } + pc.setRemoteDescription(new RTCSessionDescription(message)); + doAnswer(); + } else if (message.type === 'answer' && isStarted) { + pc.setRemoteDescription(new RTCSessionDescription(message)); + } else if (message.type === 'candidate' && isStarted) { + var candidate = new RTCIceCandidate({ + sdpMLineIndex: message.label, + candidate: message.candidate + }); + pc.addIceCandidate(candidate); + } else if (message === 'bye' && isStarted) { + handleRemoteHangup(); + } + }); + + //////////////////////////////////////////////////// + + var localVideo = document.querySelector('#localVideo'); + var remoteVideo = document.querySelector('#remoteVideo'); + + const gotStream = (stream) => { + console.log('Adding local stream.'); + localStream = stream; + localVideo.srcObject = stream; + sendMessage('got user media'); + if (isInitiator) { + maybeStart(); + } + } + + + navigator.mediaDevices.getUserMedia({ + audio: true, + video: true + }) + .then(gotStream) + .catch(function (e) { + alert('getUserMedia() error: ' + e.name); + }); + + + + var constraints = { + video: true + }; + + console.log('Getting user media with constraints', constraints); + + const requestTurn = (turnURL) => { + var turnExists = false; + for (var i in pcConfig.iceServers) { + if (pcConfig.iceServers[i].urls.substr(0, 5) === 'turn:') { + turnExists = true; + turnReady = true; + break; + } + } + if (!turnExists) { + console.log('Getting TURN server from ', turnURL); + // No TURN server. 
Get one from computeengineondemand.appspot.com: + var xhr = new XMLHttpRequest(); + xhr.onreadystatechange = function () { + if (xhr.readyState === 4 && xhr.status === 200) { + var turnServer = JSON.parse(xhr.responseText); + console.log('Got TURN server: ', turnServer); + pcConfig.iceServers.push({ + 'urls': 'turn:' + turnServer.username + '@' + turnServer.turn, + 'credential': turnServer.password + }); + turnReady = true; + } + }; + xhr.open('GET', turnURL, true); + xhr.send(); + } + } + + + + + if (location.hostname !== 'localhost') { + requestTurn( + `${window.location.origin}/corsProxy/${encodeURIComponent("https://computeengineondemand.appspot.com/turn?username=41784574&key=4080218913")}` + ); + } + + const maybeStart = () => { + console.log('>>>>>>> maybeStart() ', isStarted, localStream, isChannelReady); + if (!isStarted && typeof localStream !== 'undefined' && isChannelReady) { + console.log('>>>>>> creating peer connection'); + createPeerConnection(); + pc.addStream(localStream); + isStarted = true; + console.log('isInitiator', isInitiator); + if (isInitiator) { + doCall(); + } + } + }; + + window.onbeforeunload = function () { + sendMessage('bye'); + }; + + ///////////////////////////////////////////////////////// + + const createPeerConnection = () => { + try { + pc = new RTCPeerConnection(null); + pc.onicecandidate = handleIceCandidate; + pc.onaddstream = handleRemoteStreamAdded; + pc.onremovestream = handleRemoteStreamRemoved; + console.log('Created RTCPeerConnnection'); + } catch (e) { + console.log('Failed to create PeerConnection, exception: ' + e.message); + alert('Cannot create RTCPeerConnection object.'); + return; + } + } + + const handleIceCandidate = (event) => { + console.log('icecandidate event: ', event); + if (event.candidate) { + sendMessage({ + type: 'candidate', + label: event.candidate.sdpMLineIndex, + id: event.candidate.sdpMid, + candidate: event.candidate.candidate + }); + } else { + console.log('End of candidates.'); + } + } + + const handleCreateOfferError = (event) => { + console.log('createOffer() error: ', event); + } + + const doCall = () => { + console.log('Sending offer to peer'); + pc.createOffer(setLocalAndSendMessage, handleCreateOfferError); + } + + const doAnswer = () => { + console.log('Sending answer to peer.'); + pc.createAnswer().then( + setLocalAndSendMessage, + onCreateSessionDescriptionError + ); + } + + const setLocalAndSendMessage = (sessionDescription) => { + pc.setLocalDescription(sessionDescription); + console.log('setLocalAndSendMessage sending message', sessionDescription); + sendMessage(sessionDescription); + } + + const onCreateSessionDescriptionError = (error) => { + trace('Failed to create session description: ' + error.toString()); + } + + + + const handleRemoteStreamAdded = (event) => { + console.log('Remote stream added.'); + remoteStream = event.stream; + remoteVideo.srcObject = remoteStream; + handlerUI(); + + }; + + const handleRemoteStreamRemoved = (event) => { + console.log('Remote stream removed. 
Event: ', event); + } +} + +export function hangup() { + console.log('Hanging up.'); + stop(); + sendMessage('bye'); + if (localStream) { + localStream.getTracks().forEach(track => track.stop()); + } +} + +function stop() { + isStarted = false; + if (pc) { + pc.close(); + } + pc = null; +} + +function handleRemoteHangup() { + console.log('Session terminated.'); + stop(); + isInitiator = false; + if (localStream) { + localStream.getTracks().forEach(track => track.stop()); + } +} + +function sendMessage(message) { + console.log('Client sending message: ', message); + socket.emit('message', message, room); +}; + +export function refreshVideos() { + var localVideo = document.querySelector('#localVideo'); + var remoteVideo = document.querySelector('#remoteVideo'); + if (localVideo) { + localVideo.srcObject = localStream; + } + if (remoteVideo) { + remoteVideo.srcObject = remoteStream; + } + +}
\ No newline at end of file
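
The gptFlashcardPile routine added to ComparisonBox.tsx assumes the model replies with plain text of the form "Question: ... Answer: ... Keyword: ..." repeated once per card, and recovers the pieces with string splits. A minimal standalone sketch of that parsing convention, assuming the same markers as the diff (the interface and function names are illustrative, not part of the codebase):

interface FlashcardSpec { question: string; answer: string; keyword?: string }

function parseFlashcards(gptText: string): FlashcardSpec[] {
    return gptText
        .split('Question: ')
        .slice(1) // text before the first "Question:" marker carries no card
        .map(chunk => {
            const [beforeKeyword, keyword] = chunk.split('Keyword: ');
            const [question, answer = ''] = beforeKeyword.split('Answer: ');
            return { question: question.trim(), answer: answer.trim(), keyword: keyword?.trim() };
        });
}

// e.g. parseFlashcards('Question: What is the powerhouse of the cell? Answer: The mitochondrion. Keyword: mitochondrion')
//   -> [{ question: 'What is the powerhouse of the cell?', answer: 'The mitochondrion.', keyword: 'mitochondrion' }]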
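
The handleResult callback in ComparisonBox.tsx keeps only the recognizer's final transcript segments and discards interim ones. A hedged sketch of the same Web Speech API pattern in isolation; the (window as any) casts are an assumption, since webkitSpeechRecognition is not declared in every TypeScript DOM lib version:

// Accumulate committed speech-to-text segments; interim results may still change.
const SpeechRecognitionCtor = (window as any).SpeechRecognition || (window as any).webkitSpeechRecognition;
const recognition = new SpeechRecognitionCtor();
recognition.lang = 'en-US';
recognition.interimResults = true;

let finalTranscript = '';
recognition.onresult = (e: SpeechRecognitionEvent) => {
    for (let i = e.resultIndex; i < e.results.length; i++) {
        if (e.results[i].isFinal) finalTranscript += e.results[i][0].transcript;
    }
};
recognition.start();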
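
The quiz-checking change in ImageBox.tsx grades a typed label by blending two measures: a Levenshtein score normalized to [0, 1] and a Jaccard score over whitespace-separated words, weighted 0.5/0.5, with 0.7 used as the pass threshold in compareWords. The same logic as a self-contained sketch:

function levenshteinDistance(str1: string, str2: string): number {
    const len1 = str1.length;
    const len2 = str2.length;
    if (len1 === 0) return len2;
    if (len2 === 0) return len1;
    const dp = Array.from(Array(len1 + 1), () => Array<number>(len2 + 1).fill(0));
    for (let i = 0; i <= len1; i++) dp[i][0] = i;
    for (let j = 0; j <= len2; j++) dp[0][j] = j;
    for (let i = 1; i <= len1; i++) {
        for (let j = 1; j <= len2; j++) {
            const cost = str1[i - 1] === str2[j - 1] ? 0 : 1;
            dp[i][j] = Math.min(
                dp[i - 1][j] + 1,        // deletion
                dp[i][j - 1] + 1,        // insertion
                dp[i - 1][j - 1] + cost  // substitution
            );
        }
    }
    return dp[len1][len2];
}

function jaccardSimilarity(str1: string, str2: string): number {
    const set1 = new Set(str1.split(' '));
    const set2 = new Set(str2.split(' '));
    const intersection = [...set1].filter(w => set2.has(w)).length;
    const union = new Set([...set1, ...set2]).size;
    return union === 0 ? 0 : intersection / union;
}

// Equal weighting of the two scores, as in the diff; a result >= 0.7 counts as a match.
function stringSimilarity(str1: string, str2: string): number {
    const levScore = 1 - levenshteinDistance(str1, str2) / Math.max(str1.length, str2.length);
    return 0.5 * levScore + 0.5 * jaccardSimilarity(str1, str2);
}

// e.g. stringSimilarity('golgi apparatus', 'the golgi apparatus') is roughly 0.73, so it passes the 0.7 threshold.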
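
LabelBigText.js sizes a label by measuring the text once, scaling by whichever of the width or height ratio is more constraining, applying fontSizeFactor as vertical breathing room, and clamping to maximumFontSize. A hypothetical pure-function version of that core calculation (ignoring rotation, minimumFontSize wrapping, and the limitingDimension overrides):

function fitFontSize(parentW: number, parentH: number, boxW: number, boxH: number, startFontSize: number, fontSizeFactor = 0.8, maximumFontSize?: number) {
    const widthFactor = parentW / boxW;   // how much the measured text can grow horizontally
    const heightFactor = parentH / boxH;  // how much it can grow vertically
    let lineHeight = Math.floor(Math.min(widthFactor, heightFactor) * startFontSize);
    let fontSize = lineHeight * fontSizeFactor;
    if (maximumFontSize && fontSize > maximumFontSize) {
        fontSize = maximumFontSize;
        lineHeight = fontSize / fontSizeFactor;
    }
    return { fontSize: Math.floor(fontSize), lineHeight: Math.ceil(lineHeight) };
}

// e.g. a 100x40 measured label inside a 300x60 parent at a 48px trial size:
// widthFactor 3, heightFactor 1.5 -> lineHeight 72, fontSize 57.6 -> rendered at 57px / 72px.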
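
WebCamLogic.js wires two peers together by relaying SDP offers/answers and ICE candidates through a shared socket.io 'message' event scoped to a room. A condensed sketch of that signaling round-trip using the promise-based RTCPeerConnection API; the original file uses the older callback/addStream style, and the room name and signaling URL here are placeholders:

import io from 'socket.io-client';

const socket = io.connect(window.location.origin); // placeholder signaling server
const room = 'demo-room';                          // placeholder room name
const pc = new RTCPeerConnection({ iceServers: [{ urls: 'stun:stun.l.google.com:19302' }] });

const send = (message: any) => socket.emit('message', message, room);

// Trickle local ICE candidates to the remote peer as they are discovered.
pc.onicecandidate = e => e.candidate && send({ type: 'candidate', label: e.candidate.sdpMLineIndex, candidate: e.candidate.candidate });

socket.on('message', async (message: any) => {
    if (message.type === 'offer') {
        await pc.setRemoteDescription(new RTCSessionDescription(message));
        const answer = await pc.createAnswer();
        await pc.setLocalDescription(answer);
        send({ type: answer.type, sdp: answer.sdp }); // reply so the initiator can finish the handshake
    } else if (message.type === 'answer') {
        await pc.setRemoteDescription(new RTCSessionDescription(message));
    } else if (message.type === 'candidate') {
        await pc.addIceCandidate(new RTCIceCandidate({ sdpMLineIndex: message.label, candidate: message.candidate }));
    }
});

// The initiator starts the exchange (local media tracks would be added to pc before this).
async function startCall() {
    const offer = await pc.createOffer();
    await pc.setLocalDescription(offer);
    send({ type: offer.type, sdp: offer.sdp });
}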