Diffstat (limited to 'src/client/views/nodes/ComparisonBox.tsx')
-rw-r--r--  src/client/views/nodes/ComparisonBox.tsx  748
1 file changed, 565 insertions(+), 183 deletions(-)
diff --git a/src/client/views/nodes/ComparisonBox.tsx b/src/client/views/nodes/ComparisonBox.tsx
index 39a2e3a31..338604302 100644
--- a/src/client/views/nodes/ComparisonBox.tsx
+++ b/src/client/views/nodes/ComparisonBox.tsx
@@ -1,71 +1,194 @@
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
import { Tooltip } from '@mui/material';
-import { action, computed, makeObservable, observable } from 'mobx';
+import axios from 'axios';
+import { IReactionDisposer, action, computed, makeObservable, observable, reaction } from 'mobx';
import { observer } from 'mobx-react';
import * as React from 'react';
-import { returnFalse, returnNone, returnZero, setupMoveUpEvents } from '../../../ClientUtils';
+import ReactLoading from 'react-loading';
+import { returnFalse, returnNone, returnTrue, returnZero, setupMoveUpEvents } from '../../../ClientUtils';
import { emptyFunction } from '../../../Utils';
import { Doc, Opt } from '../../../fields/Doc';
import { DocData } from '../../../fields/DocSymbols';
+import { Id } from '../../../fields/FieldSymbols';
import { RichTextField } from '../../../fields/RichTextField';
import { DocCast, NumCast, RTFCast, StrCast, toList } from '../../../fields/Types';
-import { GPTCallType, gptAPICall } from '../../apis/gpt/GPT';
-import { DocUtils } from '../../documents/DocUtils';
-import { DocumentType } from '../../documents/DocumentTypes';
+import { nullAudio } from '../../../fields/URLField';
+import { GPTCallType, gptAPICall, gptImageLabel } from '../../apis/gpt/GPT';
+import { DocUtils, FollowLinkScript } from '../../documents/DocUtils';
+import { CollectionViewType, DocumentType } from '../../documents/DocumentTypes';
import { Docs } from '../../documents/Documents';
import { DragManager } from '../../util/DragManager';
import { dropActionType } from '../../util/DropActionTypes';
import { undoable } from '../../util/UndoManager';
+import { ContextMenu } from '../ContextMenu';
import { ViewBoxAnnotatableComponent } from '../DocComponent';
import { PinDocView, PinProps } from '../PinFuncs';
import { StyleProp } from '../StyleProp';
+import '../pdf/GPTPopup/GPTPopup.scss';
import './ComparisonBox.scss';
import { DocumentView } from './DocumentView';
import { FieldView, FieldViewProps } from './FieldView';
import { FormattedTextBox } from './formattedText/FormattedTextBox';
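+// Unsplash search endpoint used by fetchImages to find illustrative images for generated flashcards.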
+const API_URL = 'https://api.unsplash.com/search/photos';
+
@observer
export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
public static LayoutString(fieldKey: string) {
return FieldView.LayoutString(ComparisonBox, fieldKey);
}
+ private _reactDisposer: IReactionDisposer | undefined;
private _disposers: (DragManager.DragDropDisposer | undefined)[] = [undefined, undefined];
private _closeRef = React.createRef<HTMLDivElement>();
- @observable _inputValue = '';
- @observable _outputValue = '';
- @observable _loading = false;
- @observable _errorMessage = '';
- @observable _outputMessage = '';
- @observable _animating = '';
+
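+ // Web Speech API constructor, falling back to the webkit-prefixed implementation (Chromium-based browsers and Safari).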
+ SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
+
+ protected createDropTarget = (ele: HTMLDivElement | null, fieldKey: string, disposerId: number) => {
+ this._disposers[disposerId]?.();
+ if (ele) {
+ this._disposers[disposerId] = DragManager.MakeDropTarget(ele, (e, dropEvent) => this.internalDrop(e, dropEvent, fieldKey), this.layoutDoc);
+ }
+ };
constructor(props: FieldViewProps) {
super(props);
makeObservable(this);
+ this.setListening();
+ }
+
+ @observable private _mathJaxConfig = { loader: { load: ['input/asciimath'] } }; // prettier-ignore
+ @observable private _transcriptElement = '';
+ @observable private _inputValue = '';
+ @observable private _outputValue = '';
+ @observable private _loading = false;
+ @observable private _isEmpty = false;
+ @observable private _audio: Doc = Docs.Create.TextDocument('');
+ @observable private _childActive = false; // whether pointer events pass through the comparison box to its contents
+ @observable private _animating = '';
+ @observable private _listening = false;
+ @observable private _frontSide = false;
+ @observable private _recognition = new this.SpeechRecognition();
+
+ @computed get revealOpKey() {
+ return `_${this.fieldKey}_revealOp`;
+ }
+ @computed get clipHeightKey() { return '_' + this.fieldKey + '_clipHeight'; } // prettier-ignore
+ @computed get clipWidthKey() { return '_' + this.fieldKey + '_clipWidth'; } // prettier-ignore
+ @computed get revealOp() { return this.layoutDoc[`_${this.fieldKey}_revealOp`]; } // prettier-ignore
+ @computed get clipWidth() {
+ return NumCast(this.layoutDoc[this.clipWidthKey], 50);
+ }
+ @computed get clipHeight() {
+ return NumCast(this.layoutDoc[this.clipHeightKey], 200);
+ }
+
+ @computed get overlayAlternateIcon() {
+ return (
+ <Tooltip title={<div className="dash-tooltip">flip</div>}>
+ <div
+ className="formattedTextBox-alternateButton"
+ onPointerDown={e =>
+ setupMoveUpEvents(e.target, e, returnFalse, emptyFunction, () => {
+ if (!this.revealOp || this.revealOp === 'flip') {
+ this.flipFlashcard();
+ }
+ })
+ }
+ style={{
+ background: this.revealOp === 'hover' ? 'gray' : this._frontSide ? 'white' : 'black',
+ color: this.revealOp === 'hover' ? 'black' : this._frontSide ? 'black' : 'white',
+ display: 'inline-block',
+ }}>
+ <div key="alternate" className="formattedTextBox-flip">
+ <FontAwesomeIcon icon="turn-up" size="1x" />
+ </div>
+ </div>
+ </Tooltip>
+ );
+ }
+
+ @computed get flashcardMenu() {
+ return (
+ <div>
+ <Tooltip
+ title={this._frontSide ? <div className="dash-tooltip">Flip to front side to use GPT</div> : <div className="dash-tooltip">Ask GPT to create an answer on the back side of the flashcard based on your question on the front</div>}>
+ <div style={{ position: 'absolute', bottom: '3px', right: '50px', cursor: 'pointer' }} onPointerDown={e => (!this._frontSide ? this.findImageTags() : null)}>
+ <FontAwesomeIcon icon="lightbulb" size="xl" />
+ </div>
+ </Tooltip>
+ {DocCast(this.Document.embedContainer).type_collection === 'carousel' ? null : (
+ <div>
+ <Tooltip title={<div>Create a flashcard pile</div>}>
+ <div style={{ position: 'absolute', bottom: '3px', right: '74px', cursor: 'pointer' }} onPointerDown={e => this.createFlashcardPile([this.Document], false)}>
+ <FontAwesomeIcon icon="folder-plus" size="xl" />
+ </div>
+ </Tooltip>
+ <Tooltip title={<div className="dash-tooltip">Create new flashcard stack based on text</div>}>
+ <div style={{ position: 'absolute', bottom: '3px', right: '104px', cursor: 'pointer' }} onClick={e => this.gptFlashcardPile()}>
+ <FontAwesomeIcon icon="layer-group" size="xl" />
+ </div>
+ </Tooltip>
+ </div>
+ )}
+ <Tooltip title={<div className="dash-tooltip">Hover to reveal</div>}>
+ <div style={{ position: 'absolute', bottom: '3px', right: '25px', cursor: 'pointer' }} onClick={e => this.handleHover()}>
+ <FontAwesomeIcon color={this.revealOp === 'hover' ? 'blue' : 'black'} icon="hand-point-up" size="xl" />
+ </div>
+ </Tooltip>
+ </div>
+ );
}
componentDidMount() {
this._props.setContentViewBox?.(this);
+ this._reactDisposer = reaction(
+ () => this._props.isSelected(), // when this reaction should update
+ selected => !selected && (this._childActive = false) // what it should update to
+ );
+ }
+ componentWillUnmount() {
+ this._disposers.forEach(d => d?.());
+ this._reactDisposer?.();
}
- protected createDropTarget = (ele: HTMLDivElement | null, fieldKey: string, disposerId: number) => {
- this._disposers[disposerId]?.();
- if (ele) {
- this._disposers[disposerId] = DragManager.MakeDropTarget(ele, (e, dropEvent) => this.internalDrop(e, dropEvent, fieldKey), this.layoutDoc);
+ @action handleHover = () => {
+ if (this.revealOp === 'hover') {
+ this.layoutDoc[this.revealOpKey] = 'flip';
+ this.Document.forceActive = false;
+ } else {
+ this.layoutDoc[this.revealOpKey] = 'hover';
+ this.Document.forceActive = true;
}
};
- @computed get revealOp() { return this.layoutDoc[`_${this.fieldKey}_revealOp`] as ('flip'|'hover'|undefined); } // prettier-ignore
- @computed get clipWidth() { return NumCast(this.layoutDoc[`_${this.fieldKey}_clipWidth`], 50); } // prettier-ignore
- set clipWidth(width: number) { this.layoutDoc[`_${this.fieldKey}_clipWidth`] = width; } // prettier-ignore
- @computed get useAlternate() { return this.layoutDoc[`_${this.fieldKey}_usePath`] === 'alternate'; } // prettier-ignore
- set useAlternate(alt: boolean) { this.layoutDoc[`_${this.fieldKey}_usePath`] = alt ? 'alternate' : undefined; } // prettier-ignore
+ handleInputChange = action((e: React.ChangeEvent<HTMLTextAreaElement>) => {
+ this._inputValue = e.target.value;
+ });
+
+ handleRenderClick = action(() => {
+ this._frontSide = !this._frontSide;
+ });
- animateClipWidth = action((clipWidth: number, duration = 200 /* ms */) => {
- this._animating = `all ${duration}ms`; // turn on clip animation transition, then turn it off at end of animation
- setTimeout(action(() => { this._animating = ''; }), duration); // prettier-ignore
- this.clipWidth = clipWidth;
+ handleRenderGPTClick = action(() => {
+ console.log('Phonetic transcription: ' + DocCast(this.Document.audio).phoneticTranscription);
+ const phonTrans = DocCast(this.Document.audio).phoneticTranscription;
+ if (phonTrans) {
+ this._inputValue = StrCast(phonTrans);
+ console.log('INPUT:' + this._inputValue);
+ this.askGPTPhonemes(this._inputValue);
+ } else if (this._inputValue) this.askGPT(GPTCallType.QUIZ);
+ this._frontSide = false;
+ this._outputValue = '';
});
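+ // Converts horizontal pointer movement while sliding into a clip-width percentage of the panel.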
+ onPointerMove = ({ movementX }: PointerEvent) => {
+ const width = movementX * this.ScreenToLocalBoxXf().Scale + (this.clipWidth / 100) * this._props.PanelWidth();
+ if (width < this._props.PanelWidth()) {
+ this.layoutDoc[this.clipWidthKey] = (Math.max(0, width) * 100) / this._props.PanelWidth();
+ }
+ return false;
+ };
+
internalDrop = undoable((e: Event, dropEvent: DragManager.DropEvent, fieldKey: string) => {
if (dropEvent.complete.docDragData) {
const { droppedDocuments } = dropEvent.complete.docDragData;
@@ -73,43 +196,12 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>()
Doc.SetContainer(droppedDocuments.lastElement(), this.dataDoc);
!added && e.preventDefault();
e.stopPropagation(); // prevent parent Doc from registering new position so that it snaps back into place
+ // this.childActive = false;
return added;
}
return undefined;
}, 'internal drop');
- registerSliding = (e: React.PointerEvent<HTMLDivElement>, targetWidth: number) => {
- if (e.button !== 2) {
- setupMoveUpEvents(
- this,
- e,
- this.onPointerMove,
- emptyFunction,
- action((clickEv, doubleTap) => {
- if (doubleTap) {
- this._isAnyChildContentActive = true;
- if (!this.dataDoc[this.fieldKey + '_1'] && !this.dataDoc[this.fieldKey]) this.dataDoc[this.fieldKey + '_1'] = DocUtils.copyDragFactory(Doc.UserDoc().emptyNote as Doc);
- if (!this.dataDoc[this.fieldKey + '_2'] && !this.dataDoc[this.fieldKey + '_alternate']) this.dataDoc[this.fieldKey + '_2'] = DocUtils.copyDragFactory(Doc.UserDoc().emptyNote as Doc);
- // DocumentView.addViewRenderedCb(DocCast(this.dataDoc[this.fieldKey + '_1']), dv => {
- // dv?.select(false);
- // });
- }
- }),
- true,
- undefined,
- () => !this._isAnyChildContentActive && this.animateClipWidth((targetWidth * 100) / this._props.PanelWidth())
- );
- }
- };
-
- onPointerMove = ({ movementX }: PointerEvent) => {
- const width = movementX * this.ScreenToLocalBoxXf().Scale + (this.clipWidth / 100) * this._props.PanelWidth();
- if (width > 5 && width < this._props.PanelWidth()) {
- this.clipWidth = (width * 100) / this._props.PanelWidth();
- }
- return false;
- };
-
getAnchor = (addAsAnnotation: boolean, pinProps?: PinProps) => {
const anchor = Docs.Create.ConfigDocument({
title: 'CompareAnchor:' + this.Document.title,
@@ -127,19 +219,19 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>()
};
clearDoc = undoable((fieldKey: string) => {
- delete this.dataDoc[fieldKey];
- this.dataDoc[fieldKey] = 'empty';
+ this.dataDoc[fieldKey] = undefined;
+ this._isEmpty = true;
}, 'clear doc');
- // clearDoc = (fieldKey: string) => delete this.dataDoc[fieldKey];
moveDoc = (doc: Doc, addDocument: (document: Doc | Doc[]) => boolean, which: string) => this.remDoc(doc, which) && addDocument(doc);
addDoc = (doc: Doc, which: string) => {
- if (this.dataDoc[which] && this.dataDoc[which] !== 'empty') return false;
+ if (this.dataDoc[which] && !this._isEmpty) return false;
this.dataDoc[which] = doc;
return true;
};
remDoc = (doc: Doc, which: string) => {
if (this.dataDoc[which] === doc) {
+ this._isEmpty = true;
// this.dataDoc[which] = 'empty';
this.dataDoc[which] = undefined;
return true;
@@ -173,123 +265,363 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>()
remDoc1 = (docs: Doc | Doc[]) => toList(docs).reduce((res, doc) => res && this.remDoc(doc, this.fieldKey + '_1'), true);
remDoc2 = (docs: Doc | Doc[]) => toList(docs).reduce((res, doc) => res && this.remDoc(doc, this.fieldKey + '_2'), true);
- /**
- * Tests for whether a comparison box slot (ie, before or after) has renderable text content.
- * If it does, render a FormattedTextBox for that slot that references the comparisonBox's slot field
- * @param whichSlot field key for start or end slot
- * @returns a JSX layout string if a text field is found, othwerise undefined
- */
- testForTextFields = (whichSlot: string) => {
- const slotData = Doc.Get(this.dataDoc, whichSlot, true);
- const slotHasText = slotData instanceof RichTextField || typeof slotData === 'string';
- const subjectText = RTFCast(this.Document[this.fieldKey])?.Text.trim();
- const altText = RTFCast(this.Document[this.fieldKey + '_alternate'])?.Text.trim();
- const layoutTemplateString =
- slotHasText ? FormattedTextBox.LayoutString(whichSlot):
- whichSlot.endsWith('1') ? (subjectText !== undefined ? FormattedTextBox.LayoutString(this.fieldKey) : undefined) :
- altText !== undefined ? FormattedTextBox.LayoutString(this.fieldKey + '_alternate'): undefined; // prettier-ignore
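+ // Pointer handler for the divider: dragging moves it, a double tap activates the child content (seeding empty notes into empty slots), and a plain click animates the divider to targetWidth.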
+ private registerSliding = (e: React.PointerEvent<HTMLDivElement>, targetWidth: number) => {
+ if (e.button !== 2) {
+ setupMoveUpEvents(
+ this,
+ e,
+ this.onPointerMove,
+ emptyFunction,
+ action((moveEv, doubleTap) => {
+ if (doubleTap) {
+ this._childActive = true;
+ if (!this.dataDoc[this.fieldKey + '_1'] && !this.dataDoc[this.fieldKey]) this.dataDoc[this.fieldKey + '_1'] = DocUtils.copyDragFactory(Doc.UserDoc().emptyNote as Doc);
+ if (!this.dataDoc[this.fieldKey + '_2'] && !this.dataDoc[this.fieldKey + '_alternate']) this.dataDoc[this.fieldKey + '_2'] = DocUtils.copyDragFactory(Doc.UserDoc().emptyNote as Doc);
+ }
+ }),
+ false,
+ undefined,
+ action(() => {
+ if (!this._childActive) {
+ this._animating = 'all 200ms';
+ // on click, animate slider movement to the targetWidth
+ this.layoutDoc[this.clipWidthKey] = (targetWidth * 100) / this._props.PanelWidth();
+ // this.layoutDoc[this.clipHeightKey] = (targetWidth * 100) / this._props.PanelHeight();
+ setTimeout(action(() => { this._animating = ''; }), 200); // prettier-ignore
+ }
+ })
+ );
+ }
+ };
- // A bit hacky to try out the concept of using GPT to fill in flashcards
- // If the second slot doesn't have anything in it, but the fieldKey slot has text (e.g., this.text is a string)
- // and the fieldKey + "_alternate" has text that includes a GPT query (indicated by (( && )) ) that is parameterized (optionally) by the fieldKey text (this) or other metadata (this.<field>).
- // eg., this.text_alternate is
- // "((Provide a one sentence definition for (this) that doesn't use any word in (this.excludeWords) ))"
- // where (this) is replaced by the text in the fieldKey slot abd this.excludeWords is repalced by the conetnts of the excludeWords field
- // The GPT call will put the "answer" in the second slot of the comparison (eg., text_2)
- if (whichSlot.endsWith('2') && !layoutTemplateString?.includes(whichSlot)) {
- const queryText = altText?.replace('(this)', subjectText); // TODO: this should be done in Doc.setField but it doesn't know about the fieldKey ...
- if (queryText?.match(/\(\(.*\)\)/)) {
- Doc.SetField(this.Document, whichSlot, ':=' + queryText, false); // make the second slot be a computed field on the data doc that calls ChatGpt
- }
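+ // Configures the speech recognizer (continuous, interim results, English by default) and registers the result handler.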
+ setListening = () => {
+ const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
+ if (SpeechRecognition) {
+ this._recognition.continuous = true;
+ this._recognition.interimResults = true;
+ this._recognition.lang = 'en-US';
+ this._recognition.onresult = this.handleResult.bind(this);
}
- return layoutTemplateString;
+ ContextMenu.Instance.setLangIndex(0);
};
- /**
- * Flips a flashcard to the alternate side for the user to view.
- */
- flipFlashcard = () => {
- this.useAlternate = !this.useAlternate;
+ startListening = () => {
+ this._recognition.start();
+ this._listening = true;
};
- /**
- * Changes the view option to hover for a flashcard.
- */
- hoverFlip = (alternate: boolean) => {
- if (this.revealOp === 'hover') this.useAlternate = alternate;
+ stopListening = () => {
+ this._recognition.stop();
+ this._listening = false;
};
- /**
- * Creates the button used to flip the flashcards.
- */
- @computed get overlayAlternateIcon() {
- return (
- <Tooltip title={<div className="dash-tooltip">flip</div>}>
- <div
- className="formattedTextBox-alternateButton"
- onPointerDown={e =>
- setupMoveUpEvents(e.target, e, returnFalse, emptyFunction, () => {
- if (!this.revealOp || this.revealOp === 'flip') {
- this.flipFlashcard();
- console.log('Print Front of cards: ' + (RTFCast(DocCast(this.dataDoc[this.fieldKey + '_0']).text)?.Text ?? ''));
- console.log('Print Back of cards: ' + (RTFCast(DocCast(this.dataDoc[this.fieldKey + '_1']).text)?.Text ?? ''));
- }
- })
- }
- style={{
- background: this.useAlternate ? 'white' : 'black',
- color: this.useAlternate ? 'black' : 'white',
- }}>
- <FontAwesomeIcon icon="turn-up" size="sm" />
- </div>
- </Tooltip>
- );
- }
+ setLanguage = (language: string, ind: number) => {
+ this._recognition.lang = language;
+ ContextMenu.Instance.setLangIndex(ind);
+ };
- @action handleRenderGPTClick = () => {
- // Call the GPT model and get the output
- this.useAlternate = true;
- this._outputValue = '';
- if (this._inputValue) this.askGPT();
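+ // Maps the recognizer's language tag to a human-readable name for use in the GPT prompts.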
+ convertAbr = () => {
+ switch (this._recognition.lang) {
+ case 'en-US': return 'English'; //prettier-ignore
+ case 'es-ES': return 'Spanish'; //prettier-ignore
+ case 'fr-FR': return 'French'; //prettier-ignore
+ case 'it-IT': return 'Italian'; //prettier-ignore
+ case 'zh-CH': return 'Mandarin Chinese'; //prettier-ignore
+ case 'ja': return 'Japanese'; //prettier-ignore
+ default: return 'Korean'; //prettier-ignore
+ }
+ };
+
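+ // Language-selection context menu; Mandarin Chinese is only offered when the menu is not opened for pronunciation evaluation.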
+ openContextMenu = (x: number, y: number, evalu: boolean) => {
+ ContextMenu.Instance.clearItems();
+ ContextMenu.Instance.addItem({ description: 'English', event: e => this.setLanguage('en-US', 0), icon: 'question' }); //prettier-ignore
+ ContextMenu.Instance.addItem({ description: 'Spanish', event: e => this.setLanguage('es-ES', 1 ), icon: 'question'}); //prettier-ignore
+ ContextMenu.Instance.addItem({ description: 'French', event: e => this.setLanguage('fr-FR', 2), icon: 'question' }); //prettier-ignore
+ ContextMenu.Instance.addItem({ description: 'Italian', event: e => this.setLanguage('it-IT', 3), icon: 'question' }); //prettier-ignore
+ if (!evalu) ContextMenu.Instance.addItem({ description: 'Mandarin Chinese', event: e => this.setLanguage('zh-CH', 4), icon: 'question' }); //prettier-ignore
+ ContextMenu.Instance.addItem({ description: 'Japanese', event: e => this.setLanguage('ja', 5), icon: 'question' }); //prettier-ignore
+ ContextMenu.Instance.addItem({ description: 'Korean', event: e => this.setLanguage('ko', 6), icon: 'question' }); //prettier-ignore
+ ContextMenu.Instance.displayMenu(x, y);
};
- @action handleRenderClick = () => {
- // Call the GPT model and get the output
- this.useAlternate = false;
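+ // Creates an empty audio document and adds it to the view so the user's speech can be recorded for pronunciation feedback.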
+ evaluatePronunciation = () => {
+ const newAudio = Docs.Create.AudioDocument(nullAudio, { _width: 200, _height: 100 });
+ this.Document.audio = newAudio[DocData];
+ this._props.DocumentView?.()._props.addDocument?.(newAudio);
};
- /**
- * Calls the GPT model to create QuizCards. Evaluates how similar the user's response is to the alternate
- * side of the flashcard.
- */
- askGPT = async (): Promise<string | undefined> => {
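+ // Posts the recorded audio file URL to a local phoneme-recognition service and stores the returned phonetic transcription on the Document.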
+ pushInfo = async () => {
+ const audio = {
+ file: this._audio.url,
+ };
+ const response = await axios.post('http://localhost:105/recognize/', audio, {
+ headers: {
+ 'Content-Type': 'application/json',
+ },
+ });
+ this.Document.phoneticTranscription = response.data['transcription'];
+ };
+
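+ // Wraps the given flashcards in a new carousel collection placed just below this card and removes this comparison box from its current container (it either joins the pile or is replaced by it).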
+ createFlashcardPile(collectionArr: Doc[], gpt: boolean) {
+ const newCol = Docs.Create.CarouselDocument(collectionArr, {
+ _width: NumCast(this.layoutDoc['_' + this.fieldKey + '_width'], 250) + 50,
+ _height: NumCast(this.layoutDoc['_' + this.fieldKey + '_width'], 200) + 50,
+ _layout_fitWidth: false,
+ _layout_autoHeight: true,
+ });
+ newCol.x = this.layoutDoc.x;
+ newCol.y = NumCast(this.layoutDoc.y) + 50;
+ newCol.type_collection = CollectionViewType.Carousel;
+
+ if (gpt) {
+ this._props.DocumentView?.()._props.addDocument?.(newCol);
+ this._props.removeDocument?.(this.Document);
+ } else {
+ this._props.addDocument?.(newCol);
+ this._props.removeDocument?.(this.Document);
+ this.Document.embedContainer = newCol;
+ }
+ }
+
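+ // Asks GPT for a stack of question/answer pairs (GPTCallType.STACK), builds a ComparisonDocument per question, attaches an Unsplash image when a keyword is returned, and collects the results into a new pile.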
+ gptFlashcardPile = async () => {
+ let text = await this.askGPT(GPTCallType.STACK);
+ let senArr = text?.split('Question: ');
+ let collectionArr: Doc[] = [];
+ for (let i = 1; i < senArr?.length!; i++) {
+ const newDoc = Docs.Create.ComparisonDocument(senArr![i], { _layout_isFlashcard: true, _width: 300, _height: 300 });
+
+ if (StrCast(senArr![i]).includes('Keyword: ')) {
+ const question = StrCast(senArr![i]).split('Keyword: ');
+ const img = await this.fetchImages(question[1]);
+ const textSide1 = question[0].includes('Answer: ') ? question[0].split('Answer: ')[0] : question[0];
+ const textDoc1 = Docs.Create.TextDocument(question[0]);
+ const rtfiel = new RichTextField(
+ JSON.stringify({
+ doc: {
+ type: 'doc',
+ content: [
+ {
+ type: 'paragraph',
+ attrs: { align: null, color: null, id: null, indent: null, inset: null, lineSpacing: null, paddingBottom: null, paddingTop: null },
+ content: [
+ { type: 'text', text: question[0].includes('Answer: ') ? question[0].split('Answer: ')[0] : question[0] },
+ { type: 'dashDoc', attrs: { width: '200px', height: '200px', title: 'dashDoc', float: 'unset', hidden: false, docId: img![Id] } },
+ ],
+ },
+ ],
+ },
+ selection: { type: 'text', anchor: 2, head: 2 },
+ }),
+ textSide1
+ );
+
+ textDoc1[DocData].text = rtfiel;
+ DocCast(newDoc)[DocData][this.fieldKey + '_1'] = textDoc1;
+ DocCast(newDoc)[DocData][this.fieldKey + '_0'] = Docs.Create.TextDocument(question[0].includes('Answer: ') ? question[0].split('Answer: ')[1] : question[1]);
+ }
+ collectionArr.push(newDoc);
+ }
+ this.createFlashcardPile(collectionArr, true);
+ };
+
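+ // Shared GPT entry point: QUIZ grades the user's answer against the rubric side, CHATCARD generates the back of the card from the front text, and FLASHCARD/STACK return the raw response for the caller to use.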
+ askGPT = async (callType: GPTCallType): Promise<string | undefined> => {
const questionText = 'Question: ' + StrCast(RTFCast(DocCast(this.dataDoc[this.fieldKey + '_1']).text)?.Text);
const rubricText = ' Rubric: ' + StrCast(RTFCast(DocCast(this.dataDoc[this.fieldKey + '_0']).text)?.Text);
const queryText = questionText + ' UserAnswer: ' + this._inputValue + '. ' + rubricText;
-
+ this._loading = true;
+ const doc = DocCast(this.dataDoc[this.props.fieldKey + '_0']);
+ if (callType == GPTCallType.CHATCARD) {
+ if (StrCast(RTFCast(DocCast(this.dataDoc[this.fieldKey + '_1']).text)?.Text) === '') {
+ this._loading = false;
+ return;
+ }
+ this.flipFlashcard();
+ }
try {
- const res = await gptAPICall(queryText, GPTCallType.QUIZ);
+ console.log(queryText);
+ const res = await gptAPICall(callType == GPTCallType.QUIZ ? queryText : questionText, callType);
if (!res) {
console.error('GPT call failed');
return;
}
- this._outputValue = res;
+ if (callType == GPTCallType.CHATCARD) {
+ DocCast(this.dataDoc[this.props.fieldKey + '_0'])[DocData].text = res;
+ } else if (callType == GPTCallType.QUIZ) {
+ console.log(this._inputValue);
+ this._outputValue = res.replace(/UserAnswer/g, "user's answer").replace(/Rubric/g, 'rubric');
+ } else if (callType === GPTCallType.FLASHCARD) {
+ this._loading = false;
+ return res;
+ } else if (callType === GPTCallType.STACK) {
+ }
+ this._loading = false;
+ return res;
} catch (err) {
console.error('GPT call failed');
}
+ this._loading = false;
};
+
layoutWidth = () => NumCast(this.layoutDoc.width, 200);
layoutHeight = () => NumCast(this.layoutDoc.height, 200);
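+ // Looks for <img> tags in the rendered card and asks GPT to describe each one; with no images it falls back to a plain CHATCARD text query.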
+ findImageTags = async () => {
+ const c = this.DocumentView?.().ContentDiv!.getElementsByTagName('img');
+ if (c?.length === 0) await this.askGPT(GPTCallType.CHATCARD);
+ if (c) {
+ this._loading = true;
+ for (const i of c) {
+ if (i.className !== 'ProseMirror-separator') await this.getImageDesc(i.src);
+ }
+ this._loading = false;
+ }
+ };
+
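+ // Builds a language-specific prompt that compares the recognized phonemes with the expected sentence and asks GPT for pronunciation feedback; the hard-coded phon* constants appear to be sample transcriptions left over from testing.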
+ askGPTPhonemes = async (phonemes: string) => {
+ const sentence = StrCast(RTFCast(DocCast(this.dataDoc[this.fieldKey + '_1']).text)?.Text);
+ const phon = 'w ʌ ɪ z j ɔː ɹ n e ɪ m ';
+ const phon2 = 'h ʌ ɛ r j ʌ t ʌ d eɪ';
+ const phon6 = 'huː ɑɹ juː tədeɪ';
+ const phon3 = 'ʃ eɪ oʊ s i ʃ oʊ z b aɪ ð ə s iː ʃ oʊ';
+ const phon4 = 'kamo estas hɔi';
+ const phon5 = 'la s e n a l';
+ const promptEng =
+ 'Consider all possible phonetic transcriptions of the intended sentence "' +
+ sentence +
+ '" that is standard in American speech without showing the user. Compare each word in the following phonemes with those phonetic transcriptions without displaying anything to the user: "' +
+ phon6 +
+ '". Steps to do this: Align the words with each word in the intended sentence by combining the phonemes to get a pronunciation that resembles the word in order. Do not describe phonetic corrections with the phonetic alphabet - describe it by providing other examples of how it should sound. Note if a word or sound missing, including missing vowels and consonants. If there is an additional word that does not match with the provided sentence, say so. For each word, if any letters mismatch and would sound weird in American speech and they are not allophones of the same phoneme and they are far away from each on the ipa vowel chat and that pronunciation is not normal for the meaning of the word, note this difference and explain how it is supposed to sound. Only note the difference if they are not allophones of the same phoneme and if they are far away on the vowel chart. The goal is to be understood, not sound like a native speaker. Just so you know, "i" sounds like "ee" as in "bee", not "ih" as an "lick". Interpret "ɹ" as the same as "r". Interpret "ʌ" as the same as "ə". If "ɚ", "ɔː", and "ɔ" are options for pronunciation, do not choose "ɚ". Ignore differences with colons. Ignore redundant letters and words and sounds and the splitting of words; do not mention this since there could be repeated words in the sentence. Provide a response like this: "Lets work on improving the pronunciation of "coffee." You said "ceeffee," which is close, but we need to adjust the vowel sound. In American English, "coffee" is pronounced /ˈkɔːfi/, with a long "aw" sound. Try saying "kah-fee." Your intonation is good, but try putting a bit more stress on "like" in the sentence "I would like a coffee with milk." This will make your speech sound more natural. Keep practicing, and lets try saying the whole sentence again!"';
+ const promptSpa =
+ 'Consider all possible phonetic transcriptions of the intended sentence "' +
+ 'como estás hoy' +
+ '" that is standard in Spanish speech without showing the user. Compare each word in the following phonemes with those phonetic transcriptions without displaying anything to the user: "' +
+ phon4 +
+ '". Steps to do this: Align the words with each word in the intended sentence by combining the phonemes to get a pronunciation that resembles the word in order. Do not describe phonetic corrections with the phonetic alphabet - describe it by providing other examples of how it should sound. Note if a word or sound missing, including missing vowels and consonants. If there is an additional word that does not match with the provided sentence, say so. For each word, if any letters mismatch and would sound weird in Spanish speech and they are not allophones of the same phoneme and they are far away from each on the ipa vowel chat and that pronunciation is not normal for the meaning of the word, note this difference and explain how it is supposed to sound. Only note the difference if they are not allophones of the same phoneme and if they are far away on the vowel chart; say good job if it would be understood by a native Spanish speaker. Just so you know, "i" sounds like "ee" as in "bee", not "ih" as an "lick". Interpret "ɹ" as the same as "r". Interpret "ʌ" as the same as "ə". Do not make "θ" and "f" interchangable. Do not make "n" and "ɲ" interchangable. Do not make "e" and "i" interchangable. If "ɚ", "ɔː", and "ɔ" are options for pronunciation, do not choose "ɚ". Ignore differences with colons. Ignore redundant letters and words and sounds and the splitting of words; do not mention this since there could be repeated words in the sentence. Identify "ɔi" sounds like "oy". Ignore accents and do not say anything to the user about this.';
+ const promptAll =
+ 'Consider all possible phonetic transcriptions of the intended sentence "' +
+ sentence +
+ '" that is standard in ' +
+ this.convertAbr() +
+ ' speech without showing the user. Compare each word in the following phonemes with those phonetic transcriptions without displaying anything to the user: "' +
+ phonemes +
+ '". Steps to do this: Align the words with each word in the intended sentence by combining the phonemes to get a pronunciation that resembles the word in order. Do not describe phonetic corrections with the phonetic alphabet - describe it by providing other examples of how it should sound. Note if a word or sound missing, including missing vowels and consonants. If there is an additional word that does not match with the provided sentence, say so. For each word, if any letters mismatch and would sound weird in ' +
+ this.convertAbr() +
+ ' speech and they are not allophones of the same phoneme and they are far away from each other on the IPA vowel chart and that pronunciation is not normal for the meaning of the word, note this difference and explain how it is supposed to sound. Just so you know, "i" sounds like "ee" as in "bee", not "ih" as in "lick". Interpret "ɹ" as the same as "r". Interpret "ʌ" as the same as "ə". Do not make "θ" and "f" interchangeable. Do not make "n" and "ɲ" interchangeable. Do not make "e" and "i" interchangeable. If "ɚ", "ɔː", and "ɔ" are options for pronunciation, do not choose "ɚ". Ignore differences with colons. Ignore redundant letters and words and sounds and the splitting of words; do not mention this since there could be repeated words in the sentence. Provide a response like this: "Lets work on improving the pronunciation of "coffee." You said "cawffee," which is close, but we need to adjust the vowel sound. In American English, "coffee" is pronounced /ˈkɔːfi/, with a long "aw" sound. Try saying "kah-fee." Your intonation is good, but try putting a bit more stress on "like" in the sentence "I would like a coffee with milk." This will make your speech sound more natural. Keep practicing, and lets try saying the whole sentence again!"';
+
+ switch (this._recognition.lang) {
+ case 'en-US': this._outputValue = await gptAPICall(promptEng, GPTCallType.PRONUNCIATION); break;
+ case 'es-ES': this._outputValue = await gptAPICall(promptSpa, GPTCallType.PRONUNCIATION); break;
+ default: this._outputValue = await gptAPICall(promptAll, GPTCallType.PRONUNCIATION); break;
+ } // prettier-ignore
+ };
+
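+ // Speech-recognition callback: appends finalized transcripts to the quiz input; interim results are collected but not displayed.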
+ handleResult = (e: SpeechRecognitionEvent) => {
+ let interimTranscript = '';
+ let finalTranscript = '';
+ for (let i = e.resultIndex; i < e.results.length; i++) {
+ const transcript = e.results[i][0].transcript;
+ if (e.results[i].isFinal) {
+ finalTranscript += transcript;
+ } else {
+ interimTranscript += transcript;
+ }
+ }
+ this._inputValue += finalTranscript;
+ };
+
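+ // Queries the Unsplash search API for one image matching the selection and wraps it in an image document positioned at this card.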
+ fetchImages = async (selection: string) => {
+ try {
+ const { data } = await axios.get(`${API_URL}?query=${selection}&page=1&per_page=${1}&client_id=Q4zruu6k6lum2kExiGhLNBJIgXDxD6NNj0SRHH_XXU0`);
+ console.log(data.results);
+ const imageSnapshot = Docs.Create.ImageDocument(data.results[0].urls.small, {
+ _nativeWidth: Doc.NativeWidth(this.layoutDoc),
+ _nativeHeight: Doc.NativeHeight(this.layoutDoc),
+ x: NumCast(this.layoutDoc.x),
+ y: NumCast(this.layoutDoc.y),
+ onClick: FollowLinkScript(),
+ _width: 150,
+ _height: 150,
+ title: '--snapshot' + NumCast(this.layoutDoc._layout_currentTimecode) + ' image-',
+ });
+ imageSnapshot.x = this.layoutDoc.x;
+ imageSnapshot.y = this.layoutDoc.y;
+ return imageSnapshot;
+ } catch (error) {
+ console.log(error);
+ }
+ };
+
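+ // Downloads an image and converts it to a base64 data URI so it can be passed to the GPT vision call.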
+ static imageUrlToBase64 = async (imageUrl: string): Promise<string> => {
+ try {
+ const blob = await fetch(imageUrl).then(response => response.blob());
+
+ return new Promise((resolve, reject) => {
+ const reader = new FileReader();
+ reader.readAsDataURL(blob);
+ reader.onloadend = () => resolve(reader.result as string);
+ reader.onerror = error => reject(error);
+ });
+ } catch (error) {
+ console.error('Error:', error);
+ throw error;
+ }
+ };
+
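+ // Sends the image (as base64) plus the card's question text to gptImageLabel and writes the short answer onto the back (_0) slot.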
+ getImageDesc = async (u: string) => {
+ try {
+ const hrefBase64 = await ComparisonBox.imageUrlToBase64(u);
+ const response = await gptImageLabel(hrefBase64, 'Answer the following question as a short flashcard response. Do not include a label.' + (this.dataDoc.text as RichTextField)?.Text);
+
+ DocCast(this.dataDoc[this.props.fieldKey + '_0'])[DocData].text = response;
+ } catch (error) {
+ console.log('Error', error);
+ }
+ };
+
+ flipFlashcard = action(() => {
+ this._frontSide = !this._frontSide;
+ });
+
+ hoverFlip = (side: boolean) => {
+ if (this.revealOp === 'hover') this.layoutDoc[`_${this._props.fieldKey}_usePath`] = side;
+ };
+
+ testForTextFields = (whichSlot: string) => {
+ const slotData = Doc.Get(this.dataDoc, whichSlot, true);
+ const slotHasText = slotData instanceof RichTextField || typeof slotData === 'string';
+ const subjectText = RTFCast(this.Document[this.fieldKey])?.Text.trim();
+ const altText = RTFCast(this.Document[this.fieldKey + '_alternate'])?.Text.trim();
+ const layoutTemplateString =
+ slotHasText ? FormattedTextBox.LayoutString(whichSlot):
+ whichSlot.endsWith('1') ? (subjectText !== undefined ? FormattedTextBox.LayoutString(this.fieldKey) : undefined) :
+ altText !== undefined ? FormattedTextBox.LayoutString(this.fieldKey + '_alternate'): undefined; // prettier-ignore
+
+ // A bit hacky to try out the concept of using GPT to fill in flashcards
+ // If the second slot doesn't have anything in it, but the fieldKey slot has text (e.g., this.text is a string)
+ // and the fieldKey + "_alternate" has text that includes a GPT query (indicated by (( && )) ) that is parameterized (optionally) by the fieldKey text (this) or other metadata (this.<field>).
+ // eg., this.text_alternate is
+ // "((Provide a one sentence definition for (this) that doesn't use any word in (this.excludeWords) ))"
+ // where (this) is replaced by the text in the fieldKey slot and this.excludeWords is replaced by the contents of the excludeWords field
+ // The GPT call will put the "answer" in the second slot of the comparison (eg., text_2)
+ if (whichSlot.endsWith('2') && !layoutTemplateString?.includes(whichSlot)) {
+ const queryText = altText?.replace('(this)', subjectText); // TODO: this should be done in Doc.setField but it doesn't know about the fieldKey ...
+ if (queryText?.match(/\(\(.*\)\)/)) {
+ Doc.SetField(this.Document, whichSlot, ':=' + queryText, false); // make the second slot be a computed field on the data doc that calls ChatGpt
+ }
+ }
+ return layoutTemplateString;
+ };
+
render() {
const clearButton = (which: string) => (
<Tooltip title={<div className="dash-tooltip">remove</div>}>
<div
+ // style={{ position: 'relative', top: '0px', left: '10px' }}
ref={this._closeRef}
className={`clear-button ${which}`}
onPointerDown={e => this.closeDown(e, which)} // prevent triggering slider movement in registerSliding
>
- <FontAwesomeIcon className={`clear-button ${which}`} icon="times" size="sm" />
+ <FontAwesomeIcon className={`clear-button ${which}`} icon="times" size="xs" />
</div>
</Tooltip>
);
@@ -297,8 +629,11 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>()
const whichDoc = DocCast(this.dataDoc[whichSlot]);
const targetDoc = DocCast(whichDoc?.annotationOn, whichDoc);
const layoutString = targetDoc ? '' : this.testForTextFields(whichSlot);
+ // whichDoc['backgroundColor'] = this.layoutDoc['backgroundColor'];
return targetDoc || layoutString ? (
+ // <MathJaxContext config={this._mathJaxConfig}>
+ // <MathJax>
<>
<DocumentView
// eslint-disable-next-line react/jsx-props-no-spreading
@@ -310,18 +645,21 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>()
renderDepth={this.props.renderDepth + 1}
LayoutTemplateString={layoutString}
Document={layoutString ? this.Document : targetDoc}
- containerViewPath={this.DocumentView?.().docViewPath}
+ containerViewPath={this._props.docViewPath}
moveDocument={whichSlot.endsWith('1') ? this.moveDoc1 : this.moveDoc2}
removeDocument={whichSlot.endsWith('1') ? this.remDoc1 : this.remDoc2}
- isContentActive={emptyFunction}
+ isContentActive={() => this._childActive}
isDocumentActive={returnFalse}
+ dontSelect={returnTrue}
whenChildContentsActiveChanged={this.whenChildContentsActiveChanged}
- styleProvider={this._isAnyChildContentActive ? this._props.styleProvider : this.docStyleProvider}
+ styleProvider={this._childActive ? this._props.styleProvider : this.docStyleProvider}
hideLinkButton
- pointerEvents={this._isAnyChildContentActive ? undefined : returnNone}
+ pointerEvents={this._childActive ? undefined : returnNone}
/>
- {layoutString ? null : clearButton(whichSlot)}
- </> // placeholder image if doc is missing
+ {/* </MathJax> */}
+ {/* <div style={{ position: 'absolute', top: '-5px', left: '2px' }}>{layoutString ? null : clearButton(whichSlot)}</div> */}
+ {/* </MathJaxContext> // placeholder image if doc is missingleft: `${NumCast(this.layoutDoc.width, 200) - 33}px` */}
+ </>
) : (
<div className="placeholder">
<FontAwesomeIcon className="upload-icon" icon="cloud-upload-alt" size="lg" />
@@ -329,56 +667,94 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>()
);
};
const displayBox = (which: string, index: number, cover: number) => (
- <div className={`${index === 0 ? 'before' : 'after'}Box-cont`} key={which} style={{ width: this._props.PanelWidth() }} onPointerDown={e => this.registerSliding(e, cover)} ref={ele => this.createDropTarget(ele, which, index)}>
- {displayDoc(which)}
+ <div
+ className={`${index === 0 ? 'before' : 'after'}Box-cont`}
+ key={which}
+ style={{ width: this._props.PanelWidth() }}
+ onPointerDown={e => {
+ this.registerSliding(e, cover);
+ this._childActive = true;
+ }}
+ ref={ele => this.createDropTarget(ele, which, index)}>
+ {!this._isEmpty ? displayDoc(which) : null}
+ {/* {this.dataDoc[this.fieldKey + '_0'] !== 'empty' ? displayDoc(which) : null} */}
</div>
);
if (this.Document._layout_isFlashcard) {
- const side = this.useAlternate ? 1 : 0;
+ const side = this._frontSide ? 1 : 0;
// add text box to each side when comparison box is first created
- if (!(this.dataDoc[this.fieldKey + '_0'] || this.dataDoc[this.fieldKey + '_0'] === 'empty')) {
- const dataSplit = StrCast(this.dataDoc.data).split('Answer');
+ // (!this.dataDoc[this.fieldKey + '_0'] && this.dataDoc[this._props.fieldKey + '_0'] !== 'empty')
+ if (!this.dataDoc[this.fieldKey + '_0'] && !this._isEmpty) {
+ const dataSplit = StrCast(this.dataDoc.data).includes('Keyword: ') ? StrCast(this.dataDoc.data).split('Keyword: ') : StrCast(this.dataDoc.data).split('Answer: ');
const newDoc = Docs.Create.TextDocument(dataSplit[1]);
- // if there is text from the pdf ai cards, put the question on the front side.
- // eslint-disable-next-line prefer-destructuring
- newDoc[DocData].text = dataSplit[1];
this.addDoc(newDoc, this.fieldKey + '_0');
}
- if (!(this.dataDoc[this.fieldKey + '_1'] || this.dataDoc[this.fieldKey + '_1'] === 'empty')) {
- const dataSplit = StrCast(this.dataDoc.data).split('Answer');
+
+ if (!this.dataDoc[this.fieldKey + '_1'] && !this._isEmpty) {
+ const dataSplit = StrCast(this.dataDoc.data).includes('Keyword: ') ? StrCast(this.dataDoc.data).split('Keyword: ') : StrCast(this.dataDoc.data).split('Answer: ');
const newDoc = Docs.Create.TextDocument(dataSplit[0]);
- // if there is text from the pdf ai cards, put the answer on the alternate side.
- // eslint-disable-next-line prefer-destructuring
- newDoc[DocData].text = dataSplit[0];
this.addDoc(newDoc, this.fieldKey + '_1');
}
- // render the QuizCards
- if (DocCast(this.Document.embedContainer) && DocCast(this.Document.embedContainer).filterOp === 'quiz') {
+ if (DocCast(this.Document.embedContainer) && DocCast(this.Document.embedContainer).practiceMode === 'quiz') {
+ const text = StrCast(RTFCast(DocCast(this.dataDoc[this.fieldKey + '_1']).text)?.Text);
return (
<div className={`comparisonBox${this._props.isContentActive() ? '-interactive' : ''}`} style={{ display: 'flex', flexDirection: 'column' }}>
- <p style={{ color: 'white', padding: 10 }}>{StrCast(RTFCast(DocCast(this.dataDoc[this.fieldKey + '_1']).text)?.Text)}</p>
- {/* {StrCast(RTFCast(DocCast(this.dataDoc[this.fieldKey + '_1']).text)?.Text)} */}
+ <p style={{ color: 'white', padding: 10 }}>{text}</p>
+ <p style={{ display: text === '' ? 'flex' : 'none', color: 'white', marginLeft: '10px' }}>Return to all flashcards and add text to both sides. </p>
<div className="input-box">
<textarea
- value={this.useAlternate ? this._outputValue : this._inputValue}
- onChange={action(e => {
- this._inputValue = e.target.value;
- })}
- readOnly={this.useAlternate}
- />
- </div>
- <div className="submit-button" style={{ display: this.useAlternate ? 'none' : 'flex' }}>
- <button type="button" onClick={this.handleRenderGPTClick}>
- Submit
- </button>
+ value={this._frontSide ? this._outputValue : this._inputValue}
+ onChange={this.handleInputChange}
+ onScroll={e => {
+ e.stopPropagation();
+ e.preventDefault();
+ }}
+ placeholder={!this.layoutDoc[`_${this._props.fieldKey}_usePath`] ? 'Enter a response for GPT to evaluate.' : ''}
+ readOnly={this._frontSide}></textarea>
+
+ {this._loading ? (
+ <div className="loading-spinner" style={{ position: 'absolute' }}>
+ <ReactLoading type="spin" height={30} width={30} color={'blue'} />
+ </div>
+ ) : null}
</div>
- <div className="submit-button" style={{ display: this.useAlternate ? 'flex' : 'none' }}>
- <button type="button" onClick={this.handleRenderClick}>
- Edit Your Response
- </button>
+ <div>
+ <div className="submit-button" style={{ overflow: 'hidden', display: 'flex', width: '100%' }}>
+ <div
+ className="submit-buttonschema-header-button"
+ onPointerDown={e => this.openContextMenu(e.clientX, e.clientY, false)}
+ style={{ position: 'absolute', top: '5px', left: '11px', zIndex: '100', width: '5px', height: '5px', cursor: 'pointer' }}>
+ <FontAwesomeIcon color={'white'} icon="caret-down" />
+ </div>
+ <button className="submit-buttonrecord" onClick={this._listening ? this.stopListening : this.startListening} style={{ background: this._listening ? 'lightgray' : '', borderRadius: '2px' }}>
+ {<FontAwesomeIcon icon="microphone" size="lg" />}
+ </button>
+ <div
+ className="submit-buttonschema-header-button"
+ onPointerDown={e => this.openContextMenu(e.clientX, e.clientY, true)}
+ style={{ position: 'absolute', top: '5px', left: '50px', zIndex: '100', width: '5px', height: '5px', cursor: 'pointer' }}>
+ <FontAwesomeIcon color={'white'} icon="caret-down" />
+ </div>
+ <button
+ className="submit-buttonpronunciation"
+ onClick={this.evaluatePronunciation}
+ style={{ display: 'inline-flex', alignItems: 'center', background: this._listening ? 'lightgray' : '', borderRadius: '2px', width: '100%' }}>
+ Evaluate Pronunciation
+ </button>
+
+ {this._frontSide ? (
+ <button className="submit-buttonsubmit" type="button" onClick={this.handleRenderGPTClick} style={{ borderRadius: '2px', marginBottom: '3px', width: '100%' }}>
+ Submit
+ </button>
+ ) : (
+ <button className="submit-buttonsubmit" type="button" onClick={this.handleRenderClick} style={{ display: 'inline-flex', alignItems: 'center', borderRadius: '2px', marginBottom: '3px', width: '100%' }}>
+ Redo the Question
+ </button>
+ )}
+ </div>
</div>
</div>
);
@@ -388,10 +764,16 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>()
return (
<div
className={`comparisonBox${this._props.isContentActive() ? '-interactive' : ''}`} /* change className to easily disable/enable pointer events in CSS */
- style={{ display: 'flex', flexDirection: 'column' }}
- onMouseEnter={() => this.hoverFlip(true)}
- onMouseLeave={() => this.hoverFlip(false)}>
+ style={{ display: 'flex', flexDirection: 'column', overflow: 'hidden' }}
+ onMouseEnter={() => this.hoverFlip(false)}
+ onMouseLeave={() => this.hoverFlip(true)}>
{displayBox(`${this.fieldKey}_${side === 0 ? 1 : 0}`, side, this._props.PanelWidth() - 3)}
+ {this._loading ? (
+ <div className="loading-spinner" style={{ position: 'absolute' }}>
+ <ReactLoading type="spin" height={30} width={30} color={'blue'} />
+ </div>
+ ) : null}
+ {this._props.isContentActive() ? this.flashcardMenu : null}
{this.overlayAlternateIcon}
</div>
);
@@ -407,7 +789,7 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>()
<div
className="slide-bar"
style={{
- left: `calc(${this.clipWidth + '%'} - 0.5px)`,
+ left: `min(100% - 3px, calc(${this.clipWidth + '%'}))`,
transition: this._animating,
cursor: this.clipWidth < 5 ? 'e-resize' : this.clipWidth / 100 > (this._props.PanelWidth() - 5) / this._props.PanelWidth() ? 'w-resize' : undefined,
}}