aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--src/client/views/nodes/AudioBox.tsx3
-rw-r--r--src/client/views/nodes/ComparisonBox.scss44
-rw-r--r--src/client/views/nodes/ComparisonBox.tsx52
-rw-r--r--src/client/views/nodes/ImageBox.tsx49
-rw-r--r--src/client/views/nodes/LabelBox.tsx8
-rw-r--r--src/client/views/nodes/PDFBox.tsx5
-rw-r--r--src/client/views/pdf/PDFViewer.tsx43
7 files changed, 99 insertions, 105 deletions
diff --git a/src/client/views/nodes/AudioBox.tsx b/src/client/views/nodes/AudioBox.tsx
index aaeb1eb31..2c0a102f5 100644
--- a/src/client/views/nodes/AudioBox.tsx
+++ b/src/client/views/nodes/AudioBox.tsx
@@ -297,7 +297,6 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
},
});
this.Document[DocData].phoneticTranscription = response.data['transcription'];
- console.log('RESPONSE: ' + response.data['transcription']);
};
youtubeUpload = async () => {
@@ -310,13 +309,11 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
'Content-Type': 'application/json',
},
});
- console.log('RESPONSE: ' + response.data['transcription']);
};
// context menu
specificContextMenu = (): void => {
const funcs: ContextMenuProps[] = [];
- // funcs.push({ description: 'Push info', event: this.pushInfo, icon: 'redo-alt' });
funcs.push({
description: 'Youtube',
diff --git a/src/client/views/nodes/ComparisonBox.scss b/src/client/views/nodes/ComparisonBox.scss
index da1d352f2..b7307f3a3 100644
--- a/src/client/views/nodes/ComparisonBox.scss
+++ b/src/client/views/nodes/ComparisonBox.scss
@@ -30,12 +30,10 @@
padding-top: 5px;
padding-left: 5px;
padding-right: 5px;
- // width: 80%;
border-radius: 2px;
height: 17%;
display: inline-block;
bottom: 0;
- // right: 0;
&.schema-header-button {
color: gray;
@@ -57,28 +55,11 @@
}
&.submit {
width: 40%;
- // float: right;
-
- // position: absolute;
- // position: 10px;
- // padding-left: 35%;
- // padding-right: 80%;
- // // width: 80px;
- // // right: 0;
- // right: 0;
- // bottom: 0;
}
&.record {
width: 20%;
float: left;
border-radius: 2px;
- // right: 0;
- // height: 30%;
- }
-
- button {
- // flex: 1;
- // position: relative;
}
}
@@ -128,7 +109,6 @@
textarea {
flex: 1;
padding: 10px;
- // position: relative;
resize: none;
position: 'absolute';
width: '91%';
@@ -235,7 +215,6 @@
top: 10px;
left: 10px;
z-index: 200;
- // padding: 5px;
background: #dfdfdf;
pointer-events: none;
}
@@ -249,29 +228,8 @@
display: flex;
}
}
-
- // .input-box {
- // position: absolute;
- // padding: 10px;
- // }
- // input[type='text'] {
- // flex: 1;
- // position: relative;
- // margin-right: 10px;
- // width: 100px;
- // }
}
-// .quiz-card {
-// position: relative;
-
-// input[type='text'] {
-// flex: 1;
-// position: relative;
-// margin-right: 10px;
-// width: 100px;
-// }
-// }
.QuizCard {
width: 100%;
height: 100%;
@@ -288,8 +246,6 @@
align-items: center;
justify-content: center;
.QuizCardBox {
- /* existing code */
-
.DIYNodeBox-iframe {
height: 100%;
width: 100%;
diff --git a/src/client/views/nodes/ComparisonBox.tsx b/src/client/views/nodes/ComparisonBox.tsx
index d2a032d79..9fb8bc4d6 100644
--- a/src/client/views/nodes/ComparisonBox.tsx
+++ b/src/client/views/nodes/ComparisonBox.tsx
@@ -316,6 +316,10 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>()
ContextMenu.Instance.setLangIndex(ind);
};
+ /**
+ * Determines which language the speech-to-text tool is currently set to.
+ * @returns the display name of the recognition language (e.g. 'English')
+ */
convertAbr = () => {
switch (this.recognition.lang) {
case 'en-US': return 'English'; //prettier-ignore
@@ -340,12 +344,58 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>()
ContextMenu.Instance.displayMenu(x, y);
};
+ /**
+ * Creates an AudioBox to record a user's audio.
+ */
evaluatePronunciation = () => {
const newAudio = Docs.Create.AudioDocument(nullAudio, { _width: 200, _height: 100 });
this.Document.audio = newAudio[DocData];
this._props.DocumentView?.()._props.addDocument?.(newAudio);
};
+ /**
+ * Gets the transcription of an audio recording by sending the
+ * recording to the backend.
+ */
+ pushInfo = async () => {
+ const audio = {
+ file: this._audio.url,
+ };
+ const response = await axios.post('http://localhost:105/recognize/', audio, {
+ headers: {
+ 'Content-Type': 'application/json',
+ },
+ });
+ this.Document.phoneticTranscription = response.data['transcription'];
+ };
+
+ /**
+ * Extracts the id of the youtube video url.
+ * @param url
+ * @returns
+ */
+ getYouTubeVideoId = (url: string) => {
+ const regExp = /^.*(youtu.be\/|v\/|u\/\w\/|embed\/|watch\?v=|\&v=|\?v=)([^#\&\?]*).*/;
+ const match = url.match(regExp);
+ return match && match[2].length === 11 ? match[2] : null;
+ };
+
+ /**
+ * Gets the transcript of a YouTube video by sending the video URL to the backend.
+ * @returns the transcription of the YouTube recording
+ */
+ youtubeUpload = async () => {
+ const audio = {
+ file: this.getYouTubeVideoId(StrCast(RTFCast(DocCast(this.dataDoc[this.fieldKey + '_1']).text)?.Text)),
+ };
+ const response = await axios.post('http://localhost:105/youtube/', audio, {
+ headers: {
+ 'Content-Type': 'application/json',
+ },
+ });
+ return response.data['transcription'];
+ };
+
createFlashcardPile(collectionArr: Doc[], gpt: boolean) {
const newCol = Docs.Create.CarouselDocument(collectionArr, {
_width: NumCast(this.layoutDoc['_' + this._props.fieldKey + '_width'], 250) + 50,
@@ -447,8 +497,6 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>()
} else if (callType === GPTCallType.FLASHCARD) {
this._loading = false;
return res;
- } else if (callType === GPTCallType.STACK) {
- /* empty */
}
this._loading = false;
return res;
diff --git a/src/client/views/nodes/ImageBox.tsx b/src/client/views/nodes/ImageBox.tsx
index 509a0c8d7..31f6df2ea 100644
--- a/src/client/views/nodes/ImageBox.tsx
+++ b/src/client/views/nodes/ImageBox.tsx
@@ -365,6 +365,12 @@ export class ImageBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
}
};
+ /**
+ * Calls the backend to find any text on the image, then creates label
+ * boxes at the detected text locations.
+ * @param quiz the quiz mode to enter while labeling
+ * @param i optional image data — presumably a base64 string; verify against callers
+ */
pushInfo = async (quiz: quizMode, i?: string) => {
this._quizMode = quiz;
this._loading = true;
@@ -434,6 +440,11 @@ export class ImageBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
this._loading = false;
};
+ /**
+ * Calls the createCanvas and pushInfo methods to convert the image
+ * into a form that can be passed to GPT and to find the locations
+ * of the text.
+ */
makeLabels = async () => {
try {
const hrefBase64 = await this.createCanvas();
@@ -443,6 +454,13 @@ export class ImageBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
}
};
+ /**
+ * Computes the Levenshtein edit distance between two strings, i.e. the
+ * minimum number of single-character edits to turn one into the other.
+ * @param str1 first string
+ * @param str2 second string
+ * @returns the edit distance (0 means the strings are identical)
+ */
levenshteinDistance = (str1: string, str2: string) => {
const len1 = str1.length;
const len2 = str2.length;
@@ -468,6 +486,12 @@ export class ImageBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
return dp[len1][len2];
};
+ /**
+ * Computes the Jaccard similarity between the word sets of two strings
+ * (size of the intersection divided by size of the union).
+ * @param str1 first string, split into words on spaces
+ * @param str2 second string, split into words on spaces
+ * @returns a similarity score between 0 and 1
+ */
jaccardSimilarity = (str1: string, str2: string) => {
const set1 = new Set(str1.split(' '));
const set2 = new Set(str2.split(' '));
@@ -478,6 +502,14 @@ export class ImageBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
return intersection.size / union.size;
};
+ /**
+ * Averages the Jaccard-similarity and Levenshtein-based scores to
+ * measure how close the user's response is to a label box's answer.
+ * @param str1 first string (e.g. the user's response)
+ * @param str2 second string (e.g. the expected answer)
+ * @returns the averaged similarity score between 0 and 1
+ */
stringSimilarity(str1: string, str2: string) {
const levenshteinDist = this.levenshteinDistance(str1, str2);
const levenshteinScore = 1 - levenshteinDist / Math.max(str1.length, str2.length);
@@ -508,12 +540,23 @@ export class ImageBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
);
}
+ /**
+ * Returns whether two strings are similar enough to count as a match.
+ * @param input the user's input (compared case-insensitively)
+ * @param target the expected answer (compared case-insensitively)
+ * @returns true when the similarity score is at least 0.7
+ */
compareWords = (input: string, target: string) => {
const distance = this.stringSimilarity(input.toLowerCase(), target.toLowerCase());
- // const threshold = Math.max(input.length, target.length) * 0.2; // Allow up to 20% of the length as difference
return distance >= 0.7;
};
+ /**
+ * Parses a GPT response that begins with a hex color (indicating the
+ * correctness of the user's answer) followed by a period and sentences.
+ * @param inputString the raw GPT response, e.g. '#00ff00. Correct!'
+ * @returns the extracted hex color and the explanation text
+ */
extractHexAndSentences = (inputString: string) => {
// Regular expression to match a hexadecimal number at the beginning followed by a period and sentences
const regex = /^#([0-9A-Fa-f]+)\.\s*(.+)$/s;
@@ -566,13 +609,15 @@ export class ImageBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
});
};
+ /**
+ * Removes all the label boxes from the image and exits quiz mode.
+ */
exitQuizMode = () => {
this._quizMode = quizMode.NONE;
this._quizBoxes.forEach(doc => {
this.removeDocument?.(doc);
});
this._quizBoxes = [];
- console.log('remove');
};
@action
diff --git a/src/client/views/nodes/LabelBox.tsx b/src/client/views/nodes/LabelBox.tsx
index 36f4624fe..058932457 100644
--- a/src/client/views/nodes/LabelBox.tsx
+++ b/src/client/views/nodes/LabelBox.tsx
@@ -169,11 +169,7 @@ export class LabelBox extends ViewBoxBaseComponent<FieldViewProps>() {
width: this._props.PanelWidth(),
height: this._props.PanelHeight(),
whiteSpace: 'multiLine' in boxParams && boxParams.multiLine ? 'pre-wrap' : 'pre',
- }}
- // onMouseLeave={() => {
- // this.hoverFlip(undefined);
- // }}
- >
+ }}>
<div
style={{
width: this._props.PanelWidth() - 2 * NumCast(this.layoutDoc._xPadding),
@@ -185,10 +181,8 @@ export class LabelBox extends ViewBoxBaseComponent<FieldViewProps>() {
})}
onKeyUp={action(e => {
e.stopPropagation();
- // if (e.key === 'Enter') {
this.dataDoc[this.fieldKey] = this._divRef?.innerText ?? '';
setTimeout(() => this._props.select(false));
- // }
})}
onBlur={() => {
this.dataDoc[this.fieldKey] = this._divRef?.innerText ?? '';
diff --git a/src/client/views/nodes/PDFBox.tsx b/src/client/views/nodes/PDFBox.tsx
index 7ef431885..816d4a3b0 100644
--- a/src/client/views/nodes/PDFBox.tsx
+++ b/src/client/views/nodes/PDFBox.tsx
@@ -120,11 +120,9 @@ export class PDFBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
this.replaceCanvases(docViewContent, newDiv);
const htmlString = this._pdfViewer?._mainCont.current && new XMLSerializer().serializeToString(newDiv);
- // const anchx = NumCast(cropping.x);
- // const anchy = NumCast(cropping.y);
const anchw = NumCast(cropping._width) * (this._props.NativeDimScaling?.() || 1);
const anchh = NumCast(cropping._height) * (this._props.NativeDimScaling?.() || 1);
- // const viewScale = 1;
+
cropping.title = 'crop: ' + this.Document.title;
cropping.x = NumCast(this.Document.x) + NumCast(this.layoutDoc._width);
cropping.y = NumCast(this.Document.y);
@@ -471,7 +469,6 @@ export class PDFBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
!Doc.noviceMode && optionItems.push({ description: 'Toggle Sidebar Type', event: this.toggleSidebarType, icon: 'expand-arrows-alt' });
!Doc.noviceMode && optionItems.push({ description: 'update icon', event: () => this.pdfUrl && this.updateIcon(), icon: 'expand-arrows-alt' });
- // optionItems.push({ description: "Toggle Sidebar ", event: () => this.toggleSidebar(), icon: "expand-arrows-alt" });
!options && ContextMenu.Instance.addItem({ description: 'Options...', subitems: optionItems, icon: 'asterisk' });
const help = cm.findByDescription('Help...');
const helpItems = help?.subitems ?? [];
diff --git a/src/client/views/pdf/PDFViewer.tsx b/src/client/views/pdf/PDFViewer.tsx
index 0876275d9..fc74a480e 100644
--- a/src/client/views/pdf/PDFViewer.tsx
+++ b/src/client/views/pdf/PDFViewer.tsx
@@ -30,12 +30,6 @@ import { GPTPopup } from './GPTPopup/GPTPopup';
import './PDFViewer.scss';
import { GPTCallType, gptAPICall } from '../../apis/gpt/GPT';
import ReactLoading from 'react-loading';
-// import html2canvas from 'html2canvas';
-// import SpeechRecognition, { useSpeechRecognition } from 'react-speech-recognition';
-
-// pdfjsLib.GlobalWorkerOptions.workerSrc = `/assets/pdf.worker.js`;
-// The workerSrc property shall be specified.
-// Pdfjs.GlobalWorkerOptions.workerSrc = 'https://unpkg.com/pdfjs-dist@4.4.168/build/pdf.worker.mjs';
interface IViewerProps extends FieldViewProps {
pdfBox: PDFBox;
@@ -64,43 +58,6 @@ export class PDFViewer extends ObservableReactComponent<IViewerProps> {
super(props);
makeObservable(this);
}
- // @observable transcriptRef = React.createRef();
- // @observable startBtnRef = React.createRef();
- // @observable stopBtnRef = React.createRef();
- // @observable transcriptElement = '';
-
- // handleResult = (e: SpeechRecognitionEvent) => {
- // let interimTranscript = '';
- // let finalTranscript = '';
- // console.log('H');
- // for (let i = e.resultIndex; i < e.results.length; i++) {
- // const transcript = e.results[i][0].transcript;
- // if (e.results[i].isFinal) {
- // finalTranscript += transcript;
- // } else {
- // interimTranscript += transcript;
- // }
- // }
- // console.log(interimTranscript);
- // this.transcriptElement = finalTranscript || interimTranscript;
- // };
-
- // startListening = () => {
- // const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
- // if (SpeechRecognition) {
- // console.log('here');
- // const recognition = new SpeechRecognition();
- // recognition.continuous = true; // Continue listening even if the user pauses
- // recognition.interimResults = true; // Show interim results
- // recognition.lang = 'en-US'; // Set language (optional)
- // recognition.onresult = this.handleResult.bind(this);
- // // recognition.onend = this.handleEnd.bind(this);
-
- // recognition.start();
- // // this.handleResult;
- // // recognition.stop();
- // }
- // };
@observable _pageSizes: { width: number; height: number }[] = [];
@observable _savedAnnotations = new ObservableMap<number, (HTMLDivElement & { marqueeing?: boolean })[]>();