Diffstat (limited to 'src')
-rw-r--r--   src/client/views/nodes/AudioBox.tsx      | 19
-rw-r--r--   src/client/views/nodes/ComparisonBox.tsx |  7
-rw-r--r--   src/client/views/nodes/ImageBox.tsx      |  1
3 files changed, 1 insertion(+), 26 deletions(-)
diff --git a/src/client/views/nodes/AudioBox.tsx b/src/client/views/nodes/AudioBox.tsx
index 2c0a102f5..25e76e2a6 100644
--- a/src/client/views/nodes/AudioBox.tsx
+++ b/src/client/views/nodes/AudioBox.tsx
@@ -299,32 +299,15 @@ export class AudioBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {
         this.Document[DocData].phoneticTranscription = response.data['transcription'];
     };

-    youtubeUpload = async () => {
-        console.log('Here');
-        const audio = {
-            file: 'Cd2ch4XV84s&t=373s',
-        };
-        const response = await axios.post('http://localhost:105/youtube/', audio, {
-            headers: {
-                'Content-Type': 'application/json',
-            },
-        });
-    };
-
     // context menu
     specificContextMenu = (): void => {
         const funcs: ContextMenuProps[] = [];
         funcs.push({
-            description: 'Youtube',
-            event: this.youtubeUpload, // prettier-ignore
-            icon: 'expand-arrows-alt',
-        });
-        funcs.push({
             description: (this.layoutDoc.hideAnchors ? "Don't hide" : 'Hide') + ' anchors',
             event: () => { this.layoutDoc.hideAnchors = !this.layoutDoc.hideAnchors; }, // prettier-ignore
             icon: 'expand-arrows-alt',
-        });
+        });

         // funcs.push({ description: (this.layoutDoc.dontAutoFollowLinks ? '' : "Don't") + ' follow links when encountered', event: () => { this.layoutDoc.dontAutoFollowLinks = !this.layoutDoc.dontAutoFollowLinks}, // prettier-ignore
diff --git a/src/client/views/nodes/ComparisonBox.tsx b/src/client/views/nodes/ComparisonBox.tsx
index f7c478729..ef66c2b11 100644
--- a/src/client/views/nodes/ComparisonBox.tsx
+++ b/src/client/views/nodes/ComparisonBox.tsx
@@ -527,12 +527,8 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>()
      */
    askGPTPhonemes = async (phonemes: string) => {
        const sentence = StrCast(RTFCast(DocCast(this.dataDoc[this.fieldKey + '_1']).text)?.Text);
-        const phon = 'w ʌ ɪ z j ɔː ɹ n e ɪ m ';
-        const phon2 = 'h ʌ ɛ r j ʌ t ʌ d eɪ';
        const phon6 = 'huː ɑɹ juː tədeɪ';
-        const phon3 = 'ʃ eɪ oʊ s i ʃ oʊ z b aɪ ð ə s iː ʃ oʊ';
        const phon4 = 'kamo estas hɔi';
-        const phon5 = 'la s e n a l';
        const promptEng =
            'Consider all possible phonetic transcriptions of the intended sentence "' +
            sentence +
@@ -574,14 +570,11 @@ export class ComparisonBox extends ViewBoxAnnotatableComponent<FieldViewProps>()
      * @param e
      */
    handleResult = (e: SpeechRecognitionEvent) => {
-        let interimTranscript = '';
        let finalTranscript = '';
        for (let i = e.resultIndex; i < e.results.length; i++) {
            const transcript = e.results[i][0].transcript;
            if (e.results[i].isFinal) {
                finalTranscript += transcript;
-            } else {
-                interimTranscript += transcript;
            }
        }
        this._inputValue += finalTranscript;
diff --git a/src/client/views/nodes/ImageBox.tsx b/src/client/views/nodes/ImageBox.tsx
index 0827eb062..b384e0059 100644
--- a/src/client/views/nodes/ImageBox.tsx
+++ b/src/client/views/nodes/ImageBox.tsx
@@ -180,7 +180,6 @@ export class ImageBox extends ViewBoxAnnotatableComponent<FieldViewProps>() {

    handleSelection = async (selection: string) => {
        this._searchInput = selection;
-        const images = await this.fetchImages();
    };

    drop = undoable((e: Event, de: DragManager.DropEvent) => {