author | alyssaf16 <alyssa_feinberg@brown.edu> | 2024-10-08 10:49:12 -0400
---|---|---
committer | alyssaf16 <alyssa_feinberg@brown.edu> | 2024-10-08 10:49:12 -0400
commit | a606005a5934913c38fba9b73886ee6e743aa635 (patch) |
tree | 52b8a8bcfdb4e52e76762ee90e940da61f9e5010 /src/client/views/pdf/PDFViewer.tsx |
parent | b67de10bc8b2eab16de7bb5dd4a7a8646e2a8d5f (diff) |
get rid of commented out code
Diffstat (limited to 'src/client/views/pdf/PDFViewer.tsx')
-rw-r--r-- | src/client/views/pdf/PDFViewer.tsx | 103 |
1 file changed, 3 insertions(+), 100 deletions(-)
diff --git a/src/client/views/pdf/PDFViewer.tsx b/src/client/views/pdf/PDFViewer.tsx
index fc74a480e..18db214c8 100644
--- a/src/client/views/pdf/PDFViewer.tsx
+++ b/src/client/views/pdf/PDFViewer.tsx
@@ -395,114 +395,17 @@ export class PDFViewer extends ObservableReactComponent<IViewerProps> {
         }
     };
 
+    /**
+     * Create a flashcard pile based on the selected text of a pdf.
+     */
     gptPDFFlashcards = async () => {
-        // const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
-        // if (SpeechRecognition) {
-        //     this.recognition = new SpeechRecognition();
-        //     this.recognition.continuous = true; // Continue listening even if the user pauses
-        //     this.recognition.interimResults = true; // Show interim results
-        //     this.recognition.lang = 'en-US'; // Set language (optional)
-
-        //     this.recognition.onresult = this.handleResult;
-        //     this.recognition.onerror = this.handleError;
-        //     this.recognition.onend = this.handleEnd;
-        // } else {
-        //     console.error("Browser doesn't support Speech Recognition API");
-        // }
-        // const Dictaphone = () => {
-        //     const { transcript, resetTranscript } = useSpeechRecognition();
-
-        //     if (!SpeechRecognition.browserSupportsSpeechRecognition()) {
-        //         return null;
-        //     }
-
-        //     return (
-        //         <div>
-        //             <button onClick={e => SpeechRecognition.startListening}>Start</button>
-        //             <button onClick={e => SpeechRecognition.stopListening}>Stop</button>
-        //             <button onClick={resetTranscript}>Reset</button>
-        //             <p>{transcript}</p>
-        //         </div>
-        //     );
-        // };
-        // const grammar =
-        //     '#JSGF V1.0; grammar colors; public <color> = aqua | azure | beige | bisque | black | blue | brown | chocolate | coral | crimson | cyan | fuchsia | ghostwhite | gold | goldenrod | gray | green | indigo | ivory | khaki | lavender | lime | linen | magenta | maroon | moccasin | navy | olive | orange | orchid | peru | pink | plum | purple | red | salmon | sienna | silver | snow | tan | teal | thistle | tomato | turquoise | violet | white | yellow ;';
-        // const recognition = new SpeechRecognition();
-        // const speechRecognitionList = new SpeechGrammarList();
-        // speechRecognitionList.addFromString(grammar, 1);
-        // recognition.grammars = speechRecognitionList;
-        // recognition.continuous = false;
-        // recognition.lang = 'en-US';
-        // recognition.interimResults = false;
-        // recognition.maxAlternatives = 1;
-
-        // const diagnostic = document.querySelector('.output');
-        // const bg = document.querySelector('html');
-
-        // document.body.onclick = () => {
-        //     recognition.start();
-        //     console.log('Ready to receive a color command.');
-        // };
-
-        // recognition.onresult = event => {
-        //     const color = event.results[0][0].transcript;
-        //     diagnostic!.textContent = `Result received: ${color}`;
-        //     bg!.style.backgroundColor = color;
-        // };
-
-        //const SpeechRecognition = SpeechRecognition || webkitSpeechRecognition;
-
-        // recognition.continous = true;
-        // recognition.interimResults = true;
-        // recognition.lang = 'en-US';
         const queryText = this._selectionText;
-
-        // const canvas = await html2canvas();
-        // const image = canvas.toDataURL("image/png", 1.0);
-        // (window as any)
-        //     .html2canvas(this._marqueeref, {
-        //         x: 100,
-        //         y: 100,
-        //         width: 100,
-        //         height: 100,
-        //     })
-        //     .then((canvas: HTMLCanvasElement) => {
-        //         const img = canvas.toDataURL('image/png');
-
-        //         const link = document.createElement('a');
-        //         link.href = img;
-        //         link.download = 'screenshot.png';
-
-        //         document.body.appendChild(link);
-        //         link.click();
-        //         link.remove();
-        //     });
-
-        // var range = window.getSelection()?.getRangeAt(0);
-        // var selectionContents = range?.extractContents();
-        // var div = document.createElement("div");
-        // div.style.color = "yellow";
-        // div.appendChild(selectionContents!);
-        // range!.insertNode(div);
-
-        // const canvas = document.createElement('canvas');
-        // const scaling = 1 / (this._props.NativeDimScaling?.() || 1);
-        // const w = AnchorMenu.Instance.marqueeWidth * scaling;
-        // const h = AnchorMenu.Instance.marqueeHeight * scaling;
-        // canvas.width = w;
-        // canvas.height = h;
-        // const ctx = canvas.getContext('2d'); // draw image to canvas. scale to target dimensions
-        // if (ctx) {
-        //     this._marqueeref && ctx.drawImage(div, NumCast(this._marqueeref.current?.left) * scaling, NumCast(this._marqueeref.current?.top) * scaling, w, h, 0, 0, w, h);
-        // }
         this._loading = true;
         try {
             if (this._selectionText === '') {
             }
             const res = await gptAPICall(queryText, GPTCallType.FLASHCARD);
-            console.log(res);
             AnchorMenu.Instance.transferToFlashcard(res || 'Something went wrong', NumCast(this._props.layoutDoc['x']), NumCast(this._props.layoutDoc['y']));
             this._selectionText = '';
         } catch (err) {
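For quick reading, here is a sketch of how `gptPDFFlashcards` looks after this commit, assembled from the new (right-hand) side of the hunk above. The fields and helpers it references (`_selectionText`, `_loading`, `_props`, `gptAPICall`, `GPTCallType`, `AnchorMenu`, `NumCast`) are defined elsewhere in PDFViewer.tsx and its imports and are not redefined here; the body of the `catch` block falls outside the visible hunk, so the logging shown there is only a placeholder assumption.

```tsx
// Sketch of the method as it stands after this commit (new side of the hunk).
// Surrounding class fields and imported helpers come from the rest of
// PDFViewer.tsx; only the catch body is a placeholder assumption.

/**
 * Create a flashcard pile based on the selected text of a pdf.
 */
gptPDFFlashcards = async () => {
    const queryText = this._selectionText;
    this._loading = true;
    try {
        if (this._selectionText === '') {
            // empty-selection guard is left blank in the source
        }
        // Ask GPT for flashcard text, then hand the result to the anchor menu
        // at the layout document's position.
        const res = await gptAPICall(queryText, GPTCallType.FLASHCARD);
        AnchorMenu.Instance.transferToFlashcard(res || 'Something went wrong', NumCast(this._props.layoutDoc['x']), NumCast(this._props.layoutDoc['y']));
        this._selectionText = '';
    } catch (err) {
        console.error(err); // placeholder: the actual error handling continues past the visible hunk
    }
};
```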