-rw-r--r--   dash-speech-to-text-101b507c5e39.json     |  13
-rw-r--r--   src/client/views/StyleProviderQuiz.scss   |   2
-rw-r--r--   src/client/views/pdf/AnchorMenu.tsx       |  31
-rw-r--r--   src/client/views/pdf/Annotation.scss      |  17
-rw-r--r--   src/client/views/pdf/PDFViewer.scss       |   6
-rw-r--r--   src/client/views/pdf/PDFViewer.tsx        | 103
6 files changed, 3 insertions, 169 deletions
diff --git a/dash-speech-to-text-101b507c5e39.json b/dash-speech-to-text-101b507c5e39.json
deleted file mode 100644
index 62a7c7eb0..000000000
--- a/dash-speech-to-text-101b507c5e39.json
+++ /dev/null
@@ -1,13 +0,0 @@
-{
-  "type": "service_account",
-  "project_id": "dash-speech-to-text",
-  "private_key_id": "101b507c5e394ec62f075d9416890bef0f597f7f",
-  "private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQC1BQ4iu7RNbiVF\nuIk/7+Zby4mMy0fde/WaCc4VxEQELK8ys2UtLU92L0npSLOyLACtLcNexdihoPMs\nHcQV7jxj9KRlk7tx3CPuVj8mn52WNIxX5ds5BszQqrfNQGObfdeFsROFzcA2ysbz\nEyfkqoFSQU5OUYANwEaF9oXc5nEkZaz5Zjjy34g8USAlySbB2sKI4o372Xi0slJo\nuPYWn93k/IA7HAmCOeooc2LTi+gu2KCnkXKevXdJCcK9Lntbm9RjoSUQiR/6R0Ls\nCTXY/g3G2FMJiraqxeI4HeAU2Pyw0sl86nHYTHh3/wNRxQkpLXROAc5qlPbMbTzp\nEu7kLI7PAgMBAAECggEADQyQk00SrKOXCOqfl5NUo3i4XATqE6ZJndFny3E63KOC\npwCq93xqW6usWVEbWxRe9seeFik5pOp0Unwm1CnjV+qdJea7aXQkHEdCsCcWIMDS\n/nccZtpS+MEwahm31Hqthv6tLe/CE8ZpscDHbG16Jb19cUSLcEepuHh9TTTzd7Bu\n9iaQGNC4Kpfvnl4zxr5RlooQFBXI/izSnHqlKW6yz/j3ntpabDFp7U3ZDe7cRpJa\nsDXwOXSULrg7LRcV9sPyC4UwfEVNFdJJg7QxWcmJe4QYdDCNnZ7JnKvMcBgZmvyH\nbJZosZGMB3QW/BdPHugk5dmT2Zm7j/0axwNdIflFwQKBgQDddrnGLQMqZwQU+Ag6\nTFjXdSXaGDKRvK4W+PvsNla8kin1TU3NHd+GUGPnBbR+bOF5Y1m0IYEHOOtgVB6E\ni+RzytKPM6ghMDAkzLTrB1R9a1Aj0b2OWQrdPjDgDw2BHcGVmwHsy3i2v97e+1hn\nduEFHZOawg2LALlXqxXb+TyYrwKBgQDRP7k9wcnju1JOcfMFcUH1gcG6CmKoZwVK\nl7cGcKYaTAZZl4W8cZVuC+l/LPH4xXQYg9FqGQ3R6NQQFQV3g83ym2nQeALluuXJ\n7vlFtViu89uiiBtg9DL1wsbYnbUjOd4CYoW9GWv5Jy99SSHp4XIIcy6MNYwuRZaq\nObx5dViz4QKBgBWqBpRPVO7x8uFGPi/NxicVi6VhFplRFsqigqphymxZ9AbOEB1P\nnc0a0hsIYNa5OflQ8baATInzVHsw32dh8/Ar2BswF3NfrmX7LHuNVQOL2uQWIW/q\nX/iym79fWZfW3FgROMf8Q4vFCz1O4yAD7hE9CnJqJz8AzCC6WMITCI17AoGANMv3\nWXXEEwn4woJN51mkYuXSk7Z7HjKgSynsrFtQDeKQ15o7zEh8g7/NsqEMjNCFcooQ\nFM0th4FPwJ8NOhKvrD86e7dMQ9rckA8UJlqTfdFZ4bCl99il4wy6T327bp/zPHbP\nd98qNzuD6ADD5ddUhn1JiWcZb7NvSJQ40gyhzQECgYB7rMpNCq46NNozGktHRhuo\nGJjHiGgSM6vVPkC7DVJbU4jU2RjI7IUL2wfFEWDKOovkts7Ugp/KEFVcff4neeuz\nMomaVWaaT3xIKENUIAzSpqcqshkqDJ/2kAknD30IJXAvGuaK49rql5Vab/Rh6zVW\nWH5hqTQk5IXR02zFSIXwXg==\n-----END PRIVATE KEY-----\n",
-  "client_email": "af010101@dash-speech-to-text.iam.gserviceaccount.com",
-  "client_id": "110674669436677368261",
-  "auth_uri": "https://accounts.google.com/o/oauth2/auth",
-  "token_uri": "https://oauth2.googleapis.com/token",
-  "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
-  "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/af010101%40dash-speech-to-text.iam.gserviceaccount.com",
-  "universe_domain": "googleapis.com"
-}
diff --git a/src/client/views/StyleProviderQuiz.scss b/src/client/views/StyleProviderQuiz.scss
index 2f52c8dec..84b3f1fef 100644
--- a/src/client/views/StyleProviderQuiz.scss
+++ b/src/client/views/StyleProviderQuiz.scss
@@ -5,8 +5,6 @@
     align-items: center;
     height: 100%;
     width: 100%;
-    // left: 50%;
-    // top: 50%;
     z-index: 200;
     font-size: 20px;
     font-weight: bold;
diff --git a/src/client/views/pdf/AnchorMenu.tsx b/src/client/views/pdf/AnchorMenu.tsx
index bff112017..e3a968180 100644
--- a/src/client/views/pdf/AnchorMenu.tsx
+++ b/src/client/views/pdf/AnchorMenu.tsx
@@ -29,8 +29,6 @@ export class AnchorMenu extends AntimodeMenu<AntimodeMenuProps> {
     private _commentRef = React.createRef<HTMLDivElement>();
     private _cropRef = React.createRef<HTMLDivElement>();
     @observable private _loading = false;
-    // @observable protected _top: number = -300;
-    // @observable protected _left: number = -300;
 
     constructor(props: AntimodeMenuProps) {
         super(props);
@@ -113,28 +111,6 @@ export class AnchorMenu extends AntimodeMenu<AntimodeMenuProps> {
         }
         GPTPopup.Instance.setLoading(false);
     };
-    // gptSummarize = async () => {
-    //     GPTPopup.Instance?.setSelectedText(this._selectedText);
-    //     GPTPopup.Instance.generateSummary();
-    // };
-
-    /**
-     * Invokes the API with the selected text and stores it in the selected text.
-     * @param e pointer down event
-     */
-    // gptPDFFlashcards = async () => {
-    //     const queryText = this._selectedText;
-    //     this._loading = true;
-    //     try {
-    //         const res = await gptAPICall(queryText, GPTCallType.FLASHCARD);
-    //         console.log(res);
-    //         // GPTPopup.Instance.setText(res || 'Something went wrong.');
-    //         this.transferToFlashcard(res || 'Something went wrong');
-    //     } catch (err) {
-    //         console.error(err);
-    //     }
-    //     // GPTPopup.Instance.setLoading(false);
-    // };
 
     /*
      * Transfers the flashcard text generated by GPT on flashcards and creates a collection out them.
@@ -158,10 +134,8 @@ export class AnchorMenu extends AntimodeMenu<AntimodeMenuProps> {
             _layout_autoHeight: true,
         });
 
-        console.log(collectionArr);
         newCol.x = x;
         newCol.y = y;
-        console.log(this._x);
         newCol.zIndex = 1000;
         this.addToCollection?.(newCol);
 
@@ -315,11 +289,6 @@ export class AnchorMenu extends AntimodeMenu<AntimodeMenuProps> {
                             />
                         </div>
                     )}
-                    {/* {this._loading ? (
-                        <div className="loading-spinner" style={{ position: 'absolute' }}>
-                            <ReactLoading type="spin" height={30} width={30} color={'white'} />
-                        </div>
-                    ) : null} */}
                 </>
             ) : (
                 <>
diff --git a/src/client/views/pdf/Annotation.scss b/src/client/views/pdf/Annotation.scss
index 26856b74e..da7efe3da 100644
--- a/src/client/views/pdf/Annotation.scss
+++ b/src/client/views/pdf/Annotation.scss
@@ -8,20 +8,3 @@
         cursor: pointer;
     }
 }
-// .loading-spinner {
-//     display: flex;
-//     justify-content: center;
-//     align-items: center;
-//     height: 90%;
-//     width: 93%;
-//     left: 10;
-//     font-size: 20px;
-//     font-weight: bold;
-//     color: #0b0a0a;
-// }
-
-// @keyframes spin {
-//     to {
-//         transform: rotate(360deg);
-//     }
-// }
diff --git a/src/client/views/pdf/PDFViewer.scss b/src/client/views/pdf/PDFViewer.scss
index e70102ce9..a225c4b59 100644
--- a/src/client/views/pdf/PDFViewer.scss
+++ b/src/client/views/pdf/PDFViewer.scss
@@ -19,10 +19,6 @@
         overflow-x: hidden;
         transform-origin: top left;
 
-        // .canvasWrapper {
-        //     transform: scale(0.75);
-        //     transform-origin: top left;
-        // }
         .textLayer {
             opacity: unset;
             mix-blend-mode: multiply; // bcz: makes text fuzzy!
@@ -115,8 +111,6 @@
     align-items: center;
     height: 100%;
     width: 100%;
-    // left: 50%;
-    // top: 50%;
    z-index: 200;
    font-size: 20px;
    font-weight: bold;
diff --git a/src/client/views/pdf/PDFViewer.tsx b/src/client/views/pdf/PDFViewer.tsx
index fc74a480e..18db214c8 100644
--- a/src/client/views/pdf/PDFViewer.tsx
+++ b/src/client/views/pdf/PDFViewer.tsx
@@ -395,114 +395,17 @@ export class PDFViewer extends ObservableReactComponent<IViewerProps> {
         }
     };
 
+    /**
+     * Create a flashcard pile based on the selected text of a pdf.
+     */
     gptPDFFlashcards = async () => {
-        // const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
-        // if (SpeechRecognition) {
-        //     this.recognition = new SpeechRecognition();
-        //     this.recognition.continuous = true; // Continue listening even if the user pauses
-        //     this.recognition.interimResults = true; // Show interim results
-        //     this.recognition.lang = 'en-US'; // Set language (optional)
-
-        //     this.recognition.onresult = this.handleResult;
-        //     this.recognition.onerror = this.handleError;
-        //     this.recognition.onend = this.handleEnd;
-        // } else {
-        //     console.error("Browser doesn't support Speech Recognition API");
-        // }
-        // const Dictaphone = () => {
-        //     const { transcript, resetTranscript } = useSpeechRecognition();
-
-        //     if (!SpeechRecognition.browserSupportsSpeechRecognition()) {
-        //         return null;
-        //     }
-
-        //     return (
-        //         <div>
-        //             <button onClick={e => SpeechRecognition.startListening}>Start</button>
-        //             <button onClick={e => SpeechRecognition.stopListening}>Stop</button>
-        //             <button onClick={resetTranscript}>Reset</button>
-        //             <p>{transcript}</p>
-        //         </div>
-        //     );
-        // };
-        // const grammar =
-        //     '#JSGF V1.0; grammar colors; public <color> = aqua | azure | beige | bisque | black | blue | brown | chocolate | coral | crimson | cyan | fuchsia | ghostwhite | gold | goldenrod | gray | green | indigo | ivory | khaki | lavender | lime | linen | magenta | maroon | moccasin | navy | olive | orange | orchid | peru | pink | plum | purple | red | salmon | sienna | silver | snow | tan | teal | thistle | tomato | turquoise | violet | white | yellow ;';
-        // const recognition = new SpeechRecognition();
-        // const speechRecognitionList = new SpeechGrammarList();
-        // speechRecognitionList.addFromString(grammar, 1);
-        // recognition.grammars = speechRecognitionList;
-        // recognition.continuous = false;
-        // recognition.lang = 'en-US';
-        // recognition.interimResults = false;
-        // recognition.maxAlternatives = 1;
-
-        // const diagnostic = document.querySelector('.output');
-        // const bg = document.querySelector('html');
-
-        // document.body.onclick = () => {
-        //     recognition.start();
-        //     console.log('Ready to receive a color command.');
-        // };
-
-        // recognition.onresult = event => {
-        //     const color = event.results[0][0].transcript;
-        //     diagnostic!.textContent = `Result received: ${color}`;
-        //     bg!.style.backgroundColor = color;
-        // };
-
-        //const SpeechRecognition = SpeechRecognition || webkitSpeechRecognition;
-
-        // recognition.continous = true;
-        // recognition.interimResults = true;
-        // recognition.lang = 'en-US';
-
         const queryText = this._selectionText;
-
-        // const canvas = await html2canvas();
-        // const image = canvas.toDataURL("image/png", 1.0);
-        // (window as any)
-        //     .html2canvas(this._marqueeref, {
-        //         x: 100,
-        //         y: 100,
-        //         width: 100,
-        //         height: 100,
-        //     })
-        //     .then((canvas: HTMLCanvasElement) => {
-        //         const img = canvas.toDataURL('image/png');
-
-        //         const link = document.createElement('a');
-        //         link.href = img;
-        //         link.download = 'screenshot.png';
-
-        //         document.body.appendChild(link);
-        //         link.click();
-        //         link.remove();
-        //     });
-
-        // var range = window.getSelection()?.getRangeAt(0);
-        // var selectionContents = range?.extractContents();
-        // var div = document.createElement("div");
-        // div.style.color = "yellow";
-        // div.appendChild(selectionContents!);
-        // range!.insertNode(div);
-
-        // const canvas = document.createElement('canvas');
-        // const scaling = 1 / (this._props.NativeDimScaling?.() || 1);
-        // const w = AnchorMenu.Instance.marqueeWidth * scaling;
-        // const h = AnchorMenu.Instance.marqueeHeight * scaling;
-        // canvas.width = w;
-        // canvas.height = h;
-        // const ctx = canvas.getContext('2d'); // draw image to canvas. scale to target dimensions
-        // if (ctx) {
-        //     this._marqueeref && ctx.drawImage(div, NumCast(this._marqueeref.current?.left) * scaling, NumCast(this._marqueeref.current?.top) * scaling, w, h, 0, 0, w, h);
-        // }
 
         this._loading = true;
         try {
             if (this._selectionText === '') {
             }
             const res = await gptAPICall(queryText, GPTCallType.FLASHCARD);
-            console.log(res);
             AnchorMenu.Instance.transferToFlashcard(res || 'Something went wrong', NumCast(this._props.layoutDoc['x']), NumCast(this._props.layoutDoc['y']));
             this._selectionText = '';
         } catch (err) {