author     geireann <geireann.lindfield@gmail.com>   2023-03-02 11:44:04 -0500
committer  geireann <geireann.lindfield@gmail.com>   2023-03-02 11:44:04 -0500
commit     c6425a0469727305f76d00e3f8c545e04aad61cc (patch)
tree       ff8eb7d202f9f8c1305adcf2d4d5933c8c8dca63 /src/client/apis/gpt/Summarization.ts
parent     4a60851bd4d3495b2605863e3070c73129c9bc57 (diff)
parent     d34d212ea550a3c1ca16747fadeb9e69c936cb5d (diff)
Merge branch 'pres-trail-sophie' of https://github.com/brown-dash/Dash-Web into pres-trail-sophie
Diffstat (limited to 'src/client/apis/gpt/Summarization.ts')
-rw-r--r--   src/client/apis/gpt/Summarization.ts   41
1 file changed, 33 insertions(+), 8 deletions(-)
diff --git a/src/client/apis/gpt/Summarization.ts b/src/client/apis/gpt/Summarization.ts
index 931e0e48f..b65736237 100644
--- a/src/client/apis/gpt/Summarization.ts
+++ b/src/client/apis/gpt/Summarization.ts
@@ -1,23 +1,48 @@
 import { Configuration, OpenAIApi } from 'openai';
 
-const gptSummarize = async (text: string) => {
-    text += '.';
+enum GPTCallType {
+    SUMMARY = 'summary',
+    COMPLETION = 'completion',
+}
+
+type GPTCallOpts = {
+    model: string;
+    maxTokens: number;
+    temp: number;
+    prompt: string;
+};
+
+const callTypeMap: { [type: string]: GPTCallOpts } = {
+    summary: { model: 'text-davinci-003', maxTokens: 100, temp: 0.5, prompt: 'Summarize this text: ' },
+    completion: { model: 'text-davinci-003', maxTokens: 100, temp: 0.5, prompt: '' },
+};
+
+/**
+ * Calls the OpenAI API.
+ *
+ * @param inputText Text to process
+ * @returns AI Output
+ */
+const gptAPICall = async (inputText: string, callType: GPTCallType) => {
+    if (callType === GPTCallType.SUMMARY) inputText += '.';
+    const opts: GPTCallOpts = callTypeMap[callType];
     try {
         const configuration = new Configuration({
             apiKey: process.env.OPENAI_KEY,
         });
         const openai = new OpenAIApi(configuration);
         const response = await openai.createCompletion({
-            model: 'text-curie-001',
-            max_tokens: 256,
-            temperature: 0.7,
-            prompt: `Summarize this text in one sentence: ${text}`,
+            model: opts.model,
+            max_tokens: opts.maxTokens,
+            temperature: opts.temp,
+            prompt: `${opts.prompt}${inputText}`,
         });
         return response.data.choices[0].text;
     } catch (err) {
         console.log(err);
-        return '';
+        return 'Error connecting with API.';
     }
 };
 
-export { gptSummarize };
+
+export { gptAPICall, GPTCallType};
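
For orientation, a minimal usage sketch of the new export as a caller might use it. The calling module, the summarizeSelection helper, and the relative import path are assumptions for illustration only; they are not part of this commit.

// Hypothetical caller — not part of this commit.
// Assumes OPENAI_KEY is set in the environment so the Configuration built inside gptAPICall can authenticate.
import { gptAPICall, GPTCallType } from './Summarization';

const summarizeSelection = async (selectedText: string) => {
    // GPTCallType.SUMMARY routes through callTypeMap.summary: model 'text-davinci-003',
    // at most 100 tokens, temperature 0.5, and the 'Summarize this text: ' prompt prefix.
    const result = await gptAPICall(selectedText, GPTCallType.SUMMARY);
    // On failure gptAPICall resolves to the string 'Error connecting with API.' rather than rejecting.
    console.log(result);
};

Note the design choice visible in the catch block: errors are surfaced as a sentinel string instead of a thrown exception, so callers that render the return value directly will show the error text rather than crash.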