path: root/src/client/apis/gpt/Summarization.ts
author    Sophie Zhang <sophie_zhang@brown.edu>    2023-03-01 23:33:01 -0500
committer Sophie Zhang <sophie_zhang@brown.edu>    2023-03-01 23:33:01 -0500
commit    f189ce6f25b91fcd402b7e81ba8ed378e39e6142 (patch)
tree      181a0903c6adff0975216dc63e175be2656f9486 /src/client/apis/gpt/Summarization.ts
parent    08e15b05cd014f99726826c9db407e738040cdbb (diff)
Added text completion
Diffstat (limited to 'src/client/apis/gpt/Summarization.ts')
-rw-r--r--    src/client/apis/gpt/Summarization.ts    45
1 file changed, 30 insertions, 15 deletions
diff --git a/src/client/apis/gpt/Summarization.ts b/src/client/apis/gpt/Summarization.ts
index ba98ad591..b65736237 100644
--- a/src/client/apis/gpt/Summarization.ts
+++ b/src/client/apis/gpt/Summarization.ts
@@ -1,27 +1,41 @@
import { Configuration, OpenAIApi } from 'openai';
+enum GPTCallType {
+ SUMMARY = 'summary',
+ COMPLETION = 'completion',
+}
+
+type GPTCallOpts = {
+ model: string;
+ maxTokens: number;
+ temp: number;
+ prompt: string;
+};
+
+const callTypeMap: { [type: string]: GPTCallOpts } = {
+ summary: { model: 'text-davinci-003', maxTokens: 100, temp: 0.5, prompt: 'Summarize this text: ' },
+ completion: { model: 'text-davinci-003', maxTokens: 100, temp: 0.5, prompt: '' },
+};
+
/**
- * Summarizes the inputted text with OpenAI.
- *
- * @param text Text to summarize
- * @returns Summary of text
+ * Calls the OpenAI completion API using the options configured for the given call type.
+ * @param inputText Text to process
+ * @param callType Which preset from callTypeMap to use (summary or completion)
+ * @returns The generated output text
*/
-const gptSummarize = async (text: string) => {
- text += '.';
- const model = 'text-curie-001'; // Most advanced: text-davinci-003
- const maxTokens = 200;
- const temp = 0.5;
-
+const gptAPICall = async (inputText: string, callType: GPTCallType) => {
+ if (callType === GPTCallType.SUMMARY) inputText += '.';
+ const opts: GPTCallOpts = callTypeMap[callType];
try {
const configuration = new Configuration({
apiKey: process.env.OPENAI_KEY,
});
const openai = new OpenAIApi(configuration);
const response = await openai.createCompletion({
- model: model,
- max_tokens: maxTokens,
- temperature: temp,
- prompt: `Summarize this text: ${text}`,
+ model: opts.model,
+ max_tokens: opts.maxTokens,
+ temperature: opts.temp,
+ prompt: `${opts.prompt}${inputText}`,
});
return response.data.choices[0].text;
} catch (err) {
@@ -30,4 +44,5 @@ const gptSummarize = async (text: string) => {
}
};
-export { gptSummarize };
+
+export { gptAPICall, GPTCallType };
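
For context, a minimal usage sketch of the refactored helper. The call sites below are hypothetical (the demo function and its input strings are illustrative only) and assume OPENAI_KEY is set in the environment, as the try block above requires:

import { gptAPICall, GPTCallType } from './Summarization';

// Hypothetical call sites demonstrating the two call types.
const demo = async () => {
    // Summary: the 'Summarize this text: ' prefix from callTypeMap is prepended automatically.
    const summary = await gptAPICall('Text completion and summarization are now routed through one helper.', GPTCallType.SUMMARY);

    // Completion: the prefix is empty, so the input text itself serves as the prompt.
    const completion = await gptAPICall('Text completion lets the model continue', GPTCallType.COMPLETION);

    console.log(summary, completion);
};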