path: root/src/client/apis/gpt/GPT.ts
Diffstat (limited to 'src/client/apis/gpt/GPT.ts')
-rw-r--r--  src/client/apis/gpt/GPT.ts  55
1 file changed, 51 insertions(+), 4 deletions(-)
diff --git a/src/client/apis/gpt/GPT.ts b/src/client/apis/gpt/GPT.ts
index 1f50ad0b8..7c53ee83e 100644
--- a/src/client/apis/gpt/GPT.ts
+++ b/src/client/apis/gpt/GPT.ts
@@ -5,8 +5,8 @@ enum GPTCallType {
SUMMARY = 'summary',
COMPLETION = 'completion',
EDIT = 'edit',
- // MERMAID='mermaid'
- SORT = 'sort'
+ SORT = 'sort',
+ DESCRIBE = 'describe'
}
type GPTCallOpts = {
@@ -20,7 +20,10 @@ const callTypeMap: { [type: string]: GPTCallOpts } = {
summary: { model: 'gpt-3.5-turbo-instruct', maxTokens: 256, temp: 0.5, prompt: 'Summarize this text in simpler terms: ' },
edit: { model: 'gpt-3.5-turbo-instruct', maxTokens: 256, temp: 0.5, prompt: 'Reword this: ' },
completion: { model: 'gpt-3.5-turbo-instruct', maxTokens: 256, temp: 0.5, prompt: '' },
- sort:{model:'gpt-4-turbo',maxTokens:2048,temp:0,prompt:"I'm going to give you a list of strings. Sort them into lists by shared content. The format of should be: '[string1, string2], [string 3, string4][string5, string6]' where the actual numbers of strings in each category / total categorys is arbritray and up to your interpretation"}
+ sort: { model: 'gpt-4-turbo', maxTokens: 2048, temp: 0.5, prompt: "I'm going to give you a list of strings. Sort them into lists by shared content. The format should be: [string1, string2], [string3, string4], [string5, string6], where the number of strings in each category and the total number of categories are arbitrary and up to your interpretation." },
+ describe: { model: 'gpt-4-vision-preview', maxTokens: 2048, temp: 0, prompt: 'Describe these images in 3-5 words' },
+
+
};
@@ -30,6 +33,7 @@ const callTypeMap: { [type: string]: GPTCallOpts } = {
* @param inputText Text to process
* @returns AI Output
*/
+
const gptAPICall = async (inputText: string, callType: GPTCallType) => {
if (callType === GPTCallType.SUMMARY) inputText += '.';
const opts: GPTCallOpts = callTypeMap[callType];
@@ -59,6 +63,49 @@ const gptAPICall = async (inputText: string, callType: GPTCallType) => {
}
};
+
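+/**
+ * Asks a vision-capable chat model for a short (3-5 word) description of an image.
+ * @param imgUrl URL of the image to describe
+ * @returns The model's description, or an error message if the request fails
+ */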
+const gptImageLabel = async (imgUrl: string): Promise<string> => {
+ try {
+ const configuration: ClientOptions = {
+ apiKey: "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",
+ dangerouslyAllowBrowser: true,
+ };
+
+ const openai = new OpenAI(configuration);
+ const response = await openai.chat.completions.create({
+ model: 'gpt-4o',
+ messages: [
+ {
+ role: 'user',
+ content: [
+ { type: 'text', text: 'Describe this image in 3-5 words' },
+ {
+ type: 'image_url',
+ image_url: {
+ url: imgUrl,
+ },
+ },
+ ],
+ },
+ ],
+ });
+ if (response.choices[0].message.content) {
+ return response.choices[0].message.content;
+ } else {
+ return 'Unable to describe this image';
+ }
+ } catch (err) {
+ console.log(err);
+ return 'Error connecting with API';
+ }
+};
+
+
const gptImageCall = async (prompt: string, n?: number) => {
try {
const configuration: ClientOptions = {
@@ -80,4 +127,4 @@ const gptImageCall = async (prompt: string, n?: number) => {
}
};
-export { gptAPICall, gptImageCall, GPTCallType };
\ No newline at end of file
+export { gptAPICall, gptImageCall, gptImageLabel, GPTCallType };
\ No newline at end of file