import { ClientOptions, OpenAI } from 'openai';
import { ChatCompletionMessageParam } from 'openai/resources';

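/**
 * The kinds of GPT calls supported by this module.
 */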
enum GPTCallType {
    SUMMARY = 'summary',
    COMPLETION = 'completion',
    EDIT = 'edit',
    MERMAID = 'mermaid',
}

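/**
 * Options for a single GPT request: the model name, the maximum number of
 * output tokens, the sampling temperature, and the prompt prepended as the
 * system message.
 */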
type GPTCallOpts = {
    model: string;
    maxTokens: number;
    temp: number;
    prompt: string;
};

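/**
 * Request options for each call type. Note that the chat completions endpoint
 * used below rejects the legacy completions-only model gpt-3.5-turbo-instruct,
 * so the chat-capable gpt-3.5-turbo is used for summary, edit, and completion.
 */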
const callTypeMap: { [type: string]: GPTCallOpts } = {
    summary: { model: 'gpt-3.5-turbo', maxTokens: 256, temp: 0.5, prompt: 'Summarize this text in simpler terms: ' },
    edit: { model: 'gpt-3.5-turbo', maxTokens: 256, temp: 0.5, prompt: 'Reword this: ' },
    completion: { model: 'gpt-3.5-turbo', maxTokens: 256, temp: 0.5, prompt: '' },
    mermaid: {
        model: 'gpt-4-turbo',
        maxTokens: 2048,
        temp: 0,
        prompt:
            "Write this in mermaid code and only give me the mermaid code (Here's an example of changing color of a pie chart to help you pie title Example \"Red\": 20 \"Blue\": 50 \"Green\": 30 %%{init: {'theme': 'base', 'themeVariables': {'pie1': '#0000FF', 'pie2': '#00FF00', 'pie3': '#FF0000'}}}%% keep in mind that pie1 is the highest since it's sorted in descending order. Here's an example of a mindmap: mindmap  root((mindmap))    Origins      Long history      ::icon(fa fa-book)      Popularisation        British popular psychology author Tony Buzan    Research      On effectivness<br/>and features      On Automatic creation       Uses            Creative techniques            Strategic planning            Argument mapping    Tools      Pen and paper     Mermaid.  ",
    },
};


/**
 * Calls the OpenAI chat completions API for the given call type.
 *
 * @param inputText Text to process
 * @param callType The kind of request to make (summary, completion, edit, or mermaid)
 * @returns The AI output, or an error message if the request fails
 */
const gptAPICall = async (inputText: string, callType: GPTCallType) => {
    if (callType === GPTCallType.SUMMARY) inputText += '.';
    const opts: GPTCallOpts = callTypeMap[callType];
    try {
        const configuration: ClientOptions = {
            apiKey: process.env.OPENAI_KEY,
            dangerouslyAllowBrowser: true,
        };
        const openai = new OpenAI(configuration);

        const messages: ChatCompletionMessageParam[] = [
            { role: 'system', content: opts.prompt },
            { role: 'user', content: inputText },
        ];

        const response = await openai.chat.completions.create({
            model: opts.model,
            messages: messages,
            temperature: opts.temp,
            max_tokens: opts.maxTokens,
        });
        const content = response.choices[0].message.content;
        return content;
    } catch (err) {
        console.error(err);
        return 'Error connecting with API.';
    }
};

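/**
 * Calls the OpenAI image generation API.
 *
 * @param prompt Description of the image(s) to generate
 * @param n Number of 1024x1024 images to generate (defaults to 1)
 * @returns An array of image URLs, or undefined if the request fails
 */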
const gptImageCall = async (prompt: string, n?: number) => {
    try {
        const configuration: ClientOptions = {
            apiKey: process.env.OPENAI_KEY,
            dangerouslyAllowBrowser: true,
        };

        const openai = new OpenAI(configuration);
        const response = await openai.images.generate({
            prompt: prompt,
            n: n ?? 1,
            size: '1024x1024',
        });
        return response.data.map((data: any) => data.url);
    } catch (err) {
        console.error(err);
        return;
    }
};

export { gptAPICall, gptImageCall, GPTCallType };
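
/*
 * Example usage (illustrative sketch; the call sites below are assumptions and
 * not part of this module):
 *
 *   const summary = await gptAPICall('Some long passage of text to condense', GPTCallType.SUMMARY);
 *   const diagram = await gptAPICall('A pie chart of 60% cats and 40% dogs', GPTCallType.MERMAID);
 *   const imageUrls = await gptImageCall('A watercolor painting of a fox', 2);
 */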