1
0
mirror of https://github.com/velour/catbase.git synced 2025-04-03 19:51:42 +00:00
catbase/plugins/llm/chatgpt.go

53 lines
1.1 KiB
Go
Raw Normal View History

2024-05-11 13:56:29 -04:00
package llm
2023-03-03 11:37:34 -05:00
import (
"context"
"fmt"
"github.com/andrewstuart/openai"
2023-03-03 11:37:34 -05:00
)
2023-03-03 12:14:06 -05:00
// session is the active chat session shared by all requests; it is
// (re)created by setPrompt.
var session openai.ChatSession

// client is the OpenAI API client, initialized lazily by setPrompt via
// getClient. A nil client signals that no session exists yet.
var client *openai.Client
2024-05-11 13:56:29 -04:00
func (p *LLMPlugin) getClient() (*openai.Client, error) {
2023-03-03 11:37:34 -05:00
token := p.c.Get("gpt.token", "")
if token == "" {
return nil, fmt.Errorf("no GPT token given")
}
2023-03-03 12:14:06 -05:00
return openai.NewClient(token)
2023-03-03 11:37:34 -05:00
}
2024-05-11 13:56:29 -04:00
func (p *LLMPlugin) chatGPT(request string) (string, error) {
2023-03-03 12:14:06 -05:00
if client == nil {
if err := p.setPrompt(p.getDefaultPrompt()); err != nil {
2023-03-03 11:37:34 -05:00
return "", err
}
}
if p.chatCount > p.c.GetInt("gpt.maxchats", 10) {
2024-03-23 08:12:44 -04:00
p.setPrompt(p.c.Get("gpt.lastprompt", p.getDefaultPrompt()))
p.chatCount = 0
}
p.chatCount++
2023-03-03 11:37:34 -05:00
return session.Complete(context.Background(), request)
}
2024-05-11 13:56:29 -04:00
func (p *LLMPlugin) getDefaultPrompt() string {
return p.c.Get("gpt.prompt", "")
2023-03-03 11:37:34 -05:00
}
2024-05-11 13:56:29 -04:00
func (p *LLMPlugin) setPrompt(prompt string) error {
2023-03-03 15:04:06 -05:00
var err error
client, err = p.getClient()
2023-03-03 11:37:34 -05:00
if err != nil {
return err
}
2023-03-03 12:14:06 -05:00
session = client.NewChatSession(prompt)
2024-03-23 08:12:44 -04:00
session.Model = p.c.Get("gpt.model", "gpt-3.5-turbo")
err = p.c.Set("gpt.lastprompt", prompt)
if err != nil {
return err
}
2023-03-03 11:37:34 -05:00
return nil
}