package llm
import (
"errors"
"fmt"
"github.com/rs/zerolog/log"
"github.com/velour/catbase/bot"
"github.com/velour/catbase/config"
"regexp"
"slices"
)
const gpt3URL = "https://api.openai.com/v1/engines/%s/completions"
const gpt3ModURL = "https://api.openai.com/v1/moderations"
2024-05-11 17:56:29 +00:00
type LLMPlugin struct {
2024-05-11 14:37:57 +00:00
b bot.Bot
c *config.Config
h bot.HandlerTable
chatCount int
chatHistory []chatEntry
}
type chatEntry struct {
Role string `json:"role"`
Content string `json:"content"`
}
2024-05-11 17:56:29 +00:00
func New(b bot.Bot) *LLMPlugin {
p := &LLMPlugin{
2024-05-11 14:37:57 +00:00
b: b,
c: b.Config(),
}
p.register()
return p
}
2024-05-11 17:56:29 +00:00
func (p *LLMPlugin) register() {
2024-05-11 14:37:57 +00:00
p.h = bot.HandlerTable{
{
Kind: bot.Message, IsCmd: true,
Regex: regexp.MustCompile(`(?is)^gpt (?P<text>.*)`),
HelpText: "chat completion",
Handler: p.chatMessageForce,
},
{
Kind: bot.Message, IsCmd: true,
Regex: regexp.MustCompile(`(?is)^got (?P<text>.*)`),
HelpText: "chat completion",
Handler: p.chatMessageForce,
},
{
Kind: bot.Message, IsCmd: true,
Regex: regexp.MustCompile(`(?is)^gpt-prompt: (?P<text>.*)`),
HelpText: "set the ChatGPT prompt",
Handler: p.setPromptMessage,
},
}
p.b.RegisterTable(p, p.h)
}
2024-05-11 17:56:29 +00:00
func (p *LLMPlugin) setPromptMessage(r bot.Request) bool {
2024-05-11 14:37:57 +00:00
prompt := r.Values["text"]
if err := p.setPrompt(prompt); err != nil {
resp := fmt.Sprintf("Error: %s", err)
p.b.Send(r.Conn, bot.Message, r.Msg.Channel, resp)
}
p.b.Send(r.Conn, bot.Message, r.Msg.Channel, fmt.Sprintf(`Okay. I set the prompt to: "%s"`, prompt))
return true
}
2024-05-11 17:56:29 +00:00
func (p *LLMPlugin) chatMessage(r bot.Request) bool {
2024-05-11 14:37:57 +00:00
if slices.Contains(p.c.GetArray("gpt.silence", []string{}), r.Msg.Channel) {
log.Debug().Msgf("%s silenced", r.Msg.Channel)
return true
}
return p.chatMessageForce(r)
}
2024-05-11 17:56:29 +00:00
func (p *LLMPlugin) chatMessageForce(r bot.Request) bool {
2024-05-11 14:37:57 +00:00
p.chatHistory = append(p.chatHistory, chatEntry{
Role: "user",
Content: r.Values["text"],
})
chatResp, err := p.llama()
if err == nil {
p.chatHistory = append(p.chatHistory, chatResp)
p.b.Send(r.Conn, bot.Message, r.Msg.Channel, chatResp.Content)
return true
} else if !errors.Is(err, InstanceNotFoundError) {
log.Error().Err(err).Msgf("error contacting llama")
} else {
log.Info().Msgf("Llama is currently down")
}
resp, err := p.chatGPT(r.Values["text"])
if err != nil {
resp = fmt.Sprintf("Error: %s", err)
}
p.chatHistory = append(p.chatHistory, chatEntry{
Role: "assistant",
Content: resp,
})
p.b.Send(r.Conn, bot.Message, r.Msg.Channel, resp)
return true
}