Compare commits

No commits in common. "6f3ba974e61ff8afd599ff201c1bddd4450ef1ce" and "d6bb334a1456ad0df3ab487824ade08e1d940926" have entirely different histories.

4 changed files with 30 additions and 76 deletions

View File

@@ -1,4 +1,4 @@
-package llm
+package gpt

 import (
     "context"
@@ -10,7 +10,7 @@ import (
 var session openai.ChatSession
 var client *openai.Client

-func (p *LLMPlugin) getClient() (*openai.Client, error) {
+func (p *GPTPlugin) getClient() (*openai.Client, error) {
     token := p.c.Get("gpt.token", "")
     if token == "" {
         return nil, fmt.Errorf("no GPT token given")
@@ -18,7 +18,7 @@ func (p *LLMPlugin) getClient() (*openai.Client, error) {
     return openai.NewClient(token)
 }

-func (p *LLMPlugin) chatGPT(request string) (string, error) {
+func (p *GPTPlugin) chatGPT(request string) (string, error) {
     if client == nil {
         if err := p.setPrompt(p.getDefaultPrompt()); err != nil {
             return "", err
@@ -32,11 +32,11 @@ func (p *LLMPlugin) chatGPT(request string) (string, error) {
     return session.Complete(context.Background(), request)
 }

-func (p *LLMPlugin) getDefaultPrompt() string {
+func (p *GPTPlugin) getDefaultPrompt() string {
     return p.c.Get("gpt.prompt", "")
 }

-func (p *LLMPlugin) setPrompt(prompt string) error {
+func (p *GPTPlugin) setPrompt(prompt string) error {
     var err error
     client, err = p.getClient()
     if err != nil {

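The hunks above change nothing but the package name and the receiver type; the lazy client setup stays as it was: chatGPT builds the package-level client on first use via setPrompt, reading the gpt.token and gpt.prompt config keys. A minimal, self-contained sketch of that lazy-initialization pattern, using stub types in place of the plugin's real openai client and catbase config (both stand-ins are illustrative assumptions, not the plugin's API):

package main

import (
    "errors"
    "fmt"
)

// stubConfig stands in for the catbase config: Get returns a default when a key is unset.
type stubConfig map[string]string

func (c stubConfig) Get(key, def string) string {
    if v, ok := c[key]; ok {
        return v
    }
    return def
}

// stubClient stands in for the OpenAI client built from the configured token.
type stubClient struct{ token string }

var client *stubClient // package-level, mirroring the plugin's lazily built client

func getClient(c stubConfig) (*stubClient, error) {
    token := c.Get("gpt.token", "")
    if token == "" {
        return nil, errors.New("no GPT token given")
    }
    return &stubClient{token: token}, nil
}

func chat(c stubConfig, request string) (string, error) {
    if client == nil { // first request: build the client before completing anything
        var err error
        if client, err = getClient(c); err != nil {
            return "", err
        }
    }
    // A real implementation would call the completion API here.
    return fmt.Sprintf("[prompt=%q] completion for %q", c.Get("gpt.prompt", ""), request), nil
}

func main() {
    cfg := stubConfig{"gpt.token": "sk-example", "gpt.prompt": "be terse"}
    resp, _ := chat(cfg, "hello")
    fmt.Println(resp)
}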
View File

@@ -1,4 +1,4 @@
-package llm
+package gpt

 import (
     "errors"
@@ -13,7 +13,7 @@ import (
 const gpt3URL = "https://api.openai.com/v1/engines/%s/completions"
 const gpt3ModURL = "https://api.openai.com/v1/moderations"

-type LLMPlugin struct {
+type GPTPlugin struct {
     b bot.Bot
     c *config.Config
     h bot.HandlerTable
@@ -27,8 +27,8 @@ type chatEntry struct {
     Content string `json:"content"`
 }

-func New(b bot.Bot) *LLMPlugin {
-    p := &LLMPlugin{
+func New(b bot.Bot) *GPTPlugin {
+    p := &GPTPlugin{
         b: b,
         c: b.Config(),
     }
@@ -36,20 +36,8 @@ func New(b bot.Bot) *LLMPlugin {
     return p
 }

-func (p *LLMPlugin) register() {
+func (p *GPTPlugin) register() {
     p.h = bot.HandlerTable{
-        {
-            Kind: bot.Message, IsCmd: true,
-            Regex: regexp.MustCompile(`(?is)^gpt-prompt: (?P<text>.*)`),
-            HelpText: "set the ChatGPT prompt",
-            Handler: p.setPromptMessage,
-        },
-        {
-            Kind: bot.Message, IsCmd: true,
-            Regex: regexp.MustCompile(`(?is)^llm (?P<text>.*)`),
-            HelpText: "chat completion",
-            Handler: p.chatMessageForce,
-        },
         {
             Kind: bot.Message, IsCmd: true,
             Regex: regexp.MustCompile(`(?is)^gpt (?P<text>.*)`),
@@ -62,11 +50,17 @@ func (p *LLMPlugin) register() {
             HelpText: "chat completion",
             Handler: p.chatMessageForce,
         },
+        {
+            Kind: bot.Message, IsCmd: true,
+            Regex: regexp.MustCompile(`(?is)^gpt-prompt: (?P<text>.*)`),
+            HelpText: "set the ChatGPT prompt",
+            Handler: p.setPromptMessage,
+        },
     }
     p.b.RegisterTable(p, p.h)
 }

-func (p *LLMPlugin) setPromptMessage(r bot.Request) bool {
+func (p *GPTPlugin) setPromptMessage(r bot.Request) bool {
     prompt := r.Values["text"]
     if err := p.setPrompt(prompt); err != nil {
         resp := fmt.Sprintf("Error: %s", err)
@@ -76,7 +70,7 @@ func (p *LLMPlugin) setPromptMessage(r bot.Request) bool {
     return true
 }

-func (p *LLMPlugin) chatMessage(r bot.Request) bool {
+func (p *GPTPlugin) chatMessage(r bot.Request) bool {
     if slices.Contains(p.c.GetArray("gpt.silence", []string{}), r.Msg.Channel) {
         log.Debug().Msgf("%s silenced", r.Msg.Channel)
         return true
@@ -84,15 +78,11 @@ func (p *LLMPlugin) chatMessage(r bot.Request) bool {
     return p.chatMessageForce(r)
 }

-func (p *LLMPlugin) chatMessageForce(r bot.Request) bool {
+func (p *GPTPlugin) chatMessageForce(r bot.Request) bool {
     p.chatHistory = append(p.chatHistory, chatEntry{
         Role: "user",
         Content: r.Values["text"],
     })
-    maxHist := p.c.GetInt("gpt.maxhist", 10)
-    if len(p.chatHistory) > maxHist {
-        p.chatHistory = p.chatHistory[len(p.chatHistory)-maxHist:]
-    }
     chatResp, err := p.llama()
     if err == nil {
         p.chatHistory = append(p.chatHistory, chatResp)

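With the extra llm alias removed, register() is back to wiring the gpt triggers shown above (plus the force handler whose pattern falls outside the displayed context). A small, self-contained sketch that compiles the two patterns visible in the diff and shows what their named text group captures, which is what the handlers later read as r.Values["text"]:

package main

import (
    "fmt"
    "regexp"
)

func main() {
    // The two trigger patterns visible in register() above, copied verbatim.
    gptCmd := regexp.MustCompile(`(?is)^gpt (?P<text>.*)`)
    promptCmd := regexp.MustCompile(`(?is)^gpt-prompt: (?P<text>.*)`)

    msgs := []string{
        "gpt what's the weather like?",
        "gpt-prompt: you are a helpful, terse bot",
    }
    for _, msg := range msgs {
        for _, re := range []*regexp.Regexp{gptCmd, promptCmd} {
            if m := re.FindStringSubmatch(msg); m != nil {
                // The named group is what the plugin receives as the request text.
                fmt.Printf("%q -> text=%q\n", msg, m[re.SubexpIndex("text")])
            }
        }
    }
}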
View File

@@ -1,4 +1,4 @@
-package llm
+package gpt

 import (
     "bytes"
@@ -11,60 +11,24 @@ import (
 )

 var InstanceNotFoundError = errors.New("instance not found")
-var empty = llamaResponse{}
+var empty = chatEntry{}

-func (g *LLMPlugin) llama() (chatEntry, error) {
-    llamaURL := g.c.GetArray("gpt.llamaurls", []string{})
-    if len(llamaURL) == 0 {
-        return chatEntry{}, fmt.Errorf("could not find llama url")
+func (g *GPTPlugin) llama() (chatEntry, error) {
+    llamaURL := g.c.Get("gpt.llamaurl", "")
+    if llamaURL == "" {
+        return empty, fmt.Errorf("could not find llama url")
     }

     llamaModel := g.c.Get("gpt.llamamodel", "")
     if llamaModel == "" {
-        return chatEntry{}, fmt.Errorf("could not find llama model")
+        return empty, fmt.Errorf("could not find llama model")
     }

-    prompt := g.c.Get("gpt.lastprompt", g.c.Get("gpt.prompt", ""))
-    hist := []chatEntry{{
-        Role: "system",
-        Content: prompt,
-    }}
-    hist = append(hist, g.chatHistory...)

     req := llamaRequest{
         Model: llamaModel,
-        Messages: hist,
+        Messages: g.chatHistory,
         Stream: false,
     }

-    for _, u := range llamaURL {
-        if err := g.healthCheck(u); err != nil {
-            continue
-        }
-        llamaResp, err := mkRequest(u, req)
-        if err != nil {
-            continue
-        }
-        return llamaResp.Message, nil
-    }
-    return chatEntry{}, InstanceNotFoundError
-}
-
-func (p *LLMPlugin) healthCheck(llamaURL string) error {
-    timeout := p.c.GetInt("gpt.timeout", 1000)
-    req, _ := http.NewRequest("get", llamaURL, nil)
-    client := http.Client{
-        Timeout: time.Duration(timeout) * time.Millisecond,
-    }
-    _, err := client.Do(req)
-    if err != nil {
-        return err
-    }
-    return nil
-}
-
-func mkRequest(llamaURL string, req llamaRequest) (llamaResponse, error) {
     body, err := json.Marshal(req)
     if err != nil {
         return empty, fmt.Errorf("could not marshal llama request: %w", err)
@@ -86,7 +50,7 @@ func mkRequest(llamaURL string, req llamaRequest) (llamaResponse, error) {
         return empty, fmt.Errorf("could not unmarshal llama response: %w, raw: %s", err, string(body))
     }
-    return llamaResp, nil
+    return llamaResp.Message, nil
 }

 type llamaRequest struct {

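On the gpt side, llama() now reads a single gpt.llamaurl, sends the chat history as-is, and leaves the HTTP round trip to mkRequest, which now returns the reply message directly. The new-side lines that finish llama() and the matching mkRequest signature are not visible in this compare view, so the sketch below fills in that shape as an assumption. It is a self-contained sketch of the simplified single-URL round trip, with local types mirroring the chatEntry, llamaRequest, and llamaResponse shapes used above (the JSON field tags other than "content", the Ollama-style example URL, and the exact response shape are assumptions):

package main

import (
    "bytes"
    "encoding/json"
    "fmt"
    "net/http"
)

// Local mirrors of the plugin's wire types, reduced to the fields the diff shows.
type chatEntry struct {
    Role    string `json:"role"`
    Content string `json:"content"`
}

type llamaRequest struct {
    Model    string      `json:"model"`
    Messages []chatEntry `json:"messages"`
    Stream   bool        `json:"stream"`
}

type llamaResponse struct {
    Message chatEntry `json:"message"`
}

// mkRequest posts one request to a single llama URL and returns the reply message,
// mirroring the simplified flow: no health check and no fallback across multiple URLs.
func mkRequest(llamaURL string, req llamaRequest) (chatEntry, error) {
    body, err := json.Marshal(req)
    if err != nil {
        return chatEntry{}, fmt.Errorf("could not marshal llama request: %w", err)
    }
    resp, err := http.Post(llamaURL, "application/json", bytes.NewReader(body))
    if err != nil {
        return chatEntry{}, fmt.Errorf("could not post llama request: %w", err)
    }
    defer resp.Body.Close()
    var llamaResp llamaResponse
    if err := json.NewDecoder(resp.Body).Decode(&llamaResp); err != nil {
        return chatEntry{}, fmt.Errorf("could not unmarshal llama response: %w", err)
    }
    return llamaResp.Message, nil
}

func main() {
    req := llamaRequest{
        Model:    "llama3", // the plugin reads this from gpt.llamamodel
        Messages: []chatEntry{{Role: "user", Content: "hello"}},
        Stream:   false,
    }
    // Example endpoint only; the plugin reads the URL from gpt.llamaurl.
    msg, err := mkRequest("http://localhost:11434/api/chat", req)
    if err != nil {
        fmt.Println("request failed:", err)
        return
    }
    fmt.Println(msg.Role+":", msg.Content)
}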
View File

@@ -22,10 +22,10 @@ import (
     "github.com/velour/catbase/plugins/giphy"
     "github.com/velour/catbase/plugins/git"
     "github.com/velour/catbase/plugins/goals"
+    "github.com/velour/catbase/plugins/gpt"
     "github.com/velour/catbase/plugins/inventory"
     "github.com/velour/catbase/plugins/last"
     "github.com/velour/catbase/plugins/leftpad"
-    "github.com/velour/catbase/plugins/llm"
     "github.com/velour/catbase/plugins/mayi"
     "github.com/velour/catbase/plugins/meme"
     "github.com/velour/catbase/plugins/nerdepedia"
@@ -101,7 +101,7 @@ func Register(b bot.Bot) {
     b.AddPlugin(topic.New(b))
     b.AddPlugin(talker.New(b))
     b.AddPlugin(fact.New(b))
-    b.AddPlugin(llm.New(b))
+    b.AddPlugin(gpt.New(b))
     // catches anything left, will always return true
     b.AddPlugin(deadend.New(b))
 }