talklikeapirate: we just went nucular

Chris Sexton 2024-09-28 10:28:58 -04:00
parent 0a12e796d0
commit 624258a794
5 changed files with 126 additions and 5 deletions

View File

@@ -4,6 +4,7 @@ import (
	"bytes"
	"errors"
	"fmt"
+	"github.com/velour/catbase/plugins/talklikeapirate"
	"net/http"
	"strconv"
	"strings"
@@ -33,6 +34,8 @@ type Discord struct {
	cmdHandlers map[string]CmdHandler
	guildID string

+	Pirate *talklikeapirate.TalkLikeAPiratePlugin
}

func New(config *config.Config) *Discord {
@@ -112,6 +115,14 @@ func (d Discord) Send(kind bot.Kind, args ...any) (string, error) {
}

func (d *Discord) sendMessage(channel, message string, meMessage bool, args ...any) (string, error) {
+	var err error
+	if d.Pirate != nil {
+		message, err = d.Pirate.Filter(message)
+		if err != nil {
+			log.Error().Err(err).Msg("could not pirate message")
+		}
+	}
	if meMessage && !strings.HasPrefix(message, "_") && !strings.HasSuffix(message, "_") {
		message = "_" + message + "_"
	}
@@ -167,7 +178,6 @@ func (d *Discord) sendMessage(channel, message string, meMessage bool, args ...any) (string, error) {
	maxLen := 2000
	chunkSize := maxLen - 100
	var st *discordgo.Message
-	var err error
	if len(data.Content) > maxLen {
		tmp := data.Content
		data.Content = tmp[:chunkSize]

View File

@@ -7,6 +7,7 @@ package main
import (
	"flag"
	"github.com/velour/catbase/plugins"
+	"github.com/velour/catbase/plugins/talklikeapirate"
	"io"
	"math/rand"
	"os"
@@ -71,7 +72,9 @@ func main() {
	case "slackapp":
		client = slackapp.New(c)
	case "discord":
-		client = discord.New(c)
+		d := discord.New(c)
+		d.Pirate = talklikeapirate.New(c)
+		client = d
	default:
		log.Fatal().Msgf("Unknown connection type: %s", c.Get("type", "UNSET"))
	}

View File

@@ -25,7 +25,7 @@ func (p *LLMPlugin) geminiConnect() error {
}

func (p *LLMPlugin) gemini(msg string) (chatEntry, error) {
-	model := p.geminiClient.GenerativeModel("gemini-1.5-flash")
+	model := p.geminiClient.GenerativeModel(p.c.Get("gemini.model", "gemini-1.5-flash"))
	model.SetMaxOutputTokens(int32(p.c.GetInt("gemini.maxtokens", 100)))
	model.SetTopP(float32(p.c.GetFloat64("gemini.topp", 0.95)))
	model.SetTopK(int32(p.c.GetInt("gemini.topk", 20)))

View File

@@ -0,0 +1,108 @@
package talklikeapirate

import (
	"context"
	"errors"
	"fmt"
	"github.com/google/generative-ai-go/genai"
	"github.com/rs/zerolog/log"
	"github.com/velour/catbase/bot"
	"github.com/velour/catbase/config"
	"google.golang.org/api/option"
)

// TalkLikeAPiratePlugin reimplements the send function
// with an AI intermediate.
type TalkLikeAPiratePlugin struct {
	client *genai.Client
	prompt string
	b bot.Bot
	c *config.Config
}

func New(c *config.Config) *TalkLikeAPiratePlugin {
	p := &TalkLikeAPiratePlugin{
		c: c,
	}
	return p
}

func (p *TalkLikeAPiratePlugin) Filter(input string) (string, error) {
	if !p.c.GetBool("talklikeapirate.enabled", false) {
		return input, nil
	}
	if p.client == nil {
		var err error
		p.client, err = p.getClient()
		if err != nil {
			return input, err
		}
	}
	model, err := p.GetModel()
	if err != nil {
		log.Error().Err(err).Send()
		return input, err
	}
	res, err := model.GenerateContent(context.Background(), genai.Text(input))
	if err != nil {
		log.Error().Err(err).Send()
		return input, err
	}
	if len(res.Candidates) == 0 {
		err := errors.New("no candidates found")
		log.Error().Err(err).Send()
		return input, err
	}
	// Need to check here that we got an actual completion, not a
	// warning about bad content. FinishReason exists on Completion.
	completion := ""
	for _, p := range res.Candidates[0].Content.Parts {
		completion += fmt.Sprintf("%s", p)
	}
	return completion, nil
}

func (p *TalkLikeAPiratePlugin) GetModel() (*genai.GenerativeModel, error) {
	model := p.client.GenerativeModel(p.c.Get("gemini.model", "gemini-1.5-flash"))
	model.SetMaxOutputTokens(int32(p.c.GetInt("gemini.maxtokens", 100)))
	model.SetTopP(float32(p.c.GetFloat64("gemini.topp", 0.95)))
	model.SetTopK(int32(p.c.GetInt("gemini.topk", 20)))
	model.SetTemperature(float32(p.c.GetFloat64("gemini.temp", 0.9)))
	model.SafetySettings = []*genai.SafetySetting{
		{genai.HarmCategoryHarassment, genai.HarmBlockNone},
		{genai.HarmCategoryHateSpeech, genai.HarmBlockNone},
		{genai.HarmCategorySexuallyExplicit, genai.HarmBlockNone},
		{genai.HarmCategoryDangerousContent, genai.HarmBlockNone},
	}
	if prompt := p.c.Get("talklikeapirate.systemprompt", ""); prompt != "" {
		model.SystemInstruction = &genai.Content{
			Parts: []genai.Part{genai.Text(prompt)},
		}
	} else {
		return nil, errors.New("no system prompt selected")
	}
	return model, nil
}

func (p *TalkLikeAPiratePlugin) getClient() (*genai.Client, error) {
	ctx := context.Background()
	key := p.c.Get("GEMINI_API_KEY", "")
	if key == "" {
		return nil, errors.New("missing GEMINI_API_KEY")
	}
	client, err := genai.NewClient(ctx, option.WithAPIKey(key))
	if err != nil {
		return nil, err
	}
	return client, nil
}
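
For reference, a minimal sketch of how this new plugin gets exercised. The wrapper function below is illustrative and not part of this commit; it assumes the shared config already carries talklikeapirate.enabled=true, a talklikeapirate.systemprompt, and a GEMINI_API_KEY, with the gemini.* keys falling back to the defaults shown above.

package example

import (
	"fmt"

	"github.com/velour/catbase/config"
	"github.com/velour/catbase/plugins/talklikeapirate"
)

// pirateEcho mirrors the Discord connector's new sendMessage hook: build the
// plugin from the shared config and run an outgoing message through Filter.
// With talklikeapirate.enabled left at its default (false), Filter returns
// the message unchanged.
func pirateEcho(c *config.Config, msg string) string {
	pirate := talklikeapirate.New(c)
	out, err := pirate.Filter(msg)
	if err != nil {
		// Filter hands back the original input on any error, so the caller
		// can still send something.
		fmt.Println("could not pirate message:", err)
	}
	return out
}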

View File

@@ -129,7 +129,7 @@ func (p *TLDRPlugin) betterTLDR(r bot.Request) bool {
	ch := r.Msg.Channel
	c, err := p.getClient()
	if err != nil {
-		p.b.Send(r.Conn, bot.Message, r.Msg.Channel, "Couldn't fetch an OpenAI client")
+		p.b.Send(r.Conn, bot.Message, r.Msg.Channel, "Couldn't fetch an AI client")
		return true
	}
	promptConfig := p.c.Get(templateKey, defaultTemplate)
@@ -148,7 +148,7 @@ func (p *TLDRPlugin) betterTLDR(r bot.Request) bool {
		backlog = str + backlog
	}

-	model := c.GenerativeModel("gemini-1.5-flash")
+	model := c.GenerativeModel(p.c.Get("gemini.model", "gemini-1.5-flash"))
	model.SystemInstruction = &genai.Content{
		Parts: []genai.Part{genai.Text(prompt.String())},
	}