mirror of https://github.com/velour/catbase.git
Compare commits
No commits in common. "6963881c84b8098fa376bae05a5e452f9838e297" and "fc9f264a839de42cd29cbe366940efe588c44e54" have entirely different histories.
6963881c84...fc9f264a83
@@ -7,10 +7,10 @@ jobs:
     runs-on: ubuntu-latest
     steps:
 
-    - name: Set up Go 1.21
+    - name: Set up Go 1.18
      uses: actions/setup-go@v1
      with:
-        go-version: 1.21.x
+        go-version: 1.18.x
      id: go
 
     - name: Check out code into the Go module directory
@@ -106,7 +106,7 @@ func (c *Config) GetString(key, fallback string) string {
 	q := `select value from config where key=?`
 	err := c.DB.Get(&configValue, q, key)
 	if err != nil {
-		log.Info().Msgf("WARN: Key %s is empty", key)
+		log.Debug().Msgf("WARN: Key %s is empty", key)
 		return fallback
 	}
 	return configValue
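For orientation, here is a small, self-contained sketch of the lookup-with-fallback pattern that GetString implements in the hunk above; a plain map stands in for the config database, and the key names are made up for illustration:

package main

import "fmt"

// getString mirrors the shape of Config.GetString: return the stored
// value, or the fallback when the key cannot be found.
func getString(store map[string]string, key, fallback string) string {
	v, ok := store[key]
	if !ok {
		return fallback
	}
	return v
}

func main() {
	store := map[string]string{"nick": "catbase"}
	fmt.Println(getString(store, "nick", "bot"))    // catbase
	fmt.Println(getString(store, "missing", "bot")) // bot
}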
@@ -82,7 +82,7 @@ func TestBabblerNothingSaid(t *testing.T) {
 	}
 }
 
-func testBabbler(t *testing.T) {
+func TestBabbler(t *testing.T) {
 	mb := bot.NewMockBot()
 	bp := newBabblerPlugin(mb)
 	assert.NotNil(t, bp)
@@ -9,7 +9,6 @@ import (
 	"net/http"
 	"reflect"
 	"regexp"
-	"slices"
 	"strings"
 
 	"github.com/rs/zerolog/log"
@@ -49,7 +48,7 @@ func (p *GPTPlugin) register() {
 			Kind: bot.Message, IsCmd: true,
 			Regex:    regexp.MustCompile(`(?is)^gpt (?P<text>.*)`),
 			HelpText: "chat completion",
-			Handler:  p.chatMessageForce,
+			Handler:  p.chatMessage,
 		},
 		{
 			Kind: bot.Message, IsCmd: true,
@@ -63,6 +62,7 @@ func (p *GPTPlugin) register() {
 			Handler: p.chatMessage,
 		},
 	}
+	log.Debug().Msg("Registering GPT3 handlers")
 	p.b.RegisterTable(p, p.h)
 }
 
@@ -77,14 +77,6 @@ func (p *GPTPlugin) setPromptMessage(r bot.Request) bool {
 }
 
 func (p *GPTPlugin) chatMessage(r bot.Request) bool {
-	if slices.Contains(p.c.GetArray("gpt.silence", []string{}), r.Msg.Channel) {
-		log.Debug().Msgf("%s silenced", r.Msg.Channel)
-		return true
-	}
-	return p.chatMessageForce(r)
-}
-
-func (p *GPTPlugin) chatMessageForce(r bot.Request) bool {
 	resp, err := p.chatGPT(r.Values["text"])
 	if err != nil {
 		resp = fmt.Sprintf("Error: %s", err)
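The left-hand handler removed in this hunk gates GPT replies behind a per-channel silence list. Below is a self-contained sketch of that guard pattern, with made-up channel names standing in for the gpt.silence config array (slices.Contains is the standard-library call the removed code uses):

package main

import (
	"fmt"
	"slices"
)

func main() {
	// Stands in for p.c.GetArray("gpt.silence", []string{}).
	silenced := []string{"#ops", "#announcements"}
	for _, ch := range []string{"#general", "#ops"} {
		if slices.Contains(silenced, ch) {
			fmt.Printf("%s: silenced, skip the handler\n", ch)
			continue
		}
		fmt.Printf("%s: handle the message\n", ch)
	}
}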
@@ -1,14 +1,8 @@
 package tldr
 
 import (
-	"bytes"
-	"context"
 	"fmt"
-	"github.com/andrewstuart/openai"
-	"github.com/velour/catbase/config"
-	"regexp"
 	"strings"
-	"text/template"
 	"time"
 
 	"github.com/velour/catbase/bot"
@@ -19,14 +13,9 @@ import (
 	"github.com/james-bowman/nlp"
 )
 
-const templateKey = "tldr.prompttemplate"
-
-var defaultTemplate = "Summarize the following conversation:\n"
-
 type TLDRPlugin struct {
-	b           bot.Bot
-	c           *config.Config
-	history     map[string][]history
+	bot         bot.Bot
+	history     []history
 	index       int
 	lastRequest time.Time
 }
@@ -39,165 +28,120 @@ type history struct {
 
 func New(b bot.Bot) *TLDRPlugin {
 	plugin := &TLDRPlugin{
-		b:           b,
-		c:           b.Config(),
-		history:     map[string][]history{},
+		bot:         b,
+		history:     []history{},
 		index:       0,
 		lastRequest: time.Now().Add(-24 * time.Hour),
 	}
-	plugin.register()
+	b.Register(plugin, bot.Message, plugin.message)
+	b.Register(plugin, bot.Help, plugin.help)
 	return plugin
 }
 
-func (p *TLDRPlugin) register() {
-	p.b.RegisterTable(p, bot.HandlerTable{
-		{
-			Kind: bot.Message, IsCmd: true,
-			Regex:    regexp.MustCompile(`old tl;dr`),
-			HelpText: "Get a rather inaccurate summary of the channel",
-			Handler:  p.tldrCmd,
-		},
-		{
-			Kind: bot.Message, IsCmd: true,
-			Regex:    regexp.MustCompile(`tl;?dr-prompt$`),
-			HelpText: "Get the tl;dr prompt",
-			Handler:  p.squawkTLDR,
-		},
-		{
-			Kind: bot.Message, IsCmd: true,
-			Regex:    regexp.MustCompile(`tl;?dr-prompt reset`),
-			HelpText: "Reset the tl;dr prompt",
-			Handler:  p.resetTLDR,
-		},
-		{
-			Kind: bot.Message, IsCmd: true,
-			Regex:    regexp.MustCompile(`tl;?dr-prompt (?P<prompt>.*)`),
-			HelpText: "Set the tl;dr prompt",
-			Handler:  p.setTLDR,
-		},
-		{
-			Kind: bot.Message, IsCmd: true,
-			Regex:    regexp.MustCompile(`tl;?dr`),
-			HelpText: "Get a summary of the channel",
-			Handler:  p.betterTLDR,
-		},
-		{
-			Kind: bot.Message, IsCmd: false,
-			Regex:   regexp.MustCompile(`.*`),
-			Handler: p.record,
-		},
-	})
-	p.b.Register(p, bot.Help, p.help)
-}
-
-func (p *TLDRPlugin) tldrCmd(r bot.Request) bool {
-	timeLimit := time.Duration(p.b.Config().GetInt("TLDR.HourLimit", 1))
-	if p.lastRequest.After(time.Now().Add(-timeLimit * time.Hour)) {
-		p.b.Send(r.Conn, bot.Message, r.Msg.Channel, "Slow down, cowboy. Read that tiny backlog.")
-		return true
-	}
-	return false
-}
-
-func (p *TLDRPlugin) record(r bot.Request) bool {
+func (p *TLDRPlugin) message(c bot.Connector, kind bot.Kind, message msg.Message, args ...any) bool {
+	timeLimit := time.Duration(p.bot.Config().GetInt("TLDR.HourLimit", 1))
+	lowercaseMessage := strings.ToLower(message.Body)
+	if lowercaseMessage == "tl;dr" && p.lastRequest.After(time.Now().Add(-timeLimit*time.Hour)) {
+		p.bot.Send(c, bot.Message, message.Channel, "Slow down, cowboy. Read that tiny backlog.")
+		return true
+	} else if lowercaseMessage == "tl;dr" {
+		p.lastRequest = time.Now()
+		nTopics := p.bot.Config().GetInt("TLDR.Topics", 5)
+
+		stopWordSlice := p.bot.Config().GetArray("TLDR.StopWords", []string{})
+		if len(stopWordSlice) == 0 {
+			stopWordSlice = THESE_ARE_NOT_THE_WORDS_YOU_ARE_LOOKING_FOR
+			p.bot.Config().SetArray("TLDR.StopWords", stopWordSlice)
+		}
+
+		vectoriser := nlp.NewCountVectoriser(stopWordSlice...)
+		lda := nlp.NewLatentDirichletAllocation(nTopics)
+		pipeline := nlp.NewPipeline(vectoriser, lda)
+		docsOverTopics, err := pipeline.FitTransform(p.getTopics()...)
+
+		if err != nil {
+			log.Error().Err(err)
+			return false
+		}
+
+		bestScores := make([][]float64, nTopics)
+		bestDocs := make([][]history, nTopics)
+
+		supportingDocs := p.bot.Config().GetInt("TLDR.Support", 3)
+		for i := 0; i < nTopics; i++ {
+			bestScores[i] = make([]float64, supportingDocs)
+			bestDocs[i] = make([]history, supportingDocs)
+		}
+
+		dr, dc := docsOverTopics.Dims()
+		for topic := 0; topic < dr; topic++ {
+			minScore, minIndex := min(bestScores[topic])
+
+			for doc := 0; doc < dc; doc++ {
+				score := docsOverTopics.At(topic, doc)
+				if score > minScore {
+					bestScores[topic][minIndex] = score
+					bestDocs[topic][minIndex] = p.history[doc]
+					minScore, minIndex = min(bestScores[topic])
+				}
+			}
+		}
+
+		topicsOverWords := lda.Components()
+		tr, tc := topicsOverWords.Dims()
+
+		vocab := make([]string, len(vectoriser.Vocabulary))
+		for k, v := range vectoriser.Vocabulary {
+			vocab[v] = k
+		}
+
+		response := "Here you go captain 'too good to read backlog':\n"
+
+		for topic := 0; topic < tr; topic++ {
+			bestScore := -1.
+			bestTopic := ""
+			for word := 0; word < tc; word++ {
+				score := topicsOverWords.At(topic, word)
+				if score > bestScore {
+					bestScore = score
+					bestTopic = vocab[word]
+				}
+			}
+			response += fmt.Sprintf("\n*Topic #%d: %s*\n", topic, bestTopic)
+			for i := range bestDocs[topic] {
+				response += fmt.Sprintf("<%s>%s\n", bestDocs[topic][i].user, bestDocs[topic][i].body)
+			}
+
+		}
+
+		p.bot.Send(c, bot.Message, message.Channel, response)
+
+		return true
+	}
+
 	hist := history{
-		body:      strings.ToLower(r.Msg.Body),
-		user:      r.Msg.User.Name,
+		body:      lowercaseMessage,
+		user:      message.User.Name,
 		timestamp: time.Now(),
 	}
-	p.addHistory(r.Msg.Channel, hist)
+	p.addHistory(hist)
 
 	return false
 }
 
-func (p *TLDRPlugin) oldTLDR(r bot.Request) bool {
-	p.lastRequest = time.Now()
-	nTopics := p.b.Config().GetInt("TLDR.Topics", 5)
-
-	stopWordSlice := p.b.Config().GetArray("TLDR.StopWords", []string{})
-	if len(stopWordSlice) == 0 {
-		stopWordSlice = THESE_ARE_NOT_THE_WORDS_YOU_ARE_LOOKING_FOR
-		p.b.Config().SetArray("TLDR.StopWords", stopWordSlice)
-	}
-
-	vectoriser := nlp.NewCountVectoriser(stopWordSlice...)
-	lda := nlp.NewLatentDirichletAllocation(nTopics)
-	pipeline := nlp.NewPipeline(vectoriser, lda)
-	docsOverTopics, err := pipeline.FitTransform(p.getTopics()...)
-
-	if err != nil {
-		log.Error().Err(err)
-		return false
-	}
-
-	bestScores := make([][]float64, nTopics)
-	bestDocs := make([][]history, nTopics)
-
-	supportingDocs := p.b.Config().GetInt("TLDR.Support", 3)
-	for i := 0; i < nTopics; i++ {
-		bestScores[i] = make([]float64, supportingDocs)
-		bestDocs[i] = make([]history, supportingDocs)
-	}
-
-	dr, dc := docsOverTopics.Dims()
-	for topic := 0; topic < dr; topic++ {
-		minScore, minIndex := min(bestScores[topic])
-
-		for doc := 0; doc < dc; doc++ {
-			score := docsOverTopics.At(topic, doc)
-			if score > minScore {
-				bestScores[topic][minIndex] = score
-				bestDocs[topic][minIndex] = p.history[r.Msg.Channel][doc]
-				minScore, minIndex = min(bestScores[topic])
-			}
-		}
-	}
-
-	topicsOverWords := lda.Components()
-	tr, tc := topicsOverWords.Dims()
-
-	vocab := make([]string, len(vectoriser.Vocabulary))
-	for k, v := range vectoriser.Vocabulary {
-		vocab[v] = k
-	}
-
-	response := "Here you go captain 'too good to read backlog':\n"
-
-	for topic := 0; topic < tr; topic++ {
-		bestScore := -1.
-		bestTopic := ""
-		for word := 0; word < tc; word++ {
-			score := topicsOverWords.At(topic, word)
-			if score > bestScore {
-				bestScore = score
-				bestTopic = vocab[word]
-			}
-		}
-		response += fmt.Sprintf("\n*Topic #%d: %s*\n", topic, bestTopic)
-		for i := range bestDocs[topic] {
-			response += fmt.Sprintf("<%s>%s\n", bestDocs[topic][i].user, bestDocs[topic][i].body)
-		}
-
-	}
-
-	p.b.Send(r.Conn, bot.Message, r.Msg.Channel, response)
-
-	return true
-}
-
-func (p *TLDRPlugin) addHistory(ch string, hist history) {
-	p.history[ch] = append(p.history[ch], hist)
-	sz := len(p.history[ch])
-	max := p.b.Config().GetInt("TLDR.HistorySize", 1000)
-	keepHrs := time.Duration(p.b.Config().GetInt("TLDR.KeepHours", 24))
+func (p *TLDRPlugin) addHistory(hist history) {
+	p.history = append(p.history, hist)
+	sz := len(p.history)
+	max := p.bot.Config().GetInt("TLDR.HistorySize", 1000)
+	keepHrs := time.Duration(p.bot.Config().GetInt("TLDR.KeepHours", 24))
 	// Clamp the size of the history
 	if sz > max {
-		p.history[ch] = p.history[ch][len(p.history)-max:]
+		p.history = p.history[len(p.history)-max:]
 	}
 	// Remove old entries
 	yesterday := time.Now().Add(-keepHrs * time.Hour)
 	begin := 0
-	for i, m := range p.history[ch] {
+	for i, m := range p.history {
 		if !m.timestamp.Before(yesterday) {
 			begin = i - 1 // should keep this message
 			if begin < 0 {
@@ -206,22 +150,20 @@ func (p *TLDRPlugin) addHistory(ch string, hist history) {
 			break
 		}
 	}
-	p.history[ch] = p.history[ch][begin:]
+	p.history = p.history[begin:]
 }
 
 func (p *TLDRPlugin) getTopics() []string {
 	hist := []string{}
-	for _, ch := range p.history {
-		for _, h := range ch {
-			hist = append(hist, h.body)
-		}
+	for _, h := range p.history {
+		hist = append(hist, h.body)
 	}
 	return hist
 }
 
 // Help responds to help requests. Every plugin must implement a help function.
 func (p *TLDRPlugin) help(c bot.Connector, kind bot.Kind, message msg.Message, args ...any) bool {
-	p.b.Send(c, bot.Message, message.Channel, "tl;dr")
+	p.bot.Send(c, bot.Message, message.Channel, "tl;dr")
 	return true
 }
 
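The left side of these tldr hunks keys the history buffer by channel and flattens it into a single document list for the topic model (getTopics above). A self-contained sketch of that flattening step, with illustrative data only:

package main

import "fmt"

type entry struct{ user, body string }

func main() {
	// Per-channel history, as in the map[string][]history layout above.
	history := map[string][]entry{
		"#general": {{"alice", "hello"}, {"bob", "tl;dr"}},
		"#dev":     {{"carol", "ship it"}},
	}
	// Flatten every channel's entries into one slice of documents.
	var docs []string
	for _, ch := range history {
		for _, e := range ch {
			docs = append(docs, e.body)
		}
	}
	fmt.Println(docs)
}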
@@ -236,69 +178,3 @@ func min(slice []float64) (float64, int) {
 		}
 	}
 	return minVal, minIndex
 }
-
-func (p *TLDRPlugin) betterTLDR(r bot.Request) bool {
-	ch := r.Msg.Channel
-	c, err := p.getClient()
-	if err != nil {
-		p.b.Send(r.Conn, bot.Message, r.Msg.Channel, "Couldn't fetch an OpenAI client")
-		return true
-	}
-	promptConfig := p.c.Get(templateKey, defaultTemplate)
-	promptTpl := template.Must(template.New("gptprompt").Parse(promptConfig))
-	prompt := bytes.Buffer{}
-	data := p.c.GetMap("tldr.promptdata", map[string]string{})
-	promptTpl.Execute(&prompt, data)
-	backlog := ""
-	maxLen := p.c.GetInt("tldr.maxgpt", 4096)
-	for i := len(p.history[ch]) - 1; i >= 0; i-- {
-		h := p.history[ch][i]
-		str := fmt.Sprintf("%s: %s\n", h.user, h.body)
-		if len(backlog) > maxLen {
-			break
-		}
-		backlog = str + backlog
-	}
-	sess := c.NewChatSession(prompt.String())
-	completion, err := sess.Complete(context.TODO(), backlog)
-	if err != nil {
-		p.b.Send(r.Conn, bot.Message, r.Msg.Channel, "Couldn't run the OpenAI request")
-		return true
-	}
-	log.Debug().
-		Str("prompt", prompt.String()).
-		Str("backlog", backlog).
-		Str("completion", completion).
-		Msgf("tl;dr")
-	p.b.Send(r.Conn, bot.Message, r.Msg.Channel, completion)
-	return true
-}
-
-func (p *TLDRPlugin) squawkTLDR(r bot.Request) bool {
-	prompt := p.c.Get(templateKey, defaultTemplate)
-	p.b.Send(r.Conn, bot.Message, r.Msg.Channel, fmt.Sprintf(`Current prompt is: "%s"`,
-		strings.TrimSpace(prompt)))
-	return true
-}
-
-func (p *TLDRPlugin) resetTLDR(r bot.Request) bool {
-	p.c.Set(templateKey, defaultTemplate)
-	p.b.Send(r.Conn, bot.Message, r.Msg.Channel, fmt.Sprintf(`Set prompt to: "%s"`,
-		strings.TrimSpace(defaultTemplate)))
-	return true
-}
-
-func (p *TLDRPlugin) setTLDR(r bot.Request) bool {
-	prompt := r.Values["prompt"] + "\n"
-	p.c.Set(templateKey, prompt)
-	p.b.Send(r.Conn, bot.Message, r.Msg.Channel, fmt.Sprintf(`Set prompt to: "%s"`, strings.TrimSpace(prompt)))
-	return true
-}
-
-func (p *TLDRPlugin) getClient() (*openai.Client, error) {
-	token := p.c.Get("gpt.token", "")
-	if token == "" {
-		return nil, fmt.Errorf("no GPT token given")
-	}
-	return openai.NewClient(token)
-}
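betterTLDR, removed in the hunk above, renders a configurable text/template into a buffer and uses the result as the summarization prompt before prepending the backlog. Below is a self-contained sketch of just that templating step; the template text and data map are illustrative, not the plugin's defaults:

package main

import (
	"bytes"
	"fmt"
	"text/template"
)

func main() {
	// Parse the prompt template (the plugin reads this from config).
	promptTpl := template.Must(template.New("gptprompt").Parse(
		"Summarize the following conversation in {{.channel}}:\n"))

	// Render it into a buffer, as betterTLDR does before adding the backlog.
	var prompt bytes.Buffer
	if err := promptTpl.Execute(&prompt, map[string]string{"channel": "#general"}); err != nil {
		panic(err)
	}
	fmt.Print(prompt.String())
}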
@@ -20,27 +20,20 @@ func init() {
 	log.Logger = log.Logger.Output(zerolog.ConsoleWriter{Out: os.Stderr})
 }
 
-var ch = "test"
-
-func makeMessageBy(payload, by string) bot.Request {
+func makeMessageBy(payload, by string) (bot.Connector, bot.Kind, msg.Message) {
 	isCmd := strings.HasPrefix(payload, "!")
 	if isCmd {
 		payload = payload[1:]
 	}
-
-	return bot.Request{
-		Conn: &cli.CliPlugin{},
-		Kind: bot.Message,
-		Msg: msg.Message{
-			User:    &user.User{Name: by},
-			Channel: ch,
-			Body:    payload,
-			Command: isCmd,
-		},
+	return &cli.CliPlugin{}, bot.Message, msg.Message{
+		User:    &user.User{Name: by},
+		Channel: "test",
+		Body:    payload,
+		Command: isCmd,
 	}
 }
 
-func makeMessage(payload string) bot.Request {
+func makeMessage(payload string) (bot.Connector, bot.Kind, msg.Message) {
 	return makeMessageBy(payload, "tester")
 }
 
@@ -50,12 +43,51 @@ func setup(t *testing.T) (*TLDRPlugin, *bot.MockBot) {
 	return r, mb
 }
 
+func Test(t *testing.T) {
+	c, mb := setup(t)
+	res := c.message(makeMessage("The quick brown fox jumped over the lazy dog"))
+	res = c.message(makeMessage("The cow jumped over the moon"))
+	res = c.message(makeMessage("The little dog laughed to see such fun"))
+	res = c.message(makeMessage("tl;dr"))
+	assert.True(t, res)
+	assert.Len(t, mb.Messages, 1)
+}
+
+func TestDoubleUp(t *testing.T) {
+	c, mb := setup(t)
+	res := c.message(makeMessage("The quick brown fox jumped over the lazy dog"))
+	res = c.message(makeMessage("The cow jumped over the moon"))
+	res = c.message(makeMessage("The little dog laughed to see such fun"))
+	res = c.message(makeMessage("tl;dr"))
+	res = c.message(makeMessage("tl;dr"))
+	assert.True(t, res)
+	assert.Len(t, mb.Messages, 2)
+	assert.Contains(t, mb.Messages[1], "Slow down, cowboy.")
+}
+
+func TestAddHistoryLimitsMessages(t *testing.T) {
+	c, _ := setup(t)
+	max := 1000
+	c.bot.Config().Set("TLDR.HistorySize", strconv.Itoa(max))
+	c.bot.Config().Set("TLDR.KeepHours", "24")
+	t0 := time.Now().Add(-24 * time.Hour)
+	for i := 0; i < max*2; i++ {
+		hist := history{
+			body:      "test",
+			user:      "tester",
+			timestamp: t0.Add(time.Duration(i) * time.Second),
+		}
+		c.addHistory(hist)
+	}
+	assert.Len(t, c.history, max)
+}
+
 func TestAddHistoryLimitsDays(t *testing.T) {
 	c, _ := setup(t)
 	hrs := 24
 	expected := 24
-	c.b.Config().Set("TLDR.HistorySize", "100")
-	c.b.Config().Set("TLDR.KeepHours", strconv.Itoa(hrs))
+	c.bot.Config().Set("TLDR.HistorySize", "100")
+	c.bot.Config().Set("TLDR.KeepHours", strconv.Itoa(hrs))
 	t0 := time.Now().Add(-time.Duration(hrs*2) * time.Hour)
 	for i := 0; i < 48; i++ {
 		hist := history{
@@ -63,7 +95,7 @@ func TestAddHistoryLimitsDays(t *testing.T) {
 			user:      "tester",
 			timestamp: t0.Add(time.Duration(i) * time.Hour),
 		}
-		c.addHistory(ch, hist)
+		c.addHistory(hist)
 	}
-	assert.Len(t, c.history[ch], expected, "%d != %d", len(c.history), expected)
+	assert.Len(t, c.history, expected, "%d != %d", len(c.history), expected)
 }