mirror of https://github.com/velour/catbase.git
Compare commits
4 Commits
852239e89d ... c089a80ffc
Author | SHA1 | Date
---|---|---
Chris Sexton | c089a80ffc |
Chris Sexton | 3ff95d3c85 |
Chris Sexton | 1743b65242 |
Chris Sexton | 0397fa2897 |
@@ -82,7 +82,7 @@ func TestBabblerNothingSaid(t *testing.T) {
 	}
 }
 
-func TestBabbler(t *testing.T) {
+func testBabbler(t *testing.T) {
 	mb := bot.NewMockBot()
 	bp := newBabblerPlugin(mb)
 	assert.NotNil(t, bp)
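Renaming `TestBabbler` to `testBabbler` unexports the function, so `go test` no longer discovers or runs it; the body is left untouched. A purely illustrative alternative (not what this commit does) that keeps the test visible in test output but skipped would be:

```go
// Hypothetical sketch: keep the exported Test name and skip explicitly
// instead of hiding the test by unexporting it.
func TestBabbler(t *testing.T) {
	t.Skip("temporarily disabled") // hypothetical skip reason
	mb := bot.NewMockBot()
	bp := newBabblerPlugin(mb)
	assert.NotNil(t, bp)
}
```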
@@ -19,6 +19,10 @@ import (
 	"github.com/james-bowman/nlp"
 )
 
+const templateKey = "tldr.prompttemplate"
+
+var defaultTemplate = "Summarize the following conversation:\n"
+
 type TLDRPlugin struct {
 	b bot.Bot
 	c *config.Config
@@ -55,7 +59,25 @@ func (p *TLDRPlugin) register() {
 		},
 		{
 			Kind: bot.Message, IsCmd: true,
-			Regex: regexp.MustCompile(`tl;dr`),
+			Regex: regexp.MustCompile(`tl;?dr-prompt$`),
+			HelpText: "Get the tl;dr prompt",
+			Handler: p.squawkTLDR,
+		},
+		{
+			Kind: bot.Message, IsCmd: true,
+			Regex: regexp.MustCompile(`tl;?dr-prompt reset`),
+			HelpText: "Reset the tl;dr prompt",
+			Handler: p.resetTLDR,
+		},
+		{
+			Kind: bot.Message, IsCmd: true,
+			Regex: regexp.MustCompile(`tl;?dr-prompt (?P<prompt>.*)`),
+			HelpText: "Set the tl;dr prompt",
+			Handler: p.setTLDR,
+		},
+		{
+			Kind: bot.Message, IsCmd: true,
+			Regex: regexp.MustCompile(`tl;?dr`),
 			HelpText: "Get a summary of the channel",
 			Handler: p.betterTLDR,
 		},
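The three prompt-management commands are registered ahead of the bare `tl;?dr` summary command, which matters if handlers are tried in registration order (an assumption about the dispatcher, not shown in this diff): otherwise a `tl;dr-prompt …` message would be swallowed by the catch-all summary regex. The `(?P<prompt>.*)` named group is presumably what surfaces as `r.Values["prompt"]` in `setTLDR` below. A standalone sketch of that named-capture extraction, using only the standard library rather than catbase's actual dispatch code:

```go
package main

import (
	"fmt"
	"regexp"
)

// Standalone illustration: turn a named capture group like (?P<prompt>.*)
// into the key/value map the handlers read from r.Values.
func main() {
	re := regexp.MustCompile(`tl;?dr-prompt (?P<prompt>.*)`)
	msg := "tldr-prompt Summarize the following conversation in one sentence:"

	values := map[string]string{}
	if m := re.FindStringSubmatch(msg); m != nil {
		for i, name := range re.SubexpNames() {
			if name != "" {
				values[name] = m[i]
			}
		}
	}
	fmt.Println(values["prompt"]) // Summarize the following conversation in one sentence:
}
```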
@@ -222,7 +244,7 @@ func (p *TLDRPlugin) betterTLDR(r bot.Request) bool {
 		p.b.Send(r.Conn, bot.Message, r.Msg.Channel, "Couldn't fetch an OpenAI client")
 		return true
 	}
-	promptConfig := p.c.Get("tldr.prompttemplate", "Summarize the following conversation:\n")
+	promptConfig := p.c.Get(templateKey, defaultTemplate)
 	promptTpl := template.Must(template.New("gptprompt").Parse(promptConfig))
 	prompt := bytes.Buffer{}
 	data := p.c.GetMap("tldr.promptdata", map[string]string{})
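For the summary itself, the configured template string is parsed with `text/template` and executed against the `tldr.promptdata` map to build the final prompt. A minimal, self-contained sketch of just that step (the map key below is made up for illustration; the default template references no fields at all):

```go
package main

import (
	"bytes"
	"fmt"
	"text/template"
)

// Sketch of the template step only: parse the configured prompt template and
// execute it against a string map, mirroring promptTpl/prompt/data above.
func main() {
	promptConfig := "Summarize the following conversation:\n" // defaultTemplate
	data := map[string]string{"Channel": "#general"}          // hypothetical promptdata entry

	promptTpl := template.Must(template.New("gptprompt").Parse(promptConfig))
	prompt := bytes.Buffer{}
	if err := promptTpl.Execute(&prompt, data); err != nil {
		panic(err)
	}
	fmt.Print(prompt.String()) // "Summarize the following conversation:\n"
}
```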
@@ -252,6 +274,27 @@ func (p *TLDRPlugin) betterTLDR(r bot.Request) bool {
 	return true
 }
 
+func (p *TLDRPlugin) squawkTLDR(r bot.Request) bool {
+	prompt := p.c.Get(templateKey, defaultTemplate)
+	p.b.Send(r.Conn, bot.Message, r.Msg.Channel, fmt.Sprintf(`Current prompt is: "%s"`,
+		strings.TrimSpace(prompt)))
+	return true
+}
+
+func (p *TLDRPlugin) resetTLDR(r bot.Request) bool {
+	p.c.Set(templateKey, defaultTemplate)
+	p.b.Send(r.Conn, bot.Message, r.Msg.Channel, fmt.Sprintf(`Set prompt to: "%s"`,
+		strings.TrimSpace(defaultTemplate)))
+	return true
+}
+
+func (p *TLDRPlugin) setTLDR(r bot.Request) bool {
+	prompt := r.Values["prompt"] + "\n"
+	p.c.Set(templateKey, prompt)
+	p.b.Send(r.Conn, bot.Message, r.Msg.Channel, fmt.Sprintf(`Set prompt to: "%s"`, strings.TrimSpace(prompt)))
+	return true
+}
+
 func (p *TLDRPlugin) getClient() (*openai.Client, error) {
 	token := p.c.Get("gpt.token", "")
 	if token == "" {
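Taken together, the new handlers give the plugin four commands (the command prefix depends on the bot's configuration, and the `;` is optional in every regex): `tl;dr-prompt` echoes the stored prompt, `tl;dr-prompt reset` restores `defaultTemplate`, `tl;dr-prompt <text>` stores a new prompt with a trailing newline appended to match the default's shape, and `tl;dr` still produces the channel summary.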