mirror of https://github.com/velour/catbase.git
Compare commits
No commits in common. "c089a80ffcf2368e09e45a1f2675d6762242127e" and "852239e89dc0b5d302daa9eeaa4eacd75d932456" have entirely different histories.
c089a80ffc...852239e89d

@@ -82,7 +82,7 @@ func TestBabblerNothingSaid(t *testing.T) {
 	}
 }
 
-func testBabbler(t *testing.T) {
+func TestBabbler(t *testing.T) {
 	mb := bot.NewMockBot()
 	bp := newBabblerPlugin(mb)
 	assert.NotNil(t, bp)
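
The rename in the babbler test hunk matters for test discovery: `go test` only runs exported functions named `TestXxx` that take a `*testing.T`, so the lowercase `testBabbler` compiles but never executes. A minimal sketch (the package name is an assumption; the plugin helpers are omitted):

```go
package babbler // package name assumed to match the test file in the hunk

import "testing"

// testBabbler compiles but is never run: the `go test` runner only executes
// exported functions whose names match TestXxx and that take a *testing.T.
// Lowercasing a test's name is a common way to park it temporarily.
func testBabbler(t *testing.T) {
	t.Log("invisible to the test runner")
}

// TestBabbler is discovered and run by `go test ./...`.
func TestBabbler(t *testing.T) {
	t.Log("runs")
}
```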

@@ -19,10 +19,6 @@ import (
 	"github.com/james-bowman/nlp"
 )
 
-const templateKey = "tldr.prompttemplate"
-
-var defaultTemplate = "Summarize the following conversation:\n"
-
 type TLDRPlugin struct {
 	b bot.Bot
 	c *config.Config
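
One side of this hunk keeps the prompt key and default as package-level declarations; the other inlines the literals at each call site (see the `p.c.Get` change further down). A minimal sketch of why the named declarations are convenient, with a hypothetical `getter` interface standing in for the small slice of `*config.Config` used here:

```go
package main

import "fmt"

// The "-" side of the hunk keeps the key and default as package-level
// declarations that the Get/Set calls in the later hunks refer to by name.
const templateKey = "tldr.prompttemplate"

var defaultTemplate = "Summarize the following conversation:\n"

// getter is a hypothetical stand-in for the part of *config.Config used here.
type getter interface {
	Get(key, def string) string
}

// currentPrompt reads the prompt through one constant key and one shared
// default, so no call site can misspell the string literal.
func currentPrompt(c getter) string {
	return c.Get(templateKey, defaultTemplate)
}

// nilConfig always falls back to the default; enough to run the sketch.
type nilConfig struct{}

func (nilConfig) Get(_, def string) string { return def }

func main() {
	fmt.Printf("%q\n", currentPrompt(nilConfig{}))
}
```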

@@ -59,25 +55,7 @@ func (p *TLDRPlugin) register() {
 		},
-		{
-			Kind: bot.Message, IsCmd: true,
-			Regex: regexp.MustCompile(`tl;?dr-prompt$`),
-			HelpText: "Get the tl;dr prompt",
-			Handler: p.squawkTLDR,
-		},
-		{
-			Kind: bot.Message, IsCmd: true,
-			Regex: regexp.MustCompile(`tl;?dr-prompt reset`),
-			HelpText: "Reset the tl;dr prompt",
-			Handler: p.resetTLDR,
-		},
-		{
-			Kind: bot.Message, IsCmd: true,
-			Regex: regexp.MustCompile(`tl;?dr-prompt (?P<prompt>.*)`),
-			HelpText: "Set the tl;dr prompt",
-			Handler: p.setTLDR,
-		},
 		{
 			Kind: bot.Message, IsCmd: true,
-			Regex: regexp.MustCompile(`tl;?dr`),
+			Regex: regexp.MustCompile(`tl;dr`),
 			HelpText: "Get a summary of the channel",
 			Handler: p.betterTLDR,
 		},
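
Two things worth noting in the handler table: the "-" side registers the more specific `tl;?dr-prompt` commands ahead of the bare summary command, presumably so they match first, and the summary regex changes from `tl;?dr` (semicolon optional) to `tl;dr` (semicolon required). The sketch below shows that matching difference and how a named capture like `(?P<prompt>.*)` can be read out; that this is roughly how the framework fills `r.Values["prompt"]` is an assumption on my part:

```go
package main

import (
	"fmt"
	"regexp"
)

func main() {
	// `tl;?dr` (the "-" side) makes the semicolon optional; `tl;dr` (the "+" side) requires it.
	loose := regexp.MustCompile(`tl;?dr`)
	strict := regexp.MustCompile(`tl;dr`)
	fmt.Println(loose.MatchString("tldr"), strict.MatchString("tldr"))   // true false
	fmt.Println(loose.MatchString("tl;dr"), strict.MatchString("tl;dr")) // true true

	// The set-prompt command uses a named group; pulling it out by name is
	// roughly what ends up in r.Values["prompt"] for setTLDR.
	setRe := regexp.MustCompile(`tl;?dr-prompt (?P<prompt>.*)`)
	if m := setRe.FindStringSubmatch("tl;dr-prompt Summarize tersely:"); m != nil {
		prompt := m[setRe.SubexpIndex("prompt")]
		fmt.Printf("prompt = %q\n", prompt)
	}
}
```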

@@ -244,7 +222,7 @@ func (p *TLDRPlugin) betterTLDR(r bot.Request) bool {
 		p.b.Send(r.Conn, bot.Message, r.Msg.Channel, "Couldn't fetch an OpenAI client")
 		return true
 	}
-	promptConfig := p.c.Get(templateKey, defaultTemplate)
+	promptConfig := p.c.Get("tldr.prompttemplate", "Summarize the following conversation:\n")
 	promptTpl := template.Must(template.New("gptprompt").Parse(promptConfig))
 	prompt := bytes.Buffer{}
 	data := p.c.GetMap("tldr.promptdata", map[string]string{})
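
In both versions the configured prompt string is parsed with `text/template` and rendered into a `bytes.Buffer`, with extra data coming from the `tldr.promptdata` config map. A self-contained sketch; the `{{.channel}}` field is invented here purely to show data flowing through the template:

```go
package main

import (
	"bytes"
	"fmt"
	"text/template"
)

func main() {
	// Mirrors the hunk: the configured prompt string is parsed as a template
	// and rendered into a buffer. The real keys come from "tldr.promptdata";
	// "channel" is an assumed example key.
	promptConfig := "Summarize the following conversation for {{.channel}}:\n"
	promptTpl := template.Must(template.New("gptprompt").Parse(promptConfig))

	prompt := bytes.Buffer{}
	data := map[string]string{"channel": "#general"}
	if err := promptTpl.Execute(&prompt, data); err != nil {
		fmt.Println("template error:", err)
		return
	}
	fmt.Print(prompt.String())
}
```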

@@ -274,27 +252,6 @@ func (p *TLDRPlugin) betterTLDR(r bot.Request) bool {
 	return true
 }
 
-func (p *TLDRPlugin) squawkTLDR(r bot.Request) bool {
-	prompt := p.c.Get(templateKey, defaultTemplate)
-	p.b.Send(r.Conn, bot.Message, r.Msg.Channel, fmt.Sprintf(`Current prompt is: "%s"`,
-		strings.TrimSpace(prompt)))
-	return true
-}
-
-func (p *TLDRPlugin) resetTLDR(r bot.Request) bool {
-	p.c.Set(templateKey, defaultTemplate)
-	p.b.Send(r.Conn, bot.Message, r.Msg.Channel, fmt.Sprintf(`Set prompt to: "%s"`,
-		strings.TrimSpace(defaultTemplate)))
-	return true
-}
-
-func (p *TLDRPlugin) setTLDR(r bot.Request) bool {
-	prompt := r.Values["prompt"] + "\n"
-	p.c.Set(templateKey, prompt)
-	p.b.Send(r.Conn, bot.Message, r.Msg.Channel, fmt.Sprintf(`Set prompt to: "%s"`, strings.TrimSpace(prompt)))
-	return true
-}
-
 func (p *TLDRPlugin) getClient() (*openai.Client, error) {
 	token := p.c.Get("gpt.token", "")
 	if token == "" {
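
The removed `squawkTLDR`/`resetTLDR`/`setTLDR` handlers are a get/set/reset round-trip on the prompt key: `setTLDR` stores the user's text with a trailing newline, and the readers echo it back through `strings.TrimSpace`. A sketch of that round-trip against a hypothetical in-memory config (`memConfig` is not part of catbase; the real plugin goes through `p.c.Get`/`p.c.Set`):

```go
package main

import (
	"fmt"
	"strings"
)

// memConfig is a hypothetical stand-in for the plugin's config store.
type memConfig map[string]string

func (m memConfig) Get(key, def string) string {
	if v, ok := m[key]; ok {
		return v
	}
	return def
}

func (m memConfig) Set(key, val string) { m[key] = val }

func main() {
	const templateKey = "tldr.prompttemplate"
	defaultTemplate := "Summarize the following conversation:\n"
	cfg := memConfig{}

	// setTLDR: store the user's prompt with a trailing newline...
	cfg.Set(templateKey, "Be terse.\n")
	// ...but echo it back trimmed, as the fmt.Sprintf/TrimSpace pair in the hunk does.
	fmt.Printf("Set prompt to: %q\n", strings.TrimSpace(cfg.Get(templateKey, defaultTemplate)))

	// resetTLDR: write the default back over whatever was set.
	cfg.Set(templateKey, defaultTemplate)
	// squawkTLDR: report whatever is currently stored.
	fmt.Printf("Current prompt is: %q\n", strings.TrimSpace(cfg.Get(templateKey, defaultTemplate)))
}
```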