llm: cull history and use a prompt

This commit is contained in:
Chris Sexton 2024-05-11 14:29:43 -04:00
parent 276f6c188e
commit f5fc3b542e
2 changed files with 18 additions and 7 deletions

View File

@ -38,6 +38,12 @@ func New(b bot.Bot) *LLMPlugin {
func (p *LLMPlugin) register() {
	p.h = bot.HandlerTable{
{
Kind: bot.Message, IsCmd: true,
Regex: regexp.MustCompile(`(?is)^gpt-prompt: (?P<text>.*)`),
HelpText: "set the ChatGPT prompt",
Handler: p.setPromptMessage,
},
		{
			Kind: bot.Message, IsCmd: true,
			Regex: regexp.MustCompile(`(?is)^llm (?P<text>.*)`),
@ -56,12 +62,6 @@ func (p *LLMPlugin) register() {
			HelpText: "chat completion",
			Handler: p.chatMessageForce,
		},
{
Kind: bot.Message, IsCmd: true,
Regex: regexp.MustCompile(`(?is)^gpt-prompt: (?P<text>.*)`),
HelpText: "set the ChatGPT prompt",
Handler: p.setPromptMessage,
},
	}
	p.b.RegisterTable(p, p.h)
}
@ -89,6 +89,10 @@ func (p *LLMPlugin) chatMessageForce(r bot.Request) bool {
		Role: "user",
		Content: r.Values["text"],
	})
maxHist := p.c.GetInt("gpt.maxhist", 10)
if len(p.chatHistory) > maxHist {
p.chatHistory = p.chatHistory[len(p.chatHistory)-maxHist:]
}
	chatResp, err := p.llama()
	if err == nil {
		p.chatHistory = append(p.chatHistory, chatResp)

View File

@ -23,9 +23,16 @@ func (g *LLMPlugin) llama() (chatEntry, error) {
		return chatEntry{}, fmt.Errorf("could not find llama model")
	}
prompt := g.c.Get("gpt.lastprompt", g.c.Get("gpt.prompt", ""))
hist := []chatEntry{{
Role: "system",
Content: prompt,
}}
hist = append(hist, g.chatHistory...)
	req := llamaRequest{
		Model: llamaModel,
		Messages: g.chatHistory,
		Messages: hist,
		Stream: false,
	}