llm: timeout old messages

This commit is contained in:
Chris Sexton 2024-09-27 17:12:47 -04:00
parent 97b756245e
commit 0a12e796d0
2 changed files with 36 additions and 5 deletions

View File

@ -49,7 +49,7 @@ func (p *LLMPlugin) gemini(msg string) (chatEntry, error) {
ctx := context.Background() ctx := context.Background()
cs.History = []*genai.Content{} cs.History = []*genai.Content{}
for _, h := range p.chatHistory { for _, h := range p.getChatHistory() {
cs.History = append(cs.History, &genai.Content{ cs.History = append(cs.History, &genai.Content{
Parts: []genai.Part{ Parts: []genai.Part{
genai.Text(h.Content), genai.Text(h.Content),

View File

@ -8,6 +8,7 @@ import (
"github.com/velour/catbase/bot" "github.com/velour/catbase/bot"
"github.com/velour/catbase/config" "github.com/velour/catbase/config"
"regexp" "regexp"
"time"
) )
const gpt3URL = "https://api.openai.com/v1/engines/%s/completions" const gpt3URL = "https://api.openai.com/v1/engines/%s/completions"
@ -27,6 +28,7 @@ type LLMPlugin struct {
type chatEntry struct { type chatEntry struct {
Role string `json:"role"` Role string `json:"role"`
Content string `json:"content"` Content string `json:"content"`
TS time.Time `json:"ts"`
} }
func New(b bot.Bot) *LLMPlugin { func New(b bot.Bot) *LLMPlugin {
@ -74,6 +76,35 @@ func (p *LLMPlugin) setPromptMessage(r bot.Request) bool {
return true return true
} }
// defaultDuration is the fallback chat-history horizon used when the
// "gemini.horizon" config value is missing or unparsable.
const defaultDuration = 15 * time.Minute

// getChatHistory returns only the chat entries newer than the configured
// horizon ("gemini.horizon", default 15m), so stale conversation context
// is not replayed to the LLM. Older entries are filtered out; the
// underlying p.chatHistory slice is left untouched.
func (p *LLMPlugin) getChatHistory() []chatEntry {
	horizonTxt := p.c.Get("gemini.horizon", defaultDuration.String())
	dur, err := time.ParseDuration(horizonTxt)
	if err != nil {
		// Malformed config value: fall back to the default horizon
		// rather than failing the whole request.
		dur = defaultDuration
	}
	// Compute the cutoff once, instead of calling time.Now() on every
	// loop iteration.
	cutoff := time.Now().Add(-dur)
	output := make([]chatEntry, 0, len(p.chatHistory))
	for _, e := range p.chatHistory {
		if e.TS.After(cutoff) {
			output = append(output, e)
		}
	}
	return output
}
// addChatHistoryUser records content as a chat entry authored by the user
// and stores it via addChatHistory (which timestamps it).
func (p *LLMPlugin) addChatHistoryUser(content string) {
	entry := chatEntry{Role: "user", Content: content}
	p.addChatHistory(entry)
}
// addChatHistory stamps the entry with the current time and appends it to
// the plugin's in-memory chat history.
func (p *LLMPlugin) addChatHistory(entry chatEntry) {
	entry.TS = time.Now()
	p.chatHistory = append(p.chatHistory, entry)
}
func (p *LLMPlugin) geminiChatMessage(r bot.Request) bool { func (p *LLMPlugin) geminiChatMessage(r bot.Request) bool {
if p.geminiClient == nil && p.geminiConnect() != nil { if p.geminiClient == nil && p.geminiConnect() != nil {
log.Error().Msgf("Could not connect to Gemini") log.Error().Msgf("Could not connect to Gemini")
@ -85,8 +116,8 @@ func (p *LLMPlugin) geminiChatMessage(r bot.Request) bool {
p.b.Send(r.Conn, bot.Message, r.Msg.Channel, fmt.Sprintf("Problem with Gemini: %s", err)) p.b.Send(r.Conn, bot.Message, r.Msg.Channel, fmt.Sprintf("Problem with Gemini: %s", err))
return true return true
} }
p.chatHistory = append(p.chatHistory, chatEntry{"User", r.Values["text"]}) p.addChatHistoryUser(r.Values["text"])
p.chatHistory = append(p.chatHistory, chatResp) p.addChatHistory(chatResp)
p.b.Send(r.Conn, bot.Message, r.Msg.Channel, chatResp.Content) p.b.Send(r.Conn, bot.Message, r.Msg.Channel, chatResp.Content)
log.Info().Msgf("Successfully used Gemini") log.Info().Msgf("Successfully used Gemini")
return true return true