gpt: Rename package to llm

Author: Chris Sexton
Date: 2024-05-11 13:56:29 -04:00
parent d6bb334a14
commit ed4136aa60
4 changed files with 17 additions and 17 deletions

File 1 of 4

@@ -1,4 +1,4 @@
-package gpt
+package llm
 
 import (
 	"context"
@@ -10,7 +10,7 @@ import (
 var session openai.ChatSession
 var client *openai.Client
 
-func (p *GPTPlugin) getClient() (*openai.Client, error) {
+func (p *LLMPlugin) getClient() (*openai.Client, error) {
 	token := p.c.Get("gpt.token", "")
 	if token == "" {
 		return nil, fmt.Errorf("no GPT token given")
@@ -18,7 +18,7 @@ func (p *GPTPlugin) getClient() (*openai.Client, error) {
 	return openai.NewClient(token)
 }
 
-func (p *GPTPlugin) chatGPT(request string) (string, error) {
+func (p *LLMPlugin) chatGPT(request string) (string, error) {
 	if client == nil {
 		if err := p.setPrompt(p.getDefaultPrompt()); err != nil {
 			return "", err
@@ -32,11 +32,11 @@ func (p *GPTPlugin) chatGPT(request string) (string, error) {
 	return session.Complete(context.Background(), request)
 }
 
-func (p *GPTPlugin) getDefaultPrompt() string {
+func (p *LLMPlugin) getDefaultPrompt() string {
 	return p.c.Get("gpt.prompt", "")
 }
 
-func (p *GPTPlugin) setPrompt(prompt string) error {
+func (p *LLMPlugin) setPrompt(prompt string) error {
 	var err error
 	client, err = p.getClient()
 	if err != nil {

File 2 of 4

@@ -1,4 +1,4 @@
-package gpt
+package llm
 
 import (
 	"errors"
@@ -13,7 +13,7 @@ import (
 const gpt3URL = "https://api.openai.com/v1/engines/%s/completions"
 const gpt3ModURL = "https://api.openai.com/v1/moderations"
 
-type GPTPlugin struct {
+type LLMPlugin struct {
 	b bot.Bot
 	c *config.Config
 	h bot.HandlerTable
@@ -27,8 +27,8 @@ type chatEntry struct {
 	Content string `json:"content"`
 }
 
-func New(b bot.Bot) *GPTPlugin {
-	p := &GPTPlugin{
+func New(b bot.Bot) *LLMPlugin {
+	p := &LLMPlugin{
 		b: b,
 		c: b.Config(),
 	}
@@ -36,7 +36,7 @@ func New(b bot.Bot) *GPTPlugin {
 	return p
 }
 
-func (p *GPTPlugin) register() {
+func (p *LLMPlugin) register() {
 	p.h = bot.HandlerTable{
 		{
 			Kind: bot.Message, IsCmd: true,
@@ -60,7 +60,7 @@ func (p *GPTPlugin) register() {
 	p.b.RegisterTable(p, p.h)
 }
 
-func (p *GPTPlugin) setPromptMessage(r bot.Request) bool {
+func (p *LLMPlugin) setPromptMessage(r bot.Request) bool {
 	prompt := r.Values["text"]
 	if err := p.setPrompt(prompt); err != nil {
 		resp := fmt.Sprintf("Error: %s", err)
@@ -70,7 +70,7 @@ func (p *GPTPlugin) setPromptMessage(r bot.Request) bool {
 	return true
 }
 
-func (p *GPTPlugin) chatMessage(r bot.Request) bool {
+func (p *LLMPlugin) chatMessage(r bot.Request) bool {
 	if slices.Contains(p.c.GetArray("gpt.silence", []string{}), r.Msg.Channel) {
 		log.Debug().Msgf("%s silenced", r.Msg.Channel)
 		return true
@@ -78,7 +78,7 @@ func (p *GPTPlugin) chatMessage(r bot.Request) bool {
 	return p.chatMessageForce(r)
 }
 
-func (p *GPTPlugin) chatMessageForce(r bot.Request) bool {
+func (p *LLMPlugin) chatMessageForce(r bot.Request) bool {
 	p.chatHistory = append(p.chatHistory, chatEntry{
 		Role:    "user",
 		Content: r.Values["text"],

File 3 of 4

@@ -1,4 +1,4 @@
-package gpt
+package llm
 
 import (
 	"bytes"
@@ -13,7 +13,7 @@ import (
 var InstanceNotFoundError = errors.New("instance not found")
 var empty = chatEntry{}
 
-func (g *GPTPlugin) llama() (chatEntry, error) {
+func (g *LLMPlugin) llama() (chatEntry, error) {
 	llamaURL := g.c.Get("gpt.llamaurl", "")
 	if llamaURL == "" {
 		return empty, fmt.Errorf("could not find llama url")

File 4 of 4

@@ -22,10 +22,10 @@ import (
 	"github.com/velour/catbase/plugins/giphy"
 	"github.com/velour/catbase/plugins/git"
 	"github.com/velour/catbase/plugins/goals"
-	"github.com/velour/catbase/plugins/gpt"
 	"github.com/velour/catbase/plugins/inventory"
 	"github.com/velour/catbase/plugins/last"
 	"github.com/velour/catbase/plugins/leftpad"
+	"github.com/velour/catbase/plugins/llm"
 	"github.com/velour/catbase/plugins/mayi"
 	"github.com/velour/catbase/plugins/meme"
 	"github.com/velour/catbase/plugins/nerdepedia"
@@ -101,7 +101,7 @@ func Register(b bot.Bot) {
 	b.AddPlugin(topic.New(b))
 	b.AddPlugin(talker.New(b))
 	b.AddPlugin(fact.New(b))
-	b.AddPlugin(gpt.New(b))
+	b.AddPlugin(llm.New(b))
 	// catches anything left, will always return true
 	b.AddPlugin(deadend.New(b))
 }