ai-courseware/eino-project/internal/domain/llm/llm.go

45 lines
979 B
Go

package llm
import (
"eino-project/internal/conf"
"sync"
"github.com/cloudwego/eino/components/model"
)
// LLM provides lazily-initialized chat model instances for the
// application's two roles: general chat and intent recognition.
type LLM interface {
// Chat returns the chat model instance, constructing it on first use.
Chat() (model.ToolCallingChatModel, error)
// Intent returns the intent-recognition model instance, constructing it on first use.
Intent() (model.ToolCallingChatModel, error)
}
// llm implements LLM with lazily-initialized, cached Ollama model
// instances. Construction errors are cached alongside the models so that
// every caller — not only the first — observes a failed initialization.
type llm struct {
	cfg *conf.Bootstrap

	onceChat sync.Once
	chat     model.ToolCallingChatModel
	chatErr  error // error from newOllamaChatModel, cached for all callers

	onceIntent sync.Once
	intent     model.ToolCallingChatModel
	intentErr  error // error from newOllamaIntentModel, cached for all callers

	// NOTE(review): cache is populated in NewLLM but never read by any
	// method in this file — confirm whether other files in the package
	// use it; if not, it can be removed.
	cache map[string]model.ToolCallingChatModel
}

// NewLLM returns an LLM backed by the given configuration. Model
// instances are created on first use, not here.
func NewLLM(cfg *conf.Bootstrap) LLM {
	return &llm{cfg: cfg, cache: make(map[string]model.ToolCallingChatModel)}
}

// Chat returns the Ollama chat model instance, constructing it exactly
// once. BUG FIX: the original captured the construction error in a
// function-local variable, so after the first call the error was always
// nil — later callers received a nil model with a nil error. The error
// is now stored in r.chatErr and returned on every call.
func (r *llm) Chat() (model.ToolCallingChatModel, error) {
	r.onceChat.Do(func() {
		r.chat, r.chatErr = newOllamaChatModel(r.cfg)
	})
	return r.chat, r.chatErr
}

// Intent returns the Ollama intent-recognition model instance,
// constructing it exactly once. Same error-caching semantics as Chat.
func (r *llm) Intent() (model.ToolCallingChatModel, error) {
	r.onceIntent.Do(func() {
		r.intent, r.intentErr = newOllamaIntentModel(r.cfg)
	})
	return r.intent, r.intentErr
}