package ai

import (
	"context"
	"fmt"
	"time"

	"github.com/cloudwego/eino-ext/components/model/ollama"
	"github.com/go-kratos/kratos/v2/log"
	"github.com/google/wire"

	"eino-project/internal/conf"
	contextpkg "eino-project/internal/context"
	"eino-project/internal/monitor"
	"eino-project/internal/vector"
)

// ProviderSet is ai providers.
var ProviderSet = wire.NewSet(NewAIServiceFromConfig)

// Defaults applied when the bootstrap configuration does not override them.
const (
	defaultChatModelName   = "deepseek-v3.1:671b-cloud"
	defaultIntentModelName = "qwen3:8b"
	defaultTimeout         = 60 * time.Second
)

// NewAIServiceFromConfig builds an AIService from the bootstrap configuration.
//
// Model selection: c.Ai.Ollama.Models[0] overrides the chat model and
// Models[1] overrides the intent model; empty or missing entries fall back
// to the package defaults above. The request timeout defaults to 60s unless
// c.Ai.Ollama.Timeout is set.
//
// Returns an error when the AI/Ollama configuration section is absent or
// when either Ollama model cannot be created.
func NewAIServiceFromConfig(c *conf.Bootstrap, logger log.Logger, knowledgeSearcher vector.KnowledgeSearcher, contextManager contextpkg.ContextManager, mon monitor.Monitor) (AIService, error) {
	if c.Ai == nil || c.Ai.Ollama == nil {
		return nil, fmt.Errorf("AI configuration is missing")
	}

	// Resolve the request timeout, defaulting to 60 seconds.
	timeout := defaultTimeout
	if c.Ai.Ollama.Timeout != nil {
		timeout = c.Ai.Ollama.Timeout.AsDuration()
	}

	// Resolve model names: configuration entries win over the defaults,
	// but blank entries are treated as "not configured".
	chatModelName := defaultChatModelName
	intentModelName := defaultIntentModelName
	models := c.Ai.Ollama.Models
	if len(models) > 0 && models[0] != "" {
		chatModelName = models[0]
	}
	if len(models) > 1 && models[1] != "" {
		intentModelName = models[1]
	}

	// Chat model (satisfies the BaseChatModel interface).
	chatModel, err := newOllamaModel(c.Ai.Ollama.Endpoint, chatModelName, timeout)
	if err != nil {
		return nil, fmt.Errorf("failed to create chat model: %w", err)
	}

	// Intent-recognition model (satisfies the BaseChatModel interface).
	intentModel, err := newOllamaModel(c.Ai.Ollama.Endpoint, intentModelName, timeout)
	if err != nil {
		return nil, fmt.Errorf("failed to create intent model: %w", err)
	}

	svc := NewAIService(logger, chatModel, intentModel, knowledgeSearcher, contextManager)

	// Inject lightweight monitoring so the service can record LLM usage.
	// Best-effort: skipped silently if the concrete type ever changes.
	if m, ok := svc.(*aiService); ok {
		m.monitor = mon
		m.chatModelName = chatModelName
	}

	return svc, nil
}

// newOllamaModel creates an Ollama chat model for the given endpoint, model
// name, and request timeout. Shared by the chat and intent model setup.
func newOllamaModel(endpoint, model string, timeout time.Duration) (*ollama.ChatModel, error) {
	return ollama.NewChatModel(context.Background(), &ollama.ChatModelConfig{
		BaseURL: endpoint,
		Timeout: timeout,
		Model:   model,
	})
}