Compare commits

..

No commits in common. "b57ad12a68ddbc2bedeb1e4381eea0aaa049b66a" and "3058a1e6b93246290bae29b6514f741d29bdb53a" have entirely different histories.

9 changed files with 43 additions and 97 deletions

35
config.yaml Normal file
View File

@ -0,0 +1,35 @@
server:
port: 8090
host: "0.0.0.0"
ollama:
base_url: "http://localhost:11434"
model: "qwen3:8b"
timeout: 30s
# 模型参数
modelParam:
temperature: 0.7
max_tokens: 2000
tools:
weather:
enabled: true
calculator:
enabled: true
zltxOrderDetail: # 直连天下订单详情
enabled: true
base_url: https://gateway.dev.cdlsxd.cn
biz_system: "zltx"
zltxOrderLog: # 直连天下订单日志
enabled: true
base_url: https://gateway.dev.cdlsxd.cn
biz_system: "zltx"
knowledge: # 知识库
enabled: true
base_url: http://117.175.169.61:8080
api_key: sk-EfnUANKMj3DUOiEPJZ5xS8SGMsbO6be_qYAg9uZ8T3zyoFM-
logging:
level: "info"
format: "json"

View File

@ -5,13 +5,12 @@ server:
ollama: ollama:
base_url: "http://127.0.0.1:11434" base_url: "http://127.0.0.1:11434"
model: "qwen3:8b" model: "qwen3-coder:480b-cloud"
timeout: "120s" timeout: "120s"
level: "info" level: "info"
format: "json" format: "json"
sys: sys:
session_len: 6 session_len: 6
channel_pool_len: 100 channel_pool_len: 100

View File

@ -18,7 +18,6 @@ type LangChainService struct {
func NewLangChainGenerate( func NewLangChainGenerate(
client *utils_langchain.UtilLangChain, client *utils_langchain.UtilLangChain,
) *LangChainService { ) *LangChainService {
return &LangChainService{ return &LangChainService{
client: client, client: client,
} }

View File

@ -194,12 +194,10 @@ func (r *AiRouterBiz) recognize(ctx context.Context, requireData *entitys.Requir
Content: "意图识别结束", Content: "意图识别结束",
Type: entitys.ResponseLog, Type: entitys.ResponseLog,
} }
var match entitys.Match if err = json.Unmarshal([]byte(recognizeMsg), requireData.Match); err != nil {
if err = json.Unmarshal([]byte(recognizeMsg), &match); err != nil {
err = errors.SysErr("数据结构错误:%v", err.Error()) err = errors.SysErr("数据结构错误:%v", err.Error())
return return
} }
requireData.Match = &match
return return
} }
@ -267,7 +265,7 @@ func (r *AiRouterBiz) handleMatch(ctx context.Context, requireData *entitys.Requ
if !requireData.Match.IsMatch { if !requireData.Match.IsMatch {
requireData.Ch <- entitys.Response{ requireData.Ch <- entitys.Response{
Index: "", Index: "",
Content: requireData.Match.Chat, Content: requireData.Match.Reasoning,
Type: entitys.ResponseText, Type: entitys.ResponseText,
} }
return return

View File

@ -48,10 +48,9 @@ func MsgSet(msgType ResponseType, msg string, done bool) []byte {
func MsgSend(c *websocket.Conn, msg Response) error { func MsgSend(c *websocket.Conn, msg Response) error {
// 检查上下文是否已取消 // 检查上下文是否已取消
if msg.Type == ResponseText {
}
jsonByte, _ := json.Marshal(msg) jsonByte, _ := json.Marshal(msg)
return c.WriteMessage(websocket.TextMessage, jsonByte) return c.WriteMessage(websocket.TextMessage, jsonByte)
} }

View File

@ -121,7 +121,6 @@ type Match struct {
History []byte `json:"history"` History []byte `json:"history"`
UserInput string `json:"user_input"` UserInput string `json:"user_input"`
Auth string `json:"auth"` Auth string `json:"auth"`
Chat string `json:"chat"`
} }
type ChatHis struct { type ChatHis struct {
SessionId string `json:"session_id"` SessionId string `json:"session_id"`

View File

@ -45,10 +45,10 @@ func (c *Client) ToolSelect(ctx context.Context, messages []api.Message, tools [
Model: c.config.Model, Model: c.config.Model,
Messages: messages, Messages: messages,
Stream: new(bool), // 设置为false不使用流式响应 Stream: new(bool), // 设置为false不使用流式响应
Think: &api.ThinkValue{Value: false}, Think: &api.ThinkValue{Value: true},
Tools: tools, //Tools: tools,
} }
c.client.ListRunning()
err = c.client.Chat(ctx, req, func(resp api.ChatResponse) error { err = c.client.Chat(ctx, req, func(resp api.ChatResponse) error {
res = resp res = resp
return nil return nil

View File

@ -69,11 +69,6 @@ func NewManager(config *config.Config, llm *utils_ollama.Client) *Manager {
knowledgeTool := NewKnowledgeBaseTool(config.Tools.Knowledge) knowledgeTool := NewKnowledgeBaseTool(config.Tools.Knowledge)
m.tools[knowledgeTool.Name()] = knowledgeTool m.tools[knowledgeTool.Name()] = knowledgeTool
} }
// 普通对话
chat := NewNormalChatTool(m.llm)
m.tools[chat.Name()] = chat
return m return m
} }

View File

@ -1,78 +0,0 @@
package tools
import (
"ai_scheduler/internal/entitys"
"ai_scheduler/internal/pkg"
"ai_scheduler/internal/pkg/utils_ollama"
"context"
"encoding/json"
"fmt"
"github.com/ollama/ollama/api"
)
// NormalChatTool is the fallback tool for general-purpose conversation:
// it answers the user directly via the LLM instead of calling a domain API.
type NormalChatTool struct {
	llm *utils_ollama.Client // Ollama client used to stream chat completions
}
// NewNormalChatTool constructs a NormalChatTool backed by the given
// Ollama client.
func NewNormalChatTool(llm *utils_ollama.Client) *NormalChatTool {
	tool := &NormalChatTool{}
	tool.llm = llm
	return tool
}
// Name returns the registry key under which this tool is dispatched.
func (w *NormalChatTool) Name() string {
	const toolName = "normalChat"
	return toolName
}
// Description returns the intent description shown to the router model.
// NOTE: the returned string is matched at runtime and must stay in Chinese.
func (w *NormalChatTool) Description() string {
	desc := "用户想进行一般性问答"
	return desc
}
// NormalChat is the parameter payload decoded from the router's match
// for a plain-conversation request.
type NormalChat struct {
	ChatContent string `json:"chat_content"` // free-form user chat content
}
// Definition returns the tool's schema definition. This tool exposes no
// parameters to the model, so the zero-value definition is returned.
func (w *NormalChatTool) Definition() entitys.ToolDefinition {
	var def entitys.ToolDefinition
	return def
}
// Execute runs one general-conversation turn: it decodes the matched
// parameters, applies a default prompt when no content was supplied, and
// streams the model's reply onto the request channel.
//
// Fixes vs. the original: the error message no longer references the
// unrelated zltxOrderDetail tool (copy-paste bug), and the caller's ctx is
// threaded through to the LLM call instead of being discarded.
func (w *NormalChatTool) Execute(ctx context.Context, requireData *entitys.RequireData) error {
	var req NormalChat
	if err := json.Unmarshal([]byte(requireData.Match.Parameters), &req); err != nil {
		return fmt.Errorf("invalid normalChat request: %w", err)
	}
	if req.ChatContent == "" {
		// Default prompt when the router matched this tool without content.
		req.ChatContent = "介绍一下你能做什么"
	}
	// NOTE(review): req.ChatContent is decoded (and defaulted) but never
	// forwarded to the model below — only requireData.UserInput is sent.
	// Looks intentional for now; confirm whether ChatContent should be used.
	return w.chat(ctx, requireData, &req)
}

// chat streams an LLM reply for a plain conversation, providing the prior
// chat history and the user's latest input as model context.
func (w *NormalChatTool) chat(ctx context.Context, requireData *entitys.RequireData, req *NormalChat) (err error) {
	// Use the caller's ctx (was context.TODO()) so cancellation propagates
	// into the streaming call.
	err = w.llm.ChatStream(ctx, requireData.Ch, []api.Message{
		{
			Role:    "system",
			Content: "你是一个聊天助手",
		},
		{
			Role:    "assistant",
			Content: fmt.Sprintf("聊天记录:%s", pkg.JsonStringIgonErr(requireData.Histories)),
		},
		{
			Role:    "user",
			Content: requireData.UserInput,
		},
	}, w.Name())
	if err != nil {
		// Wrap with %w (was "%s") so callers can unwrap the cause.
		return fmt.Errorf("chat stream: %w", err)
	}
	return
}