# 代码示例

langchain生态

- Chain

```python
async def main():
    """LCEL demo: prompt -> model -> parser pipeline, invoked both ways.

    Runs one non-streaming call (``ainvoke``) and one streaming call
    (``astream``) against a local Ollama model.
    """
    # 1) Prompt template: fixed system persona plus a "{input}" slot.
    prompt = ChatPromptTemplate.from_messages([
        ("system", "你是一个友好、专业的AI助手。"),
        ("human", "{input}")
    ])

    # 2) Chat model (adjust to match your local Ollama setup).
    model = ChatOllama(
        model="qwen3:8b",  # or your actual model name
        base_url="http://localhost:11434"
    )

    # 3) Output parser: converts the AIMessage into a plain string.
    parser = StrOutputParser()

    # 4) Compose the LCEL pipeline with the | operator.
    chain = prompt | model | parser

    # Non-streaming call: one awaited result.
    text = await chain.ainvoke({"input": "请用一句话做自我介绍"})
    print("非流式:", text)

    # Streaming call: print chunks as they arrive.
    print("流式:", end="", flush=True)
    async for chunk in chain.astream({"input": "讲一个简短的笑话"}):
        print(chunk, end="", flush=True)
```

- Graph

```python
# Build and run a LangGraph state machine.
async def main():
    """Assemble a StateGraph, compile it, and run one invocation.

    Flow: intent_analysis -> (order_diagnosis | natural_chat | END),
    with both worker nodes terminating at END.
    """
    workflow = StateGraph(WorkflowState)

    # Register nodes.
    # NOTE(review): the `self._*` references assume this code lives inside a
    # class method — as a free function `self` is undefined; confirm context.
    workflow.add_node("intent_analysis", self._intent_analysis_wrapper)
    workflow.add_node("order_diagnosis", self._order_diagnosis_wrapper)
    workflow.add_node("natural_chat", self._natural_chat_wrapper)

    # All traffic enters through intent analysis.
    workflow.set_entry_point("intent_analysis")

    # Route to the next node based on the analyzed intent.
    workflow.add_conditional_edges(
        "intent_analysis",
        self._route_after_intent_analysis,
        {
            "order_diagnosis": "order_diagnosis",
            "natural_chat": "natural_chat",
            "end": END
        }
    )

    # Both worker nodes terminate the graph.
    workflow.add_edge("order_diagnosis", END)
    workflow.add_edge("natural_chat", END)

    # Compile the graph into a runnable agent.
    agent = workflow.compile()

    messages = [HumanMessage(content="Add 3 and 4.")]
    messages = agent.invoke({"messages": messages})
    for m in messages["messages"]:
        print(m.content)
```

对应流程图(Mermaid)

```mermaid
flowchart TD
    IA[intent_analysis] --> ROUTE{判断意图}
    ROUTE -->|order_diagnosis| OD[order_diagnosis]
    ROUTE -->|natural_chat| NC[natural_chat]
    ROUTE -->|end| END((END))

    OD --> END
    NC --> END
```

说明:当意图分析结果为空或不满足条件时,视为“否”分支,进入 `natural_chat`。

- Agent

```python
from langchain.agents import create_agent


def get_weather(city: str) -> str:
    """Get weather for a given city."""
    return f"It's always sunny in {city}!"


# Agent wired to a Claude model with one tool.
agent = create_agent(
    model="claude-sonnet-4-5-20250929",
    tools=[get_weather],
    system_prompt="You are a helpful assistant",
)

# Run the agent on a single user message.
agent.invoke(
    {"messages": [{"role": "user", "content": "what is the weather in sf"}]}
)
```

Go生态

- chain

```go
package main
|
||
|
||
import (
|
||
"context"
|
||
"fmt"
|
||
"os"
|
||
|
||
"github.com/cloudwego/eino/compose"
|
||
"github.com/cloudwego/eino/components/prompt"
|
||
"github.com/cloudwego/eino/schema"
|
||
"github.com/cloudwego/eino-ext/components/model/openai"
|
||
)
|
||
|
||
func main() {
|
||
ctx := context.Background()
|
||
|
||
// 1) 定义 ChatTemplate(输入键:query),使用 Jinja2 格式
|
||
pt := prompt.FromMessages(
|
||
schema.Jinja2,
|
||
schema.SystemMessage("你是一个友好的AI助手。请用中文回答。"),
|
||
schema.UserMessage("用户问题:{{query}}"),
|
||
)
|
||
|
||
// 2) 创建 ChatModel(以 OpenAI 为例,按需替换为其他提供商)
|
||
cm, err := openai.NewChatModel(ctx, &openai.ChatModelConfig{
|
||
APIKey: os.Getenv("OPENAI_API_KEY"),
|
||
BaseURL: os.Getenv("OPENAI_BASE_URL"), // 可选,自建或代理时设置
|
||
Model: os.Getenv("MODEL_NAME"), // 例如:"gpt-4o-mini"
|
||
})
|
||
if err != nil {
|
||
fmt.Println("初始化模型失败:", err)
|
||
return
|
||
}
|
||
|
||
// 3) 组合并编译 Chain:模板 -> 模型
|
||
chain, err := compose.NewChain[map[string]any, *schema.Message]().
|
||
AppendChatTemplate(pt). // prompt
|
||
AppendChatModel(cm). // model
|
||
// AppendBranch 分支
|
||
// AppendPassthrough 空透传节点
|
||
// AppendParallel 并行节点
|
||
// AppendGraph 子图节点
|
||
// AppendLambda 自定义函数节点
|
||
Compile(ctx) // 编译链
|
||
if err != nil {
|
||
fmt.Println("编译链失败:", err)
|
||
return
|
||
}
|
||
|
||
// 4) 调用链(非流式)
|
||
msg, err := chain.Invoke(ctx, map[string]any{"query": "简要介绍一下 Eino 的 Chain"})
|
||
if err != nil {
|
||
fmt.Println("调用失败:", err)
|
||
return
|
||
}
|
||
|
||
fmt.Println("回复:", msg.Content)
|
||
}
|
||
```