This example demonstrates how to implement an asynchronous user confirmation flow, allowing execution to proceed without blocking while waiting for user input.
import asyncio
import json

import httpx
from agno.agent import Agent
from agno.models.openai import OpenAIChat
from agno.tools import tool
from agno.utils import pprint
from rich.console import Console
from rich.prompt import Prompt

console = Console()


@tool(requires_confirmation=True)
async def get_top_hackernews_stories(num_stories: int) -> str:
    """Fetch the top stories from Hacker News.

    Args:
        num_stories (int): Number of stories to retrieve

    Returns:
        str: JSON string containing story details
    """
    # Fetch the IDs of the top stories
    response = httpx.get("https://hacker-news.firebaseio.com/v0/topstories.json")
    story_ids = response.json()

    # Fetch the details of each story
    all_stories = []
    for story_id in story_ids[:num_stories]:
        story_response = httpx.get(
            f"https://hacker-news.firebaseio.com/v0/item/{story_id}.json"
        )
        story = story_response.json()
        # Drop the (potentially long) text body before returning the results
        if "text" in story:
            story.pop("text", None)
        all_stories.append(story)
    return json.dumps(all_stories)


agent = Agent(
    model=OpenAIChat(id="gpt-4o-mini"),
    tools=[get_top_hackernews_stories],
    markdown=True,
)

# Start the run; it pauses when the confirmation-required tool is called
run_response = asyncio.run(agent.arun("Fetch the top 2 hackernews stories"))
if run_response.is_paused:
    for tool in run_response.tools_requiring_confirmation:
        # Ask the user for confirmation
        console.print(
            f"Tool name [bold blue]{tool.tool_name}({tool.tool_args})[/] requires confirmation."
        )
        message = (
            Prompt.ask("Do you want to continue?", choices=["y", "n"], default="y")
            .strip()
            .lower()
        )
        if message == "n":
            tool.confirmed = False
        else:
            tool.confirmed = True

    # Resume the paused run with the confirmation results
    run_response = asyncio.run(agent.acontinue_run(run_response=run_response))
    pprint.pprint_run_response(run_response)
Create a virtual environment
Open the Terminal and create a Python virtual environment.
python3 -m venv .venv
source .venv/bin/activate
Set your API key
export OPENAI_API_KEY=xxx
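If you want to confirm the key is visible before running the example, a quick check from Python (a minimal sketch that only reads the environment variable set above) looks like this:

import os

# Fail early if the OPENAI_API_KEY environment variable is missing.
if not os.environ.get("OPENAI_API_KEY"):
    raise SystemExit("OPENAI_API_KEY is not set")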
Install libraries
pip install -U agno httpx rich openai
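Optionally, verify that the installed packages import cleanly before running the example:

python -c "import agno, httpx, rich, openai"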
Run the example
python cookbook/agent_concepts/user_control_flows/confirmation_required_async.py
Key points:
agent.arun() performs asynchronous agent execution.
agent.acontinue_run() asynchronously resumes the paused run.
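If the agent may pause more than once before finishing, the same pair of calls can be wrapped in a loop. The sketch below is a minimal illustration that reuses only the calls shown in the example above; run_with_confirmations is a hypothetical helper name, not part of the agno API.

import asyncio

from rich.prompt import Prompt


async def run_with_confirmations(agent, prompt: str):
    # Start the run asynchronously; it pauses when a confirmation-required tool is called.
    run_response = await agent.arun(prompt)
    # Keep resolving confirmations until the run is no longer paused.
    while run_response.is_paused:
        for tool in run_response.tools_requiring_confirmation:
            answer = Prompt.ask(
                f"Run {tool.tool_name}({tool.tool_args})?",
                choices=["y", "n"],
                default="y",
            )
            tool.confirmed = answer.strip().lower() != "n"
        run_response = await agent.acontinue_run(run_response=run_response)
    return run_response


# Usage (with the agent defined in the example above):
# asyncio.run(run_with_confirmations(agent, "Fetch the top 2 hackernews stories"))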