Coverage for docs/docs_src/user_guide/external_rest_apis/main.py: 100%
13 statements
coverage.py v7.8.0, created at 2025-04-19 12:16 +0000

import os

from autogen import UserProxyAgent, ConversableAgent, LLMConfig

from fastagency import UI, FastAgency
from fastagency.api.openapi import OpenAPI
from fastagency.runtimes.ag2 import Workflow
from fastagency.ui.console import ConsoleUI
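
# Shared LLM configuration for both agents; the API key is read from the OPENAI_API_KEY environment variable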
llm_config = LLMConfig(
    model="gpt-4o-mini",
    api_key=os.getenv("OPENAI_API_KEY"),
    temperature=0.8,
)
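
# Create a client for the weather REST API from its OpenAPI specification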
WEATHER_OPENAPI_URL = "https://weather.tools.fastagency.ai/openapi.json"
weather_api = OpenAPI.create(openapi_url=WEATHER_OPENAPI_URL)

wf = Workflow()
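

# Register this function as the "simple_weather" workflow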
@wf.register(name="simple_weather", description="Weather chat")
def weather_workflow(
    ui: UI, params: dict[str, str]
) -> str:
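    # Ask the user what they want to know about the weather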
    initial_message = ui.text_input(
        sender="Workflow",
        recipient="User",
        prompt="What do you want to know about the weather?",
    )
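
    # A user proxy that drives the conversation and a weather agent, both using the shared LLM configuration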
    user_agent = UserProxyAgent(
        name="User_Agent",
        system_message="You are a user agent",
        human_input_mode="NEVER",
        llm_config=llm_config,
    )
    weather_agent = ConversableAgent(
        name="Weather_Agent",
        system_message="You are a weather agent",
        human_input_mode="NEVER",
        llm_config=llm_config,
    )
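
    # Register the weather API functions: user_agent can suggest the calls, weather_agent executes them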
    wf.register_api(
        api=weather_api,
        callers=user_agent,
        executors=weather_agent,
    )
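
    # Run the two-agent chat for at most three turns and summarize the result with the LLM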
    response = user_agent.run(
        weather_agent,
        message=initial_message,
        summary_method="reflection_with_llm",
        max_turns=3,
    )

    return ui.process(response)  # type: ignore[no-any-return]
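

# Wire the workflow into a FastAgency application with a console UI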
app = FastAgency(provider=wf, ui=ConsoleUI())