Coverage for examples/cli/main_user_proxy.py: 36%
14 statements
« prev ^ index » next — coverage.py v7.8.0, created at 2025-04-19 12:16 +0000
import os
from typing import Any

from autogen import ConversableAgent, LLMConfig, UserProxyAgent

from fastagency import UI, FastAgency
from fastagency.api.openapi import OpenAPI
from fastagency.api.openapi.security import APIKeyHeader
from fastagency.runtimes.ag2 import Workflow
from fastagency.ui.console import ConsoleUI

# Shared LLM configuration picked up by agents created under `with llm_config:`.
# NOTE(review): os.getenv returns None when OPENAI_API_KEY is unset, in which
# case the first model call will fail at runtime — confirm the key is exported.
llm_config = LLMConfig(
    model="gpt-4o-mini",
    api_key=os.getenv("OPENAI_API_KEY"),
    temperature=0.8,
)

# Workflow registry; workflows are attached via the @wf.register decorator below.
wf = Workflow()
@wf.register(name="weatherman_workflow", description="Weatherman chat")
def simple_workflow(ui: UI, params: dict[str, Any]) -> str:
    """Interactive weather chat between a human user proxy and a weatherman agent.

    Asks the user what they want to know, lets the weatherman agent answer
    using the weather REST API (registered as callable tools), and returns
    the processed chat result.

    Args:
        ui: FastAgency UI used to prompt the user and render the run.
        params: Workflow parameters (unused here).

    Returns:
        The workflow result as produced by ``ui.process``.
    """
    # Agents constructed inside the context inherit llm_config.
    with llm_config:
        user_proxy = UserProxyAgent(
            name="User_Proxy",
            human_input_mode="ALWAYS",  # every turn is confirmed by the human
        )
        weatherman = ConversableAgent(
            name="Weatherman",
            system_message="You are a weatherman.",
        )

    # Build an API client from the service's OpenAPI spec.
    weather_client = OpenAPI.create(
        openapi_url="https://weather.tools.fastagency.ai/openapi.json"
    )
    # Set global security params for all methods
    weather_client.set_security_params(
        APIKeyHeader.Parameters(value="secure weather key")
    )

    # Expose the API's operations as tools: the user proxy may call them,
    # the weatherman executes them.
    wf.register_api(
        api=weather_client,
        callers=user_proxy,
        executors=weatherman,
    )

    initial_message = ui.text_input(
        sender="Workflow",
        recipient="User",
        prompt="What would you like to find out about weather?",
    )

    response = user_proxy.run(
        weatherman,
        message=initial_message,
        summary_method="reflection_with_llm",
        max_turns=3,  # bound the back-and-forth to three exchanges
    )

    return ui.process(response)
# Application entry point: serve the registered workflows on a console UI
# (run with the `fastagency run` CLI).
app = FastAgency(provider=wf, ui=ConsoleUI())