# type: ignore
from typing import Annotated

from a2a.types import Message  # assumed import path for the A2A Message type
from agentstack_sdk.a2a.extensions import (
    LLMServiceExtensionServer,
    LLMServiceExtensionSpec,
    TrajectoryExtensionServer,
    TrajectoryExtensionSpec,
)
from agentstack_sdk.server.context import RunContext  # assumed import path; adjust to your SDK version
from gpt_researcher import GPTResearcher


async def my_wrapped_agent(
    input: Message,
    context: RunContext,
    trajectory: Annotated[TrajectoryExtensionServer, TrajectoryExtensionSpec()],
    llm: Annotated[
        LLMServiceExtensionServer,
        LLMServiceExtensionSpec.single_demand(
            suggested=(
                "watsonx:meta-llama/llama-3-3-70b-instruct",
                "watsonx:openai/gpt-oss-120b",
            )
        ),
    ],
):
    # ... previous configuration code ...

    class LogHandler:
        """Streams GPT Researcher events back to the client as trajectory metadata."""

        async def on_tool_start(self, tool_name, **kwargs):
            await context.yield_async(trajectory.trajectory_metadata(title=tool_name, content=str(kwargs)))

        async def on_agent_action(self, action, **kwargs):
            await context.yield_async(trajectory.trajectory_metadata(title=action, content=str(kwargs)))

        async def on_research_step(self, step, details):
            await context.yield_async(trajectory.trajectory_metadata(title=step, content=str(details)))

    # Initialize the researcher with the log handler
    # (user_message is the text extracted from `input` in the configuration code above)
    researcher = GPTResearcher(
        query=user_message, report_type="research_report", verbose=True, log_handler=LogHandler()
    )
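
    # A minimal sketch of the assumed next step (not shown in this section):
    # GPT Researcher's async API runs the research and writes the report, which
    # can then be sent back to the client via the SDK (e.g. through context.yield_async).
    await researcher.conduct_research()
    report = await researcher.write_report()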