Sequential Mode

Measure end-to-end latency for a team whose entities run one after another. The example below uses a single-agent sequential team.

import asyncio
from upsonic import Agent, Task, Team
from upsonic.eval import PerformanceEvaluator

# Single agent that the team will run.
analyst = Agent(
    model="anthropic/claude-sonnet-4-5",
    name="Analyst",
    role="Data Analyst",
    goal="Analyze data",
)

# Team with one entity, executing in sequential mode.
team = Team(
    entities=[analyst],
    mode="sequential",
)

task = Task(description="Calculate 5 + 5")

# Measure 3 iterations after 1 warmup run.
evaluator = PerformanceEvaluator(
    agent_under_test=team,
    task=task,
    num_iterations=3,
    warmup_runs=1,
)

result = asyncio.run(evaluator.run(print_results=True))

# Latency stats are reported in seconds; convert to milliseconds for display.
print(f"Avg latency: {result.latency_stats['average'] * 1000:.0f} ms")
print(f"Std dev: {result.latency_stats['std_dev'] * 1000:.0f} ms")

Coordinate Mode

Measure latency for a team in coordinate mode, where a lead model (set via the Team's model parameter) delegates the task to the worker agents.

import asyncio
from upsonic import Agent, Task, Team
from upsonic.eval import PerformanceEvaluator

# Two worker agents that the coordinator can delegate to.
worker_a = Agent(
    model="anthropic/claude-sonnet-4-5",
    name="WorkerA",
    role="Worker",
    goal="Do assigned work",
)

worker_b = Agent(
    model="anthropic/claude-sonnet-4-5",
    name="WorkerB",
    role="Worker",
    goal="Do assigned work",
)

# In coordinate mode the Team also takes a model for the coordinator itself.
team = Team(
    entities=[worker_a, worker_b],
    mode="coordinate",
    model="anthropic/claude-sonnet-4-5",
)

task = Task(description="What is the tallest mountain in the world?")

# Measure 3 iterations after 1 warmup run.
evaluator = PerformanceEvaluator(
    agent_under_test=team,
    task=task,
    num_iterations=3,
    warmup_runs=1,
)

result = asyncio.run(evaluator.run(print_results=True))

# Average latency is reported in seconds; convert to milliseconds for display.
print(f"Avg latency: {result.latency_stats['average'] * 1000:.0f} ms")

Route Mode

Measure latency for a team in route mode, where the Team's model routes the task to the single most suitable specialist.

import asyncio
from upsonic import Agent, Task, Team
from upsonic.eval import PerformanceEvaluator

# Specialist agents; the router picks the one best suited to the task.
math_agent = Agent(
    model="anthropic/claude-sonnet-4-5",
    name="MathAgent",
    role="Math Solver",
    goal="Solve math problems",
)

trivia_agent = Agent(
    model="anthropic/claude-sonnet-4-5",
    name="TriviaAgent",
    role="Trivia Expert",
    goal="Answer trivia questions",
)

# In route mode the Team's model makes the routing decision.
team = Team(
    entities=[math_agent, trivia_agent],
    mode="route",
    model="anthropic/claude-sonnet-4-5",
)

# A math question, which should be routed to MathAgent.
task = Task(description="What is 10 + 20?")

# Measure 3 iterations after 1 warmup run.
evaluator = PerformanceEvaluator(
    agent_under_test=team,
    task=task,
    num_iterations=3,
    warmup_runs=1,
)

result = asyncio.run(evaluator.run(print_results=True))

# Average latency is reported in seconds; convert to milliseconds for display.
print(f"Avg latency: {result.latency_stats['average'] * 1000:.0f} ms")