initial a2a-pack
This commit is contained in:
22
.gitea/workflows/build.yml
Normal file
22
.gitea/workflows/build.yml
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
name: build

on:
  push:
    branches: [main]
    # deploy/ changes are infra-only; no need to rebuild the base image
    paths-ignore:
      - 'deploy/**'

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: build base image
        run: |
          IMG=registry.127-0-0-1.nip.io/a2a/a2a-pack-base
          VERSION=$(grep '^version' pyproject.toml | head -1 | cut -d'"' -f2)
          docker build -t $IMG:$VERSION -t $IMG:${{ github.sha }} -t $IMG:latest .
          docker push $IMG:$VERSION
          docker push $IMG:${{ github.sha }}
          docker push $IMG:latest
          echo "pushed a2a-pack-base:$VERSION (and :latest, :${{ github.sha }})"
|
||||||
8
.gitignore
vendored
Normal file
8
.gitignore
vendored
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
__pycache__
|
||||||
|
*.pyc
|
||||||
|
.venv
|
||||||
|
.pytest_cache
|
||||||
|
*.egg-info
|
||||||
|
dist
|
||||||
|
build
|
||||||
|
|
||||||
15
Dockerfile
Normal file
15
Dockerfile
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
# Base image for A2A agents. User agents `FROM` this so the SDK is
# pre-installed and rebuilds are fast.
FROM python:3.11-slim

WORKDIR /sdk

# Copy only what `pip install .` needs; keeps the build context small.
COPY pyproject.toml ./
COPY a2a_pack ./a2a_pack

RUN pip install --no-cache-dir .

# Sanity check
RUN python -c "import a2a_pack; print('a2a-pack', a2a_pack.__name__, 'ok')"

# Agent code lands in /app in derived images.
WORKDIR /app
|
||||||
80
a2a_pack/__init__.py
Normal file
80
a2a_pack/__init__.py
Normal file
@@ -0,0 +1,80 @@
|
|||||||
|
from .agent import (
|
||||||
|
A2AAgent,
|
||||||
|
ParamSpec,
|
||||||
|
SkillInputError,
|
||||||
|
SkillInvocationError,
|
||||||
|
SkillNotFound,
|
||||||
|
SkillSpec,
|
||||||
|
skill,
|
||||||
|
)
|
||||||
|
from .auth import APIKeyAuth, JWTAuth, NoAuth
|
||||||
|
from .card import AgentCard, SkillCard
|
||||||
|
from .context import (
|
||||||
|
AgentEvent,
|
||||||
|
ArtifactRef,
|
||||||
|
CancelledByCaller,
|
||||||
|
LocalRunContext,
|
||||||
|
MissingScopes,
|
||||||
|
RunContext,
|
||||||
|
)
|
||||||
|
from .runtime import (
|
||||||
|
AgentRuntime,
|
||||||
|
EgressPolicy,
|
||||||
|
Lifecycle,
|
||||||
|
Resources,
|
||||||
|
Sandbox,
|
||||||
|
SkillPolicy,
|
||||||
|
State,
|
||||||
|
)
|
||||||
|
from .workspace import (
|
||||||
|
FileMatch,
|
||||||
|
FileType,
|
||||||
|
LocalWorkspaceClient,
|
||||||
|
LocalWorkspaceView,
|
||||||
|
WorkspaceAccess,
|
||||||
|
WorkspaceClient,
|
||||||
|
WorkspaceDenied,
|
||||||
|
WorkspaceGrant,
|
||||||
|
WorkspaceMode,
|
||||||
|
WorkspacePatch,
|
||||||
|
WorkspaceView,
|
||||||
|
)
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"A2AAgent",
|
||||||
|
"APIKeyAuth",
|
||||||
|
"AgentCard",
|
||||||
|
"AgentEvent",
|
||||||
|
"AgentRuntime",
|
||||||
|
"ArtifactRef",
|
||||||
|
"CancelledByCaller",
|
||||||
|
"EgressPolicy",
|
||||||
|
"FileMatch",
|
||||||
|
"FileType",
|
||||||
|
"JWTAuth",
|
||||||
|
"Lifecycle",
|
||||||
|
"LocalRunContext",
|
||||||
|
"LocalWorkspaceClient",
|
||||||
|
"LocalWorkspaceView",
|
||||||
|
"MissingScopes",
|
||||||
|
"NoAuth",
|
||||||
|
"ParamSpec",
|
||||||
|
"Resources",
|
||||||
|
"RunContext",
|
||||||
|
"Sandbox",
|
||||||
|
"SkillCard",
|
||||||
|
"SkillInputError",
|
||||||
|
"SkillInvocationError",
|
||||||
|
"SkillNotFound",
|
||||||
|
"SkillPolicy",
|
||||||
|
"SkillSpec",
|
||||||
|
"State",
|
||||||
|
"WorkspaceAccess",
|
||||||
|
"WorkspaceClient",
|
||||||
|
"WorkspaceDenied",
|
||||||
|
"WorkspaceGrant",
|
||||||
|
"WorkspaceMode",
|
||||||
|
"WorkspacePatch",
|
||||||
|
"WorkspaceView",
|
||||||
|
"skill",
|
||||||
|
]
|
||||||
410
a2a_pack/agent.py
Normal file
410
a2a_pack/agent.py
Normal file
@@ -0,0 +1,410 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import inspect
|
||||||
|
import typing
|
||||||
|
from dataclasses import dataclass, field
|
||||||
|
from typing import Any, Awaitable, Callable, ClassVar, Generic, Sequence, TypeVar
|
||||||
|
|
||||||
|
from pydantic import BaseModel, TypeAdapter
|
||||||
|
|
||||||
|
from .auth import NoAuth
|
||||||
|
from .card import AgentCard
|
||||||
|
from .context import LocalRunContext, RunContext
|
||||||
|
from .runtime import (
|
||||||
|
AgentRuntime,
|
||||||
|
EgressPolicy,
|
||||||
|
Lifecycle,
|
||||||
|
Resources,
|
||||||
|
Sandbox,
|
||||||
|
SkillPolicy,
|
||||||
|
State,
|
||||||
|
)
|
||||||
|
from .workspace import WorkspaceAccess
|
||||||
|
|
||||||
|
ConfigT = TypeVar("ConfigT", bound=BaseModel)
|
||||||
|
AuthT = TypeVar("AuthT", bound=BaseModel)
|
||||||
|
|
||||||
|
|
||||||
|
_RESERVED_PARAM_NAMES = frozenset({"self", "ctx", "context"})
|
||||||
|
|
||||||
|
|
||||||
|
class _EmptyConfig(BaseModel):
    """Placeholder config model used when an agent declares no config of its own."""
|
||||||
|
|
||||||
|
|
||||||
|
class SkillNotFound(KeyError):
    """invoke() was asked for a skill name this agent does not expose."""
|
||||||
|
|
||||||
|
|
||||||
|
class SkillInvocationError(RuntimeError):
    """A skill handler raised while running inside invoke()."""
|
||||||
|
|
||||||
|
|
||||||
|
class SkillInputError(ValueError):
    """Inputs passed to invoke() failed validation against the skill schema."""
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
class ParamSpec:
    """Validation metadata for one skill parameter.

    Attributes:
        name: parameter name as declared in the handler signature.
        adapter: pydantic adapter used to validate/coerce the caller's value.
        has_default: whether the handler declares a default for this parameter.
        default: the declared default; None when ``has_default`` is False.
    """

    name: str
    adapter: TypeAdapter[Any]
    has_default: bool
    default: Any = None
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
class SkillSpec:
    """Immutable description of a single skill, captured by @skill at decoration time."""

    name: str
    description: str
    tags: tuple[str, ...]
    scopes: tuple[str, ...]
    stream: bool
    policy: SkillPolicy
    input_schema: dict[str, Any]
    output_schema: dict[str, Any]
    handler: Callable[..., Awaitable[Any]]
    # Per-parameter validators; empty for a zero-argument skill.
    params: tuple[ParamSpec, ...] = field(default_factory=tuple)
    # Adapter for serializing the handler's return value (None = passthrough).
    output_adapter: TypeAdapter[Any] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def skill(
    *,
    name: str | None = None,
    description: str = "",
    tags: Sequence[str] = (),
    scopes: Sequence[str] = (),
    stream: bool = False,
    timeout_seconds: float | None = None,
    idempotent: bool = False,
    max_retries: int = 0,
    cost_class: str | None = None,
) -> Callable[[Callable[..., Awaitable[Any]]], Callable[..., Awaitable[Any]]]:
    """Mark an :class:`A2AAgent` method as a discoverable skill.

    Conventions:

    - The handler MUST be ``async def``.
    - Its first parameter (after ``self``) MUST be a :class:`RunContext`;
      the context is supplied by the runtime and is omitted from the
      published input schema.
    - Remaining parameters MUST be type-annotated. ``*args`` and ``**kwargs``
      are rejected.
    """

    def decorator(fn: Callable[..., Awaitable[Any]]) -> Callable[..., Awaitable[Any]]:
        if not inspect.iscoroutinefunction(fn):
            raise TypeError(
                f"@skill requires an async function: {fn.__qualname__}"
            )

        hints = typing.get_type_hints(fn)
        declared = list(inspect.signature(fn).parameters.values())[1:]  # drop self
        if not declared:
            raise TypeError(
                f"@skill {fn.__qualname__}: missing RunContext parameter"
            )

        # First declared parameter must carry a RunContext annotation.
        ctx_param, *data_params = declared
        ctx_hint = hints.get(ctx_param.name)
        if ctx_hint is None or not _is_run_context(ctx_hint):
            raise TypeError(
                f"@skill {fn.__qualname__}: first arg after self must be "
                f"annotated as RunContext (got {ctx_hint!r})"
            )

        # Build the JSON input schema and per-parameter validators.
        properties: dict[str, Any] = {}
        required: list[str] = []
        param_specs: list[ParamSpec] = []
        for param in data_params:
            if param.kind is inspect.Parameter.VAR_POSITIONAL:
                raise TypeError(
                    f"@skill {fn.__qualname__}: *{param.name} is not allowed"
                )
            if param.kind is inspect.Parameter.VAR_KEYWORD:
                raise TypeError(
                    f"@skill {fn.__qualname__}: **{param.name} is not allowed"
                )
            if param.name in _RESERVED_PARAM_NAMES:
                raise TypeError(
                    f"@skill {fn.__qualname__}: reserved param name {param.name!r}"
                )
            if param.name not in hints:
                raise TypeError(
                    f"@skill {fn.__qualname__}: parameter {param.name!r} is "
                    f"missing a type annotation"
                )

            adapter: TypeAdapter[Any] = TypeAdapter(hints[param.name])
            properties[param.name] = adapter.json_schema()
            has_default = param.default is not inspect.Parameter.empty
            if not has_default:
                required.append(param.name)
            param_specs.append(
                ParamSpec(
                    name=param.name,
                    adapter=adapter,
                    has_default=has_default,
                    default=param.default if has_default else None,
                )
            )

        # No return annotation means "anything goes" for the output schema.
        output_adapter: TypeAdapter[Any] = TypeAdapter(hints.get("return", Any))

        spec = SkillSpec(
            name=name or fn.__name__,
            description=description,
            tags=tuple(tags),
            scopes=tuple(scopes),
            stream=stream,
            policy=SkillPolicy(
                timeout_seconds=timeout_seconds,
                idempotent=idempotent,
                max_retries=max_retries,
                cost_class=cost_class,
            ),
            input_schema={
                "type": "object",
                "properties": properties,
                "required": required,
                "additionalProperties": False,
            },
            output_schema=output_adapter.json_schema(),
            handler=fn,
            params=tuple(param_specs),
            output_adapter=output_adapter,
        )
        # The metaclass discovers skills through this attribute.
        fn.__a2a_skill__ = spec  # type: ignore[attr-defined]
        return fn

    return decorator
|
||||||
|
|
||||||
|
|
||||||
|
def _is_run_context(tp: Any) -> bool:
    """Return True when ``tp`` is :class:`RunContext` or a parametrization of it."""
    # Unwrap RunContext[SomeAuth] to its origin class; plain classes pass through.
    base = typing.get_origin(tp) or tp
    if not isinstance(base, type):
        return False
    try:
        return issubclass(base, RunContext)
    except TypeError:
        # issubclass rejects some typing constructs that slip past the type check
        return False
|
||||||
|
|
||||||
|
|
||||||
|
class _AgentMeta(type):
    """Metaclass that collects @skill-decorated methods into ``cls._skills``.

    Inherited skills are merged first so a subclass may override a parent's
    skill with the same handler chain; two *distinct* handlers claiming one
    name in the same class is an error.
    """

    def __new__(mcs, cls_name, bases, namespace):
        cls = super().__new__(mcs, cls_name, bases, namespace)
        skills: dict[str, SkillSpec] = {}
        for base in bases:
            skills.update(getattr(base, "_skills", {}))
        for attr in namespace.values():
            spec = getattr(attr, "__a2a_skill__", None)
            if spec is None:
                continue
            if spec.name in skills and skills[spec.name].handler is not spec.handler:
                # Allow overrides from the same chain (parent → child) but
                # forbid two distinct handlers in the same class.
                inherited_same_handler = any(
                    spec.name in getattr(b, "_skills", {})
                    and getattr(b, "_skills")[spec.name].handler is spec.handler
                    for b in bases
                )
                if not inherited_same_handler:
                    raise TypeError(
                        f"duplicate skill name {spec.name!r} in {cls_name}"
                    )
            skills[spec.name] = spec
        cls._skills = skills  # type: ignore[attr-defined]
        return cls
|
||||||
|
|
||||||
|
|
||||||
|
class A2AAgent(Generic[ConfigT, AuthT], metaclass=_AgentMeta):
    """Base class for A2A agents.

    Subclasses declare:

    - ``name``, ``description`` (and optional ``version``),
    - optional ``config_model`` / ``auth_model`` (default to empty / NoAuth),
    - deployment metadata: ``required_secrets``, ``required_env``,
      ``capabilities``, ``input_modes``, ``output_modes``,
    - one or more methods decorated with :func:`skill`.
    """

    # --- identity -----------------------------------------------------------
    name: ClassVar[str] = ""
    description: ClassVar[str] = ""
    version: ClassVar[str] = "0.1.0"

    # --- caller-facing models -----------------------------------------------
    config_model: ClassVar[type[BaseModel]] = _EmptyConfig
    auth_model: ClassVar[type[BaseModel]] = NoAuth

    # --- deployment metadata ------------------------------------------------
    required_secrets: ClassVar[tuple[str, ...]] = ()
    required_env: ClassVar[tuple[str, ...]] = ()
    capabilities: ClassVar[dict[str, Any]] = {}
    input_modes: ClassVar[tuple[str, ...]] = ("application/json",)
    output_modes: ClassVar[tuple[str, ...]] = ("application/json",)

    # --- runtime / deployment declaration (read by the platform deployer) ---
    # Sandbox is always microsandbox; not exposed as a knob.
    lifecycle: ClassVar[Lifecycle] = Lifecycle.EPHEMERAL
    state: ClassVar[State] = State.NONE
    state_model: ClassVar[type[BaseModel] | None] = None
    resources: ClassVar[Resources] = Resources()
    concurrency: ClassVar[int] = 1
    egress: ClassVar[EgressPolicy] = EgressPolicy()
    tools_used: ClassVar[tuple[str, ...]] = ()
    workspace_access: ClassVar[WorkspaceAccess] = WorkspaceAccess.none()

    # Populated by _AgentMeta from @skill-decorated methods.
    _skills: ClassVar[dict[str, SkillSpec]] = {}

    def __init_subclass__(cls, **kwargs: Any) -> None:
        """Validate the class-level declaration as soon as a subclass is defined."""
        super().__init_subclass__(**kwargs)
        if not cls.name:
            raise TypeError(
                f"{cls.__name__}.name must be set as a class attribute"
            )
        if cls.state is not State.NONE and cls.state_model is None:
            raise TypeError(
                f"{cls.__name__} declares state={cls.state.value!r} but "
                f"state_model is not set"
            )
        if cls.lifecycle is Lifecycle.EPHEMERAL and cls.state is State.SESSION:
            raise TypeError(
                f"{cls.__name__}: lifecycle=ephemeral is incompatible with "
                f"state=session"
            )

    @classmethod
    def runtime(cls) -> AgentRuntime:
        """Aggregate the class-level runtime declaration.

        ``sandbox`` is always :attr:`Sandbox.MICROSANDBOX`; it is set here
        rather than on the class so developers cannot weaken isolation.
        """
        return AgentRuntime(
            lifecycle=cls.lifecycle,
            state=cls.state,
            sandbox=Sandbox.MICROSANDBOX,
            resources=cls.resources,
            concurrency=cls.concurrency,
            egress=cls.egress,
            tools_used=cls.tools_used,
        )

    def __init__(self, config: ConfigT | dict[str, Any] | None = None) -> None:
        """Validate ``config`` against the declared ``config_model`` and store it."""
        validated = type(self).config_model.model_validate(config or {})
        self.config: ConfigT = typing.cast(ConfigT, validated)

    @property
    def skills(self) -> dict[str, SkillSpec]:
        """Skill catalog collected by the metaclass (shared per class)."""
        return type(self)._skills

    async def startup(self, ctx: RunContext[AuthT]) -> None:
        """Called once before the first invocation. Override to set up state."""

    async def shutdown(self, ctx: RunContext[AuthT]) -> None:
        """Called once before the agent process exits. Override to tear down."""

    async def health(self) -> bool:
        """Lightweight liveness check. Override to add real probes."""
        return True

    async def invoke(
        self,
        skill_name: str,
        ctx: RunContext[AuthT],
        /,
        **kwargs: Any,
    ) -> Any:
        """Invoke a skill with caller-supplied kwargs.

        Inputs are validated and coerced via the skill's pydantic schema.
        Required scopes are enforced against ``ctx.auth`` before the handler
        runs. The raw handler return value is returned (Python-typed).
        """
        spec = self.skills.get(skill_name)
        if spec is None:
            raise SkillNotFound(skill_name)

        # Scope check first: callers should never see validation errors for
        # skills they are not allowed to run.
        ctx.require_scopes(spec.scopes)

        try:
            validated = self._validate_inputs(spec, kwargs)
        except Exception as exc:
            raise SkillInputError(
                f"invalid input for skill {spec.name!r}: {exc}"
            ) from exc

        try:
            return await spec.handler(self, ctx, **validated)
        except SkillInputError:
            raise  # already the right type; don't double-wrap
        except Exception as exc:
            raise SkillInvocationError(
                f"skill {spec.name!r} raised {type(exc).__name__}: {exc}"
            ) from exc

    async def invoke_json(
        self,
        skill_name: str,
        ctx: RunContext[AuthT],
        payload: dict[str, Any],
    ) -> Any:
        """Runtime-facing invoke: takes JSON-shaped payload, returns JSON-shaped result."""
        spec = self.skills.get(skill_name)
        if spec is None:
            raise SkillNotFound(skill_name)
        result = await self.invoke(skill_name, ctx, **payload)
        if spec.output_adapter is None:
            return result
        return spec.output_adapter.dump_python(result, mode="json")

    async def local_invoke(
        self,
        skill_name: str,
        /,
        *,
        auth: AuthT | None = None,
        secrets: dict[str, str] | None = None,
        task_id: str = "local-task",
        workspace: Any = None,  # WorkspaceClient or None
        **kwargs: Any,
    ) -> Any:
        """Convenience harness: build a :class:`LocalRunContext` and invoke.

        Useful in tests and notebooks. ``auth`` defaults to a default-constructed
        instance of the agent's ``auth_model`` (works for :class:`NoAuth`; pass
        an explicit instance for auth models with required fields). Pass
        ``workspace=`` to bind a :class:`WorkspaceClient`.
        """
        if auth is None:
            auth = typing.cast(AuthT, type(self).auth_model())
        ctx: LocalRunContext[AuthT] = LocalRunContext(
            auth=auth, secrets=secrets, task_id=task_id, workspace=workspace
        )
        return await self.invoke(skill_name, ctx, **kwargs)

    def card(self) -> AgentCard:
        """Public agent card (identity, runtime declaration, skill catalog)."""
        return AgentCard.from_agent(self)

    @staticmethod
    def _validate_inputs(
        spec: SkillSpec, kwargs: dict[str, Any]
    ) -> dict[str, Any]:
        """Coerce kwargs through each ParamSpec adapter; reject unknown/missing names."""
        known = {p.name for p in spec.params}
        unknown = set(kwargs) - known
        if unknown:
            raise ValueError(f"unknown parameters: {sorted(unknown)}")

        validated: dict[str, Any] = {}
        for p in spec.params:
            if p.name in kwargs:
                validated[p.name] = p.adapter.validate_python(kwargs[p.name])
            elif not p.has_default:
                raise ValueError(f"missing required parameter: {p.name!r}")
            # else: omit so the handler's own default applies
        return validated
|
||||||
35
a2a_pack/auth.py
Normal file
35
a2a_pack/auth.py
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
"""Pluggable auth principal models.
|
||||||
|
|
||||||
|
These describe *who* is invoking a skill. The runtime auth provider produces
|
||||||
|
an instance of the agent's declared ``auth_model`` and hands it to the
|
||||||
|
:class:`RunContext`.
|
||||||
|
"""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from pydantic import BaseModel, ConfigDict, Field
|
||||||
|
|
||||||
|
|
||||||
|
class NoAuth(BaseModel):
    """Public agent: no caller identity required."""

    # Frozen + extra=forbid: principals are immutable value objects.
    model_config = ConfigDict(extra="forbid", frozen=True)
|
||||||
|
|
||||||
|
|
||||||
|
class APIKeyAuth(BaseModel):
    """Caller authenticated by a long-lived API key."""

    model_config = ConfigDict(extra="forbid", frozen=True)

    # Identifier of the key (not the secret itself).
    api_key_id: str
    scopes: list[str] = Field(default_factory=list)
|
||||||
|
|
||||||
|
|
||||||
|
class JWTAuth(BaseModel):
    """Caller authenticated by a JWT (typically from a user-facing login)."""

    model_config = ConfigDict(extra="forbid", frozen=True)

    # Standard JWT subject claim identifying the caller.
    sub: str
    org_id: str | None = None
    email: str | None = None
    scopes: list[str] = Field(default_factory=list)
|
||||||
87
a2a_pack/card.py
Normal file
87
a2a_pack/card.py
Normal file
@@ -0,0 +1,87 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import TYPE_CHECKING, Any
|
||||||
|
|
||||||
|
from pydantic import BaseModel, ConfigDict, Field
|
||||||
|
|
||||||
|
from .runtime import AgentRuntime, SkillPolicy
|
||||||
|
from .workspace import WorkspaceAccess
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from .agent import A2AAgent
|
||||||
|
|
||||||
|
|
||||||
|
class SkillCard(BaseModel):
    """Public description of a single skill, shaped for the A2A spec."""

    model_config = ConfigDict(extra="forbid")

    id: str
    name: str
    description: str
    tags: list[str] = Field(default_factory=list)
    scopes: list[str] = Field(default_factory=list)
    stream: bool = False
    policy: SkillPolicy = Field(default_factory=SkillPolicy)
    input_schema: dict[str, Any]
    output_schema: dict[str, Any]
|
||||||
|
|
||||||
|
|
||||||
|
class AgentCard(BaseModel):
    """Public description of an agent.

    Mirrors the A2A Agent Card spec: identity, capabilities, IO modes, and
    the catalog of skills the agent advertises.
    """

    model_config = ConfigDict(extra="forbid")

    name: str
    description: str
    version: str
    skills: list[SkillCard]
    capabilities: dict[str, Any] = Field(default_factory=dict)
    input_modes: list[str] = Field(default_factory=lambda: ["application/json"])
    output_modes: list[str] = Field(default_factory=lambda: ["application/json"])
    required_secrets: list[str] = Field(default_factory=list)
    required_env: list[str] = Field(default_factory=list)
    runtime: AgentRuntime = Field(default_factory=AgentRuntime)
    state_schema: dict[str, Any] | None = None
    workspace_access: WorkspaceAccess = Field(default_factory=WorkspaceAccess.none)

    @classmethod
    def from_agent(cls, agent: "A2AAgent") -> "AgentCard":
        """Build the public card from a live agent instance."""
        agent_cls = type(agent)

        skill_cards = [
            SkillCard(
                id=spec.name,
                name=spec.name,
                description=spec.description,
                tags=list(spec.tags),
                scopes=list(spec.scopes),
                stream=spec.stream,
                policy=spec.policy,
                input_schema=spec.input_schema,
                output_schema=spec.output_schema,
            )
            for spec in agent.skills.values()
        ]

        if agent_cls.state_model is None:
            state_schema = None
        else:
            state_schema = agent_cls.state_model.model_json_schema()

        return cls(
            name=agent_cls.name,
            description=agent_cls.description,
            version=agent_cls.version,
            skills=skill_cards,
            capabilities=dict(agent_cls.capabilities),
            input_modes=list(agent_cls.input_modes),
            output_modes=list(agent_cls.output_modes),
            required_secrets=list(agent_cls.required_secrets),
            required_env=list(agent_cls.required_env),
            runtime=agent_cls.runtime(),
            state_schema=state_schema,
            workspace_access=agent_cls.workspace_access,
        )
|
||||||
1
a2a_pack/cli/__init__.py
Normal file
1
a2a_pack/cli/__init__.py
Normal file
@@ -0,0 +1 @@
|
|||||||
|
"""a2a CLI package."""
|
||||||
98
a2a_pack/cli/api_client.py
Normal file
98
a2a_pack/cli/api_client.py
Normal file
@@ -0,0 +1,98 @@
|
|||||||
|
"""Thin HTTP client for the control plane API."""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
import httpx
|
||||||
|
|
||||||
|
|
||||||
|
class ApiError(RuntimeError):
    """Error response (status >= 400) from the control plane API."""

    def __init__(self, status: int, message: str) -> None:
        # Keep the numeric status for programmatic handling by callers.
        self.status = status
        super().__init__(f"API {status}: {message}")
|
||||||
|
|
||||||
|
|
||||||
|
class ControlPlaneClient:
    """Synchronous wrapper over the control plane's REST endpoints."""

    def __init__(self, api_url: str, token: str | None = None) -> None:
        # Normalize the base URL so path concatenation is clean.
        self.api_url = api_url.rstrip("/")
        self.token = token

    def _headers(self) -> dict[str, str]:
        """Default request headers; adds the bearer token when logged in."""
        headers = {"content-type": "application/json"}
        if self.token:
            headers["authorization"] = f"bearer {self.token}"
        return headers

    def _request(self, method: str, path: str, **kw: Any) -> Any:
        """Issue one HTTP request; raise :class:`ApiError` on any 4xx/5xx."""
        url = f"{self.api_url}{path}"
        with httpx.Client(timeout=30.0) as client:
            resp = client.request(method, url, headers=self._headers(), **kw)
            if resp.status_code >= 400:
                # Prefer the API's structured "detail"; fall back to raw text.
                try:
                    detail = resp.json().get("detail", resp.text)
                except Exception:  # noqa: BLE001
                    detail = resp.text
                raise ApiError(resp.status_code, str(detail))
            if resp.status_code == 204 or not resp.content:
                return None
            return resp.json()

    def signup(self, email: str, password: str) -> dict[str, Any]:
        return self._request("POST", "/v1/auth/signup", json={"email": email, "password": password})

    def login(self, email: str, password: str) -> dict[str, Any]:
        return self._request("POST", "/v1/auth/login", json={"email": email, "password": password})

    def me(self) -> dict[str, Any]:
        return self._request("GET", "/v1/me")

    def register_agent(
        self,
        *,
        name: str,
        description: str,
        version: str,
        image: str,
        public: bool,
        card: dict[str, Any],
    ) -> dict[str, Any]:
        body = {
            "name": name,
            "description": description,
            "version": version,
            "image": image,
            "public": public,
            "card": card,
        }
        return self._request("POST", "/v1/agents", json=body)

    def from_source(
        self,
        *,
        name: str,
        description: str,
        version: str,
        public: bool,
    ) -> dict[str, Any]:
        body = {
            "name": name,
            "description": description,
            "version": version,
            "public": public,
        }
        return self._request("POST", "/v1/agents/from-source", json=body)

    def list_agents(self) -> list[dict[str, Any]]:
        return self._request("GET", "/v1/agents")

    def get_agent(self, name: str) -> dict[str, Any]:
        return self._request("GET", f"/v1/agents/{name}")

    def delete_agent(self, name: str) -> None:
        self._request("DELETE", f"/v1/agents/{name}")
|
||||||
68
a2a_pack/cli/credentials.py
Normal file
68
a2a_pack/cli/credentials.py
Normal file
@@ -0,0 +1,68 @@
|
|||||||
|
"""Credentials store at ``~/.a2a/credentials.json``."""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
DEFAULT_API_URL = "http://api.127-0-0-1.nip.io"
|
||||||
|
|
||||||
|
|
||||||
|
def _config_dir() -> Path:
    """Directory holding CLI state (``~/.a2a``)."""
    return Path.home() / ".a2a"
|
||||||
|
|
||||||
|
|
||||||
|
def _creds_path() -> Path:
    """Full path of the saved-credentials file."""
    return _config_dir() / "credentials.json"
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class Credentials:
    """A saved login: which control plane we talk to and as whom."""

    api_url: str  # base URL of the control plane
    token: str    # bearer token obtained from login/signup
    email: str    # account email (informational)
|
||||||
|
|
||||||
|
|
||||||
|
def save(api_url: str, token: str, email: str) -> Path:
    """Persist credentials to ``~/.a2a/credentials.json`` and return that path."""
    directory = _config_dir()
    directory.mkdir(parents=True, exist_ok=True)
    path = _creds_path()
    payload = {"api_url": api_url, "token": token, "email": email}
    path.write_text(json.dumps(payload))
    os.chmod(path, 0o600)  # the token is a secret: owner read/write only
    return path
|
||||||
|
|
||||||
|
|
||||||
|
def load() -> Credentials | None:
    """Read saved credentials.

    Returns None when the file is absent, unreadable, not valid JSON, or —
    fix over the original — when it parses but carries no ``token``; any of
    those cases means "not logged in" rather than a crash.
    """
    path = _creds_path()
    if not path.exists():
        return None
    try:
        data = json.loads(path.read_text())
    except (OSError, json.JSONDecodeError):
        return None
    # A file without a token is corrupt/stale; treat it the same as a
    # missing file instead of raising KeyError at the caller.
    if not isinstance(data, dict) or "token" not in data:
        return None
    return Credentials(
        api_url=data.get("api_url", DEFAULT_API_URL),
        token=data["token"],
        email=data.get("email", ""),
    )
|
||||||
|
|
||||||
|
|
||||||
|
def clear() -> bool:
    """Delete the credentials file; return True if one existed."""
    path = _creds_path()
    if not path.exists():
        return False
    path.unlink()
    return True
|
||||||
|
|
||||||
|
|
||||||
|
def resolve_api_url(override: str | None = None) -> str:
    """Pick the API URL: explicit override > $A2A_API_URL > saved creds > default."""
    if override:
        return override
    env = os.environ.get("A2A_API_URL")
    if env:
        return env
    creds = load()
    if creds is not None:
        return creds.api_url
    return DEFAULT_API_URL
|
||||||
34
a2a_pack/cli/loader.py
Normal file
34
a2a_pack/cli/loader.py
Normal file
@@ -0,0 +1,34 @@
|
|||||||
|
"""Load an :class:`A2AAgent` subclass from a string entrypoint."""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import importlib
|
||||||
|
import sys
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from ..agent import A2AAgent
|
||||||
|
|
||||||
|
|
||||||
|
def load_agent_class(entrypoint: str, *, project_dir: Path | None = None) -> type[A2AAgent]:
    """Resolve ``module:ClassName`` to an :class:`A2AAgent` subclass.

    If ``project_dir`` is given, it is prepended to ``sys.path`` so a local
    ``agent.py`` can be imported without packaging.
    """
    if ":" not in entrypoint:
        raise ValueError(
            f"entrypoint must be 'module:ClassName' (got {entrypoint!r})"
        )
    module_name, class_name = entrypoint.split(":", 1)

    # Make the project importable before touching importlib.
    if project_dir is not None:
        resolved = str(project_dir.resolve())
        if resolved not in sys.path:
            sys.path.insert(0, resolved)

    module = importlib.import_module(module_name)
    candidate = getattr(module, class_name, None)
    if candidate is None:
        raise AttributeError(f"{module_name} has no attribute {class_name!r}")
    if not isinstance(candidate, type) or not issubclass(candidate, A2AAgent):
        raise TypeError(f"{entrypoint} is not an A2AAgent subclass")
    return candidate
|
||||||
430
a2a_pack/cli/main.py
Normal file
430
a2a_pack/cli/main.py
Normal file
@@ -0,0 +1,430 @@
|
|||||||
|
"""``a2a`` CLI: scaffold, validate, build, deploy."""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
import shutil
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
import tempfile
|
||||||
|
from importlib import resources
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
import typer
|
||||||
|
import yaml
|
||||||
|
from rich.console import Console
|
||||||
|
from rich.panel import Panel
|
||||||
|
|
||||||
|
from . import credentials
|
||||||
|
from .api_client import ApiError, ControlPlaneClient
|
||||||
|
from .loader import load_agent_class
|
||||||
|
from .manifests import INGRESS_HOST_TEMPLATE, NAMESPACE, render_manifests
|
||||||
|
|
||||||
|
app = typer.Typer(
|
||||||
|
add_completion=False,
|
||||||
|
no_args_is_help=True,
|
||||||
|
help="Build, package, and deploy A2A agents.",
|
||||||
|
)
|
||||||
|
console = Console()
|
||||||
|
|
||||||
|
DEFAULT_REGISTRY_HOST = "localhost:30500"
|
||||||
|
SDK_PYPI_PACKAGE = "a2a_pack"
|
||||||
|
|
||||||
|
|
||||||
|
# --------------------------------------------------------------------------- #
|
||||||
|
# helpers #
|
||||||
|
# --------------------------------------------------------------------------- #
|
||||||
|
|
||||||
|
|
||||||
|
def _fail(msg: str, code: int = 1) -> None:
    """Render *msg* as a rich error line, then abort the CLI with exit ``code``."""
    rendered = f"[bold red]error:[/] {msg}"
    console.print(rendered)
    raise typer.Exit(code)
|
||||||
|
|
||||||
|
|
||||||
|
def _read_yaml(path: Path) -> dict[str, Any]:
    """Parse *path* as YAML, exiting with a friendly hint when it is absent.

    An empty (or comments-only) file yields ``{}`` rather than ``None``.
    """
    if not path.exists():
        _fail(f"missing {path.name} (run `a2a init` first)")
    parsed = yaml.safe_load(path.read_text())
    return parsed if parsed else {}
|
||||||
|
|
||||||
|
|
||||||
|
def _render_template(template: str, /, **values: Any) -> str:
    """Load *template* from the packaged ``templates`` directory and fill it.

    Substitution is a tiny mustache-style replace of ``{{ key }}`` markers;
    this avoids pulling in Jinja just for flat key/value templates.

    The kwargs bundle was renamed from ``**vars`` (which shadowed the
    ``vars`` builtin) to ``**values``; callers are unaffected because a
    ``**``-parameter's name is not part of the call interface.
    """
    text = (
        resources.files("a2a_pack.cli.templates")
        .joinpath(template)
        .read_text(encoding="utf-8")
    )
    # tiny mustache-style substitution; avoids pulling Jinja just for {{ x }}
    for key, value in values.items():
        text = text.replace("{{ " + key + " }}", str(value))
    return text
|
||||||
|
|
||||||
|
|
||||||
|
def _slug_to_class(slug: str) -> str:
|
||||||
|
parts = re.split(r"[-_\s]+", slug.strip())
|
||||||
|
return "".join(p[:1].upper() + p[1:].lower() for p in parts if p) or "MyAgent"
|
||||||
|
|
||||||
|
|
||||||
|
def _git_short_sha(project: Path) -> str | None:
|
||||||
|
try:
|
||||||
|
out = subprocess.run(
|
||||||
|
["git", "-C", str(project), "rev-parse", "--short=12", "HEAD"],
|
||||||
|
capture_output=True,
|
||||||
|
text=True,
|
||||||
|
check=True,
|
||||||
|
).stdout.strip()
|
||||||
|
return out or None
|
||||||
|
except (subprocess.CalledProcessError, FileNotFoundError):
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def _sdk_source_dir() -> Path:
    """Locate the on-disk a2a_pack source tree for the build context.

    Walks up from this module (``.../a2a_pack/cli`` -> ``.../a2a_pack`` ->
    project root) and sanity-checks that ``pyproject.toml`` is present.
    """
    pkg_dir = Path(__file__).resolve().parents[1]  # .../a2a_pack
    root = pkg_dir.parent  # .../apps/a2a
    if not (root / "pyproject.toml").exists():
        _fail(f"could not find a2a-pack source root from {pkg_dir}")
    return root
|
||||||
|
|
||||||
|
|
||||||
|
def _run(cmd: list[str], **kwargs: Any) -> subprocess.CompletedProcess[str]:
    """Echo *cmd* shell-style in dim text, then run it.

    Always uses ``check=True`` (raises on non-zero exit) and text-mode I/O;
    extra kwargs are forwarded to :func:`subprocess.run`.
    """
    console.print(f"[dim]$ {' '.join(cmd)}[/]")
    completed = subprocess.run(cmd, check=True, text=True, **kwargs)
    return completed
|
||||||
|
|
||||||
|
|
||||||
|
# --------------------------------------------------------------------------- #
|
||||||
|
# auth #
|
||||||
|
# --------------------------------------------------------------------------- #
|
||||||
|
|
||||||
|
|
||||||
|
def _client(api: str | None = None) -> ControlPlaneClient:
    """Build a control-plane client for ``api`` (or the resolved default),
    attaching the cached JWT when one is stored locally."""
    stored = credentials.load()
    return ControlPlaneClient(
        credentials.resolve_api_url(api),
        token=None if stored is None else stored.token,
    )
|
||||||
|
|
||||||
|
|
||||||
|
@app.command()
def signup(
    email: str = typer.Option(..., "--email", "-e", prompt=True),
    password: str = typer.Option(
        ..., "--password", "-p", prompt=True, hide_input=True, confirmation_prompt=True
    ),
    api: str = typer.Option(credentials.DEFAULT_API_URL, "--api"),
) -> None:
    """Create an account on the control plane and store the JWT locally."""
    try:
        out = ControlPlaneClient(api).signup(email, password)
    except ApiError as exc:
        # _fail raises typer.Exit, so `out` is always bound past this point.
        _fail(str(exc))
    credentials.save(api, out["access_token"], out["user"]["email"])
    console.print(f"[green]signed up[/] as [cyan]{out['user']['email']}[/] @ {api}")
|
||||||
|
|
||||||
|
|
||||||
|
@app.command()
def login(
    email: str = typer.Option(..., "--email", "-e", prompt=True),
    password: str = typer.Option(..., "--password", "-p", prompt=True, hide_input=True),
    api: str = typer.Option(credentials.DEFAULT_API_URL, "--api"),
) -> None:
    """Authenticate with the control plane and cache the JWT."""
    try:
        out = ControlPlaneClient(api).login(email, password)
    except ApiError as exc:
        # _fail raises typer.Exit, so `out` is always bound past this point.
        _fail(str(exc))
    credentials.save(api, out["access_token"], out["user"]["email"])
    console.print(f"[green]logged in[/] as [cyan]{out['user']['email']}[/]")
|
||||||
|
|
||||||
|
|
||||||
|
@app.command()
def logout() -> None:
    """Forget the cached JWT."""
    # Idempotent: the return value only decides which message is shown.
    cleared = credentials.clear()
    console.print("[green]logged out[/]" if cleared else "(not logged in)")
|
||||||
|
|
||||||
|
|
||||||
|
@app.command()
def whoami() -> None:
    """Show the currently logged-in user."""
    creds = credentials.load()
    if creds is None:
        _fail("not logged in (run `a2a login` or `a2a signup`)")
    try:
        # Query the control plane rather than echoing the cached email.
        me = _client().me()
    except ApiError as exc:
        _fail(str(exc))
    console.print(f"[cyan]{me['email']}[/] ({creds.api_url})")
|
||||||
|
|
||||||
|
|
||||||
|
@app.command(name="agents")
def list_agents(
    api: str | None = typer.Option(None, "--api"),
) -> None:
    """List agents visible to the current user."""
    try:
        rows = _client(api).list_agents()
    except ApiError as exc:
        _fail(str(exc))
    if not rows:
        console.print("(no agents)")
        return
    for r in rows:
        # "-" placeholder when the agent record has no routed URL.
        url = r.get("url") or "-"
        console.print(
            f" [cyan]{r['name']}[/] v{r['version']} [{r['status']}] {url}"
        )
|
||||||
|
|
||||||
|
|
||||||
|
# --------------------------------------------------------------------------- #
|
||||||
|
# init #
|
||||||
|
# --------------------------------------------------------------------------- #
|
||||||
|
|
||||||
|
|
||||||
|
@app.command()
def init(
    name: str = typer.Argument(..., help="Agent / project slug, e.g. research-agent"),
    description: str = typer.Option("A new A2A agent", "--description", "-d"),
    target: Path = typer.Option(Path("."), "--target", "-t", help="Parent dir for the new project"),
) -> None:
    """Scaffold a new agent project."""
    project = target / name
    if project.exists():
        # Refuse to overwrite an existing directory.
        _fail(f"{project} already exists")
    project.mkdir(parents=True)
    class_name = _slug_to_class(name)

    # Relative path in the new project -> rendered template content.
    files = {
        "agent.py": _render_template(
            "agent.py.tmpl",
            name=name,
            class_name=class_name,
            description=description,
        ),
        "a2a.yaml": _render_template(
            "a2a.yaml.tmpl", name=name, class_name=class_name
        ),
        "requirements.txt": _render_template("requirements.txt.tmpl"),
        "Dockerfile": _render_template(
            "Dockerfile.tmpl", entrypoint=f"agent:{class_name}"
        ),
        ".dockerignore": _render_template("dockerignore.tmpl"),
        ".gitea/workflows/build.yml": _render_template(
            "workflow.yml.tmpl", name=name
        ),
        "deploy/20-deployment.yaml": _render_template(
            "deployment.yaml.tmpl", name=name
        ),
    }
    for relpath, content in files.items():
        target_path = project / relpath
        # Nested paths (.gitea/workflows, deploy/) need their parents created.
        target_path.parent.mkdir(parents=True, exist_ok=True)
        target_path.write_text(content)
        console.print(f" [green]+[/] {project}/{relpath}")

    console.print(
        Panel.fit(
            f"[bold]{name}[/] scaffolded at [cyan]{project}[/]\n\n"
            "next:\n"
            f" cd {project}\n"
            " a2a card # see what your agent declares\n"
            " a2a deploy # ship it",
            title="ok",
        )
    )
|
||||||
|
|
||||||
|
|
||||||
|
# --------------------------------------------------------------------------- #
|
||||||
|
# validate / card / run #
|
||||||
|
# --------------------------------------------------------------------------- #
|
||||||
|
|
||||||
|
|
||||||
|
@app.command()
def validate(
    project: Path = typer.Option(Path("."), "--project", "-p"),
) -> None:
    """Load the agent and print its Card schema. Exits non-zero on errors."""
    cfg = _read_yaml(project / "a2a.yaml")
    # Importing the class IS the validation: load_agent_class raises on a
    # malformed entrypoint, a missing attribute, or a non-agent class.
    cls = load_agent_class(cfg["entrypoint"], project_dir=project)
    console.print(f"[green]ok[/] {cls.name} v{cls.version} ({len(cls._skills)} skills)")
|
||||||
|
|
||||||
|
|
||||||
|
@app.command()
def card(
    project: Path = typer.Option(Path("."), "--project", "-p"),
) -> None:
    """Print the Agent Card JSON for the project's agent."""
    cfg = _read_yaml(project / "a2a.yaml")
    cls = load_agent_class(cfg["entrypoint"], project_dir=project)
    # NOTE(review): assumes the agent class has a no-argument constructor.
    console.print_json(cls().card().model_dump_json())
|
||||||
|
|
||||||
|
|
||||||
|
@app.command()
def run(
    entrypoint: str = typer.Option(..., "--entrypoint", "-e", help="module:Class"),
    host: str = typer.Option("0.0.0.0", "--host"),
    port: int = typer.Option(8000, "--port"),
    project: Path = typer.Option(Path("."), "--project", "-p"),
) -> None:
    """Run the agent's HTTP server locally (used inside the container too)."""
    # Lazy import — presumably keeps the HTTP stack out of commands that
    # never serve; confirm before moving to module scope.
    from ..serve import serve

    cls = load_agent_class(entrypoint, project_dir=project)
    serve(cls(), host=host, port=port)
|
||||||
|
|
||||||
|
|
||||||
|
# --------------------------------------------------------------------------- #
|
||||||
|
# build / deploy #
|
||||||
|
# --------------------------------------------------------------------------- #
|
||||||
|
|
||||||
|
|
||||||
|
def _resolve_image_ref(name: str, version: str, project: Path, registry: str) -> str:
    """Compose the image reference ``<registry>/agents/<name>:<tag>``.

    The tag is ``<version>-<sha>`` when *project* is a git checkout with a
    resolvable HEAD, or plain ``<version>`` otherwise.
    """
    sha = _git_short_sha(project)
    if sha:
        tag = f"{version}-{sha}"
    else:
        tag = version
    return f"{registry}/agents/{name}:{tag}"
|
||||||
|
|
||||||
|
|
||||||
|
def _stage_build_context(project: Path, dst: Path) -> None:
    """Copy project files + bundled SDK source into a temp build dir."""
    skip = {".git", ".venv", "__pycache__", "_a2a_sdk"}
    for entry in project.iterdir():
        if entry.name in skip:
            continue
        dest = dst / entry.name
        if entry.is_dir():
            shutil.copytree(entry, dest, ignore=shutil.ignore_patterns("__pycache__"))
        else:
            shutil.copy2(entry, dest)

    # Bundle the SDK source next to the project under a fixed name.
    shutil.copytree(
        _sdk_source_dir(),
        dst / "_a2a_sdk",
        ignore=shutil.ignore_patterns(
            ".venv", "dist", "build", "__pycache__", "*.egg-info", ".pytest_cache"
        ),
    )
|
||||||
|
|
||||||
|
|
||||||
|
@app.command()
def build(
    project: Path = typer.Option(Path("."), "--project", "-p"),
    registry: str = typer.Option(DEFAULT_REGISTRY_HOST, "--registry"),
    push: bool = typer.Option(False, "--push", help="Also push the built image"),
) -> None:
    """Build (and optionally push) the container image for the agent."""
    cfg = _read_yaml(project / "a2a.yaml")
    image = _resolve_image_ref(cfg["name"], cfg["version"], project, registry)

    # Stage into a temp dir so the SDK source can sit next to the project
    # files without polluting the working tree.
    with tempfile.TemporaryDirectory(prefix="a2a-build-") as tmp:
        ctx = Path(tmp)
        _stage_build_context(project, ctx)
        _run(["docker", "build", "-t", image, str(ctx)])

    console.print(f"[green]built[/] [cyan]{image}[/]")
    if push:
        _run(["docker", "push", image])
        console.print(f"[green]pushed[/] [cyan]{image}[/]")
|
||||||
|
|
||||||
|
|
||||||
|
def _git_push_source(project: Path, push_url: str) -> None:
    """Initialize the project as a git repo (if needed) and push to ``push_url``.

    Idempotent: re-running on an existing repo just commits any changes
    and pushes.
    """
    git_dir = project / ".git"
    if not git_dir.exists():
        _run(["git", "-C", str(project), "init", "-q", "-b", "main"])
        _run(["git", "-C", str(project), "config", "user.email", "agent@a2a.local"])
        _run(["git", "-C", str(project), "config", "user.name", "agent"])
    # Ensure remote
    # check=False here: a non-zero exit just means "no origin yet".
    have_remote = subprocess.run(
        ["git", "-C", str(project), "remote", "get-url", "origin"],
        capture_output=True,
        text=True,
        check=False,
    ).returncode == 0
    if have_remote:
        _run(["git", "-C", str(project), "remote", "set-url", "origin", push_url])
    else:
        _run(["git", "-C", str(project), "remote", "add", "origin", push_url])
    _run(["git", "-C", str(project), "add", "-A"])
    # Empty porcelain output == clean tree; skip the commit in that case.
    status = subprocess.run(
        ["git", "-C", str(project), "status", "--porcelain"],
        capture_output=True,
        text=True,
        check=True,
    ).stdout.strip()
    if status:
        _run(["git", "-C", str(project), "commit", "-q", "-m", "deploy"])
    # Pull --rebase first to integrate any auto-bump commits the runner pushed back.
    # check=False: best-effort — the very first push has no upstream to rebase onto.
    subprocess.run(
        ["git", "-C", str(project), "pull", "--rebase", "origin", "main"],
        capture_output=True,
        text=True,
        check=False,
    )
    _run(["git", "-C", str(project), "push", "-u", "origin", "main"])
|
||||||
|
|
||||||
|
|
||||||
|
@app.command()
def deploy(
    project: Path = typer.Option(Path("."), "--project", "-p"),
    public: bool | None = typer.Option(None, "--public/--private"),
    api: str | None = typer.Option(None, "--api", help="Override control plane URL"),
) -> None:
    """Push source to gitea; the platform builds + deploys.

    No local docker. The CLI:
    1. asks the control plane to provision a gitea repo + ArgoCD app
    2. ``git push``es the project source to that repo
    The Gitea Actions runner builds the image and ArgoCD reconciles
    ``deploy/`` onto the cluster. Returns the public URL once registered.
    """
    creds = credentials.load()
    if creds is None:
        _fail("not logged in (run `a2a signup` or `a2a login`)")

    cfg = _read_yaml(project / "a2a.yaml")
    cls = load_agent_class(cfg["entrypoint"], project_dir=project)
    # CLI flag wins; otherwise fall back to a2a.yaml expose.public (default True).
    is_public = (
        public if public is not None else bool(cfg.get("expose", {}).get("public", True))
    )
    description = cfg.get("description", cls.description or "")

    # Both artifacts are required for the platform-side build + reconcile.
    if not (project / "deploy").exists() or not (project / ".gitea/workflows/build.yml").exists():
        _fail(
            "project missing deploy/ or .gitea/workflows/. "
            "Re-run `a2a init` or upgrade the scaffold."
        )

    client = _client(api)
    console.print(f"[dim]asking control plane to provision repo + argo app...[/]")
    try:
        prov = client.from_source(
            name=cls.name,
            description=description,
            version=cfg["version"],
            public=is_public,
        )
    except ApiError as exc:
        _fail(str(exc))

    console.print(f"[dim]pushing source → {prov['repo_url']}[/]")
    _git_push_source(project, prov["push_url"])

    # Human-facing summary; "url"/"card" are None until the platform routes it.
    summary = {
        "agent": prov["name"],
        "repo": prov["repo_url"],
        "url": prov.get("expected_url"),
        "card": f"{prov['expected_url']}/.well-known/agent-card" if prov.get("expected_url") else None,
        "next": "the runner is building. `a2a agents` to check status, or curl the url in ~30s",
    }
    console.print(
        Panel.fit(
            json.dumps(summary, indent=2),
            title="[bold green]shipped[/]",
        )
    )
|
||||||
|
|
||||||
|
|
||||||
|
# Used as `python -m a2a_pack.cli.main`
|
||||||
|
if __name__ == "__main__": # pragma: no cover
|
||||||
|
app()
|
||||||
133
a2a_pack/cli/manifests.py
Normal file
133
a2a_pack/cli/manifests.py
Normal file
@@ -0,0 +1,133 @@
|
|||||||
|
"""Generate Kubernetes manifests for a deployed agent.
|
||||||
|
|
||||||
|
Targets the existing local cluster: namespace ``agents``, registry at
|
||||||
|
``localhost:30500``, traefik ingress at ``<name>.127-0-0-1.nip.io``.
|
||||||
|
"""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
import yaml
|
||||||
|
|
||||||
|
from ..agent import A2AAgent
|
||||||
|
|
||||||
|
NAMESPACE = "agents"
|
||||||
|
INGRESS_HOST_TEMPLATE = "{name}.127-0-0-1.nip.io"
|
||||||
|
|
||||||
|
|
||||||
|
def render_manifests(
    agent_cls: type[A2AAgent],
    *,
    image: str,
    public: bool = True,
) -> str:
    """Return a multi-doc YAML string ready for ``kubectl apply -f -``.

    Emits Namespace, Deployment, Service, and (when ``public``) Ingress
    documents. Key insertion order matters: ``sort_keys=False`` below means
    the dict literals' order is the output order.
    """
    rt = agent_cls.runtime()
    name = agent_cls.name
    docs: list[dict[str, Any]] = []

    # Namespace — shared "agents" namespace for every deployed agent.
    docs.append(
        {
            "apiVersion": "v1",
            "kind": "Namespace",
            "metadata": {"name": NAMESPACE},
        }
    )

    docs.append(
        {
            "apiVersion": "apps/v1",
            "kind": "Deployment",
            "metadata": {
                "name": name,
                "namespace": NAMESPACE,
                "labels": {
                    "app": name,
                    "a2a/version": agent_cls.version,
                    "a2a/lifecycle": rt.lifecycle.value,
                },
            },
            "spec": {
                "replicas": 1,
                "selector": {"matchLabels": {"app": name}},
                "template": {
                    "metadata": {"labels": {"app": name}},
                    "spec": {
                        "containers": [
                            {
                                "name": "agent",
                                "image": image,
                                "imagePullPolicy": "Always",
                                "ports": [{"containerPort": 8000, "name": "http"}],
                                "readinessProbe": {
                                    "httpGet": {"path": "/healthz", "port": 8000},
                                    "initialDelaySeconds": 2,
                                    "periodSeconds": 5,
                                },
                                "livenessProbe": {
                                    "httpGet": {"path": "/healthz", "port": 8000},
                                    "initialDelaySeconds": 10,
                                    "periodSeconds": 15,
                                },
                                # requests and limits are set to the same
                                # values from the agent's declared resources.
                                "resources": {
                                    "requests": {
                                        "cpu": rt.resources.cpu,
                                        "memory": rt.resources.memory,
                                    },
                                    "limits": {
                                        "cpu": rt.resources.cpu,
                                        "memory": rt.resources.memory,
                                    },
                                },
                            }
                        ]
                    },
                },
            },
        }
    )

    # Service — stable in-cluster endpoint, port 80 -> container 8000.
    docs.append(
        {
            "apiVersion": "v1",
            "kind": "Service",
            "metadata": {"name": name, "namespace": NAMESPACE},
            "spec": {
                "type": "ClusterIP",
                "selector": {"app": name},
                "ports": [{"name": "http", "port": 80, "targetPort": 8000}],
            },
        }
    )

    # Ingress only for public agents; host is <name>.127-0-0-1.nip.io.
    if public:
        docs.append(
            {
                "apiVersion": "networking.k8s.io/v1",
                "kind": "Ingress",
                "metadata": {"name": name, "namespace": NAMESPACE},
                "spec": {
                    "rules": [
                        {
                            "host": INGRESS_HOST_TEMPLATE.format(name=name),
                            "http": {
                                "paths": [
                                    {
                                        "path": "/",
                                        "pathType": "Prefix",
                                        "backend": {
                                            "service": {
                                                "name": name,
                                                "port": {"number": 80},
                                            }
                                        },
                                    }
                                ]
                            },
                        }
                    ]
                },
            }
        )

    # Each safe_dump ends with "\n", so joining on "---\n" yields valid
    # multi-document YAML.
    return "---\n".join(yaml.safe_dump(d, sort_keys=False) for d in docs)
|
||||||
14
a2a_pack/cli/templates/Dockerfile.tmpl
Normal file
14
a2a_pack/cli/templates/Dockerfile.tmpl
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
FROM registry.127-0-0-1.nip.io/a2a/a2a-pack-base:latest
|
||||||
|
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
COPY requirements.txt .
|
||||||
|
RUN pip install --no-cache-dir -r requirements.txt
|
||||||
|
|
||||||
|
COPY . .
|
||||||
|
|
||||||
|
ENV A2A_ENTRYPOINT={{ entrypoint }}
|
||||||
|
ENV PORT=8000
|
||||||
|
EXPOSE 8000
|
||||||
|
|
||||||
|
CMD a2a run --entrypoint "$A2A_ENTRYPOINT" --host 0.0.0.0 --port 8000
|
||||||
8
a2a_pack/cli/templates/a2a.yaml.tmpl
Normal file
8
a2a_pack/cli/templates/a2a.yaml.tmpl
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
# Project identity for `a2a deploy`. Most metadata (resources, scopes,
|
||||||
|
# secrets, workspace, etc.) lives on the Python class — this file only
|
||||||
|
# tells the CLI how to find it.
|
||||||
|
name: {{ name }}
|
||||||
|
version: 0.1.0
|
||||||
|
entrypoint: agent:{{ class_name }}
|
||||||
|
expose:
|
||||||
|
public: true
|
||||||
24
a2a_pack/cli/templates/agent.py.tmpl
Normal file
24
a2a_pack/cli/templates/agent.py.tmpl
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
"""{{ name }} agent."""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
from a2a_pack import A2AAgent, NoAuth, RunContext, skill
|
||||||
|
|
||||||
|
|
||||||
|
class {{ class_name }}Config(BaseModel):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class {{ class_name }}(A2AAgent[{{ class_name }}Config, NoAuth]):
|
||||||
|
name = "{{ name }}"
|
||||||
|
description = "{{ description }}"
|
||||||
|
version = "0.1.0"
|
||||||
|
|
||||||
|
config_model = {{ class_name }}Config
|
||||||
|
auth_model = NoAuth
|
||||||
|
|
||||||
|
@skill(description="Say hello")
|
||||||
|
async def hello(self, ctx: RunContext[NoAuth], who: str = "world") -> str:
|
||||||
|
await ctx.emit_progress(f"greeting {who}")
|
||||||
|
return f"hello {who}"
|
||||||
71
a2a_pack/cli/templates/deployment.yaml.tmpl
Normal file
71
a2a_pack/cli/templates/deployment.yaml.tmpl
Normal file
@@ -0,0 +1,71 @@
|
|||||||
|
apiVersion: apps/v1
|
||||||
|
kind: Deployment
|
||||||
|
metadata:
|
||||||
|
name: {{ name }}
|
||||||
|
namespace: agents
|
||||||
|
labels:
|
||||||
|
app: {{ name }}
|
||||||
|
a2a/managed-by: control-plane
|
||||||
|
spec:
|
||||||
|
replicas: 1
|
||||||
|
strategy:
|
||||||
|
type: Recreate
|
||||||
|
selector:
|
||||||
|
matchLabels:
|
||||||
|
app: {{ name }}
|
||||||
|
template:
|
||||||
|
metadata:
|
||||||
|
labels:
|
||||||
|
app: {{ name }}
|
||||||
|
spec:
|
||||||
|
containers:
|
||||||
|
- name: agent
|
||||||
|
# tag is rewritten by the build workflow on every push
|
||||||
|
image: registry.127-0-0-1.nip.io/agents/{{ name }}:latest
|
||||||
|
imagePullPolicy: Always
|
||||||
|
ports:
|
||||||
|
- containerPort: 8000
|
||||||
|
name: http
|
||||||
|
readinessProbe:
|
||||||
|
httpGet: {path: /healthz, port: 8000}
|
||||||
|
initialDelaySeconds: 2
|
||||||
|
periodSeconds: 5
|
||||||
|
livenessProbe:
|
||||||
|
httpGet: {path: /healthz, port: 8000}
|
||||||
|
initialDelaySeconds: 10
|
||||||
|
periodSeconds: 15
|
||||||
|
resources:
|
||||||
|
requests: {cpu: 100m, memory: 256Mi}
|
||||||
|
limits: {cpu: 200m, memory: 256Mi}
|
||||||
|
---
|
||||||
|
apiVersion: v1
|
||||||
|
kind: Service
|
||||||
|
metadata:
|
||||||
|
name: {{ name }}
|
||||||
|
namespace: agents
|
||||||
|
spec:
|
||||||
|
type: ClusterIP
|
||||||
|
selector:
|
||||||
|
app: {{ name }}
|
||||||
|
ports:
|
||||||
|
- name: http
|
||||||
|
port: 80
|
||||||
|
targetPort: 8000
|
||||||
|
---
|
||||||
|
apiVersion: networking.k8s.io/v1
|
||||||
|
kind: Ingress
|
||||||
|
metadata:
|
||||||
|
name: {{ name }}
|
||||||
|
namespace: agents
|
||||||
|
spec:
|
||||||
|
rules:
|
||||||
|
- host: {{ name }}.127-0-0-1.nip.io
|
||||||
|
http:
|
||||||
|
paths:
|
||||||
|
- path: /
|
||||||
|
pathType: Prefix
|
||||||
|
backend:
|
||||||
|
service:
|
||||||
|
name: {{ name }}
|
||||||
|
port:
|
||||||
|
number: 80
|
||||||
7
a2a_pack/cli/templates/dockerignore.tmpl
Normal file
7
a2a_pack/cli/templates/dockerignore.tmpl
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
__pycache__
|
||||||
|
*.pyc
|
||||||
|
.venv
|
||||||
|
.git
|
||||||
|
.pytest_cache
|
||||||
|
.mypy_cache
|
||||||
|
node_modules
|
||||||
1
a2a_pack/cli/templates/requirements.txt.tmpl
Normal file
1
a2a_pack/cli/templates/requirements.txt.tmpl
Normal file
@@ -0,0 +1 @@
|
|||||||
|
# add agent-specific deps here; a2a-pack is auto-installed by the deploy build
|
||||||
35
a2a_pack/cli/templates/workflow.yml.tmpl
Normal file
35
a2a_pack/cli/templates/workflow.yml.tmpl
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
name: build
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [main]
|
||||||
|
paths-ignore:
|
||||||
|
- 'deploy/**'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: build image
|
||||||
|
run: |
|
||||||
|
IMG=registry.127-0-0-1.nip.io/agents/{{ name }}
|
||||||
|
docker build -t "$IMG:$GITHUB_SHA" -t "$IMG:latest" .
|
||||||
|
docker push "$IMG:$GITHUB_SHA"
|
||||||
|
docker push "$IMG:latest"
|
||||||
|
|
||||||
|
- name: bump deploy manifest
|
||||||
|
run: |
|
||||||
|
IMG=registry.127-0-0-1.nip.io/agents/{{ name }}
|
||||||
|
sed -i "s|image: $IMG:.*|image: $IMG:$GITHUB_SHA|" deploy/20-deployment.yaml
|
||||||
|
git config user.email "ci@a2a.local"
|
||||||
|
git config user.name "ci"
|
||||||
|
git add deploy/20-deployment.yaml
|
||||||
|
if git diff --staged --quiet; then
|
||||||
|
echo "no manifest changes"
|
||||||
|
else
|
||||||
|
git commit -m "ci: bump image to $GITHUB_SHA"
|
||||||
|
git push "http://gitea_admin:gitea_admin@gitea-http.gitea.svc.cluster.local:3000/gitea_admin/{{ name }}.git" HEAD:main
|
||||||
|
fi
|
||||||
192
a2a_pack/context.py
Normal file
192
a2a_pack/context.py
Normal file
@@ -0,0 +1,192 @@
|
|||||||
|
"""Runtime context handed to skill handlers.
|
||||||
|
|
||||||
|
The same agent code runs unchanged on local dev, Docker, Kubernetes, and
|
||||||
|
hosted runtimes — the runtime provides a concrete :class:`RunContext` that
|
||||||
|
implements artifact storage, secret access, streaming, and cancellation.
|
||||||
|
"""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from dataclasses import dataclass, field
|
||||||
|
from typing import Any, Generic, Sequence, TypeVar
|
||||||
|
|
||||||
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
from .workspace import WorkspaceClient
|
||||||
|
|
||||||
|
AuthT = TypeVar("AuthT", bound=BaseModel)
|
||||||
|
|
||||||
|
|
||||||
|
class CancelledByCaller(RuntimeError):
    """Raised by :meth:`RunContext.check_cancelled` when the caller cancelled."""
    # NOTE(review): subclasses RuntimeError (not asyncio.CancelledError), so
    # ordinary `except Exception` handlers in agent code will catch it.
|
||||||
|
|
||||||
|
|
||||||
|
class MissingScopes(PermissionError):
    """Raised by :meth:`RunContext.require_scopes` when caller lacks scopes."""

    def __init__(self, missing: Sequence[str]) -> None:
        # Keep the missing scopes as an immutable tuple in caller order;
        # the message sorts them only for stable, readable output.
        missing_scopes = tuple(missing)
        self.missing = missing_scopes
        message = f"missing scopes: {sorted(missing_scopes)}"
        super().__init__(message)
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
class ArtifactRef:
    """Opaque handle to a stored artifact (blob, file, etc.)."""

    # Logical name given at write time (see RunContext.write_artifact).
    name: str
    # Storage location; scheme is runtime-specific — do not parse it.
    uri: str
    mime_type: str
    size_bytes: int
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
class AgentEvent:
    """A structured event emitted during a skill run."""

    # Discriminator: "progress", "text_delta", "artifact", "error" are the
    # kinds produced by the RunContext.emit_* helpers; runtimes may add more.
    kind: str
    payload: dict[str, Any] = field(default_factory=dict)
|
||||||
|
|
||||||
|
|
||||||
|
class RunContext(ABC, Generic[AuthT]):
|
||||||
|
"""Per-invocation context.
|
||||||
|
|
||||||
|
A new context is constructed by the runtime for every skill call. It
|
||||||
|
carries caller identity (``auth``), the task identity, and runtime
|
||||||
|
capabilities (artifacts, secrets, streaming, cancellation).
|
||||||
|
|
||||||
|
Agents must depend only on this abstract interface, never on a concrete
|
||||||
|
runtime implementation.
|
||||||
|
"""
|
||||||
|
|
||||||
|
task_id: str
|
||||||
|
auth: AuthT
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
async def emit_event(self, event: AgentEvent) -> None:
|
||||||
|
"""Publish a structured event to subscribers (UI, logs, traces)."""
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
async def write_artifact(
|
||||||
|
self, name: str, data: bytes, mime_type: str
|
||||||
|
) -> ArtifactRef:
|
||||||
|
"""Persist ``data`` as a named artifact and return a reference."""
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
async def check_cancelled(self) -> None:
|
||||||
|
"""Raise :class:`CancelledByCaller` if the caller cancelled."""
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def secret(self, name: str) -> str:
|
||||||
|
"""Look up a runtime-injected secret by logical name."""
|
||||||
|
|
||||||
|
@property
|
||||||
|
@abstractmethod
|
||||||
|
def workspace(self) -> WorkspaceClient:
|
||||||
|
"""Negotiation surface for workspace access.
|
||||||
|
|
||||||
|
Raises if the agent's :attr:`A2AAgent.workspace_access` is disabled.
|
||||||
|
"""
|
||||||
|
|
||||||
|
# --- concrete helpers built on emit_event ---
|
||||||
|
|
||||||
|
async def emit_progress(self, message: str) -> None:
|
||||||
|
"""Emit a human-readable progress event."""
|
||||||
|
await self.emit_event(AgentEvent(kind="progress", payload={"message": message}))
|
||||||
|
|
||||||
|
async def emit_text_delta(self, text: str) -> None:
|
||||||
|
"""Emit a streamed token chunk (for LLM-style streaming output)."""
|
||||||
|
await self.emit_event(AgentEvent(kind="text_delta", payload={"text": text}))
|
||||||
|
|
||||||
|
async def emit_artifact(self, ref: ArtifactRef) -> None:
|
||||||
|
"""Notify subscribers that a new artifact is available."""
|
||||||
|
await self.emit_event(
|
||||||
|
AgentEvent(
|
||||||
|
kind="artifact",
|
||||||
|
payload={
|
||||||
|
"name": ref.name,
|
||||||
|
"uri": ref.uri,
|
||||||
|
"mime_type": ref.mime_type,
|
||||||
|
"size_bytes": ref.size_bytes,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
async def emit_error(self, message: str, *, code: str | None = None) -> None:
|
||||||
|
"""Emit a structured error event (does not raise)."""
|
||||||
|
await self.emit_event(
|
||||||
|
AgentEvent(kind="error", payload={"message": message, "code": code})
|
||||||
|
)
|
||||||
|
|
||||||
|
def require_scopes(self, required: Sequence[str]) -> None:
    """Ensure ``self.auth`` carries every scope in ``required``.

    Auth models without a ``scopes`` attribute (e.g. :class:`NoAuth`)
    are treated as having an empty scope set.

    Raises:
        MissingScopes: listing the scopes that are absent.
    """
    if not required:
        return
    granted = set(getattr(self.auth, "scopes", ()) or ())
    absent = [scope for scope in required if scope not in granted]
    if absent:
        raise MissingScopes(absent)
|
||||||
|
|
||||||
|
|
||||||
|
class LocalRunContext(RunContext[AuthT]):
    """In-memory context for local development and tests.

    Events and artifacts are kept in plain lists/dicts, secrets come from
    a mapping supplied at construction time, and cancellation is modelled
    with an :class:`asyncio.Event`.
    """

    def __init__(
        self,
        *,
        auth: AuthT,
        task_id: str = "local-task",
        secrets: dict[str, str] | None = None,
        workspace: WorkspaceClient | None = None,
    ) -> None:
        self.task_id = task_id
        self.auth = auth
        self._secrets: dict[str, str] = dict(secrets or {})
        self._workspace = workspace
        self._cancel = asyncio.Event()
        self.events: list[AgentEvent] = []
        self.artifacts: dict[str, bytes] = {}

    @property
    def workspace(self) -> WorkspaceClient:
        # Only available when a workspace was bound at construction time.
        if self._workspace is not None:
            return self._workspace
        raise PermissionError(
            "no workspace bound to this context; agent did not declare "
            "workspace_access or runtime did not provision one"
        )

    async def emit_event(self, event: AgentEvent) -> None:
        self.events.append(event)

    async def write_artifact(
        self, name: str, data: bytes, mime_type: str
    ) -> ArtifactRef:
        # Store the bytes and hand back a synthetic in-memory URI.
        self.artifacts[name] = data
        return ArtifactRef(
            name=name,
            uri=f"memory://{self.task_id}/{name}",
            mime_type=mime_type,
            size_bytes=len(data),
        )

    async def check_cancelled(self) -> None:
        if self._cancel.is_set():
            raise CancelledByCaller(self.task_id)

    def cancel(self) -> None:
        """Flag the context as cancelled for subsequent check_cancelled calls."""
        self._cancel.set()

    def secret(self, name: str) -> str:
        try:
            return self._secrets[name]
        except KeyError as exc:
            raise KeyError(f"unknown secret: {name!r}") from exc
|
||||||
85
a2a_pack/runtime.py
Normal file
85
a2a_pack/runtime.py
Normal file
@@ -0,0 +1,85 @@
|
|||||||
|
"""Declarative runtime/deployment metadata.
|
||||||
|
|
||||||
|
These types describe *how* the platform should run an agent: lifecycle,
|
||||||
|
state needs, isolation level, resource budget, egress policy. They are
|
||||||
|
read by the deployer and by the registry; agent code itself should not
|
||||||
|
depend on which runtime is selected.
|
||||||
|
"""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from enum import Enum
|
||||||
|
|
||||||
|
from pydantic import BaseModel, ConfigDict, Field, NonNegativeInt, PositiveInt
|
||||||
|
|
||||||
|
|
||||||
|
class Lifecycle(str, Enum):
    """Process lifetime of a single agent instance."""

    # spawned per-invocation, torn down afterwards
    EPHEMERAL = "ephemeral"
    # spawned per-session, kept until the session ends
    SESSION = "session"
    # long-running service multiplexed across callers
    WARM = "warm"
|
||||||
|
|
||||||
|
|
||||||
|
class State(str, Enum):
    """Kind of state the agent keeps between invocations."""

    NONE = "none"        # purely functional, no retained state
    SESSION = "session"  # in-memory, scoped to one session
    DURABLE = "durable"  # persisted across restarts; storage required
|
||||||
|
|
||||||
|
|
||||||
|
class Sandbox(str, Enum):
    """Isolation level; every agent currently runs under microsandbox.

    Modelled as an enum rather than a constant so the wire format stays
    stable if additional isolation tiers are introduced later.
    """

    MICROSANDBOX = "microsandbox"
|
||||||
|
|
||||||
|
|
||||||
|
class Resources(BaseModel):
    """Resource budget hint consumed by the deployer."""

    model_config = ConfigDict(extra="forbid", frozen=True)

    cpu: str = "100m"      # k8s-style CPU quantity, e.g. "500m", "2"
    memory: str = "256Mi"  # k8s-style memory quantity, e.g. "512Mi", "4Gi"
    gpu: NonNegativeInt = 0
    max_runtime_seconds: PositiveInt = 600
|
||||||
|
|
||||||
|
|
||||||
|
class SkillPolicy(BaseModel):
    """Operational policy for one skill, advertised on the Agent Card."""

    model_config = ConfigDict(extra="forbid", frozen=True)

    timeout_seconds: float | None = None
    idempotent: bool = False
    max_retries: NonNegativeInt = 0
    # purely informational, e.g. "cheap" / "expensive"
    cost_class: str | None = None
|
||||||
|
|
||||||
|
|
||||||
|
class EgressPolicy(BaseModel):
    """External hosts and services the agent may reach."""

    model_config = ConfigDict(extra="forbid", frozen=True)

    allow_hosts: tuple[str, ...] = ()
    # in-cluster service DNS names the agent may call
    allow_internal_services: tuple[str, ...] = ()
    deny_internet_by_default: bool = True
|
||||||
|
|
||||||
|
|
||||||
|
class AgentRuntime(BaseModel):
    """Aggregate runtime declaration, published on the Agent Card."""

    model_config = ConfigDict(extra="forbid", frozen=True)

    lifecycle: Lifecycle = Lifecycle.EPHEMERAL
    state: State = State.NONE
    sandbox: Sandbox = Sandbox.MICROSANDBOX
    resources: Resources = Field(default_factory=Resources)
    concurrency: PositiveInt = 1
    egress: EgressPolicy = Field(default_factory=EgressPolicy)
    tools_used: tuple[str, ...] = ()
|
||||||
3
a2a_pack/serve/__init__.py
Normal file
3
a2a_pack/serve/__init__.py
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
from .asgi import build_app, serve
|
||||||
|
|
||||||
|
__all__ = ["build_app", "serve"]
|
||||||
104
a2a_pack/serve/asgi.py
Normal file
104
a2a_pack/serve/asgi.py
Normal file
@@ -0,0 +1,104 @@
|
|||||||
|
"""HTTP adapter that turns any :class:`A2AAgent` into a service.
|
||||||
|
|
||||||
|
This is intentionally minimal: it covers the surface needed to plug into
|
||||||
|
the wider A2A ecosystem and the platform's control plane.
|
||||||
|
|
||||||
|
Endpoints:
|
||||||
|
GET /healthz -> liveness
|
||||||
|
GET /.well-known/agent-card -> Agent Card JSON
|
||||||
|
POST /invoke/{skill} -> invoke skill (JSON in, JSON out)
|
||||||
|
|
||||||
|
Auth: a single bearer token is read from the ``A2A_API_KEY`` env var. If set,
|
||||||
|
all routes except ``/healthz`` and the card require ``Authorization: Bearer
|
||||||
|
<key>``. The bearer token is materialized into the agent's declared
|
||||||
|
``auth_model`` (best-effort: APIKeyAuth, otherwise NoAuth).
|
||||||
|
"""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from fastapi import FastAPI, Header, HTTPException
|
||||||
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
from ..agent import A2AAgent, SkillInputError, SkillNotFound
|
||||||
|
from ..auth import APIKeyAuth, NoAuth
|
||||||
|
from ..context import LocalRunContext, MissingScopes
|
||||||
|
|
||||||
|
|
||||||
|
class _InvokeIn(BaseModel):
    """Request body for ``POST /invoke/{skill_name}``."""

    arguments: dict[str, Any] = {}
|
||||||
|
|
||||||
|
|
||||||
|
def build_app(agent: A2AAgent) -> FastAPI:
    """Build a FastAPI app exposing ``agent`` over HTTP.

    Routes:
        GET  /healthz                  liveness probe
        GET  /.well-known/agent-card   Agent Card JSON
        POST /invoke/{skill_name}      invoke a skill (JSON in, JSON out)

    If the ``A2A_API_KEY`` env var is set, ``/invoke/*`` requires a matching
    ``Authorization: Bearer <key>`` header; ``/healthz`` and the card stay open.
    """
    import secrets  # function-local import, matching the file's style in `serve`

    app = FastAPI(title=type(agent).name, version=type(agent).version)
    api_key = os.environ.get("A2A_API_KEY")

    def _build_auth(provided_key: str | None) -> Any:
        """Materialize the agent's declared auth_model from the bearer token."""
        auth_cls = type(agent).auth_model
        if auth_cls is APIKeyAuth:
            return APIKeyAuth(api_key_id=provided_key or "anonymous")
        if auth_cls is NoAuth:
            return NoAuth()
        # Unknown auth model: try default-construct, else fail loudly.
        try:
            return auth_cls()
        except Exception as exc:  # pragma: no cover - depends on user model
            raise HTTPException(
                status_code=500,
                detail=f"cannot materialize auth_model {auth_cls.__name__}: {exc}",
            ) from exc

    def _check_key(authorization: str | None) -> str | None:
        """Validate the Authorization header; return the bearer token (or None)."""
        if api_key is None:
            return None
        if not authorization or not authorization.lower().startswith("bearer "):
            raise HTTPException(401, "missing bearer token")
        token = authorization.split(None, 1)[1].strip()
        # Constant-time comparison: a plain `!=` short-circuits on the first
        # differing byte and leaks key prefixes through response timing.
        if not secrets.compare_digest(token, api_key):
            raise HTTPException(401, "invalid bearer token")
        return token

    @app.get("/healthz")
    async def healthz() -> dict[str, Any]:
        ok = await agent.health()
        return {"ok": ok, "agent": type(agent).name, "version": type(agent).version}

    @app.get("/.well-known/agent-card")
    async def agent_card() -> dict[str, Any]:
        return agent.card().model_dump(mode="json")

    @app.post("/invoke/{skill_name}")
    async def invoke(
        skill_name: str,
        body: _InvokeIn,
        authorization: str | None = Header(default=None),
    ) -> dict[str, Any]:
        token = _check_key(authorization)
        # Fresh context per request; events are echoed back in the response.
        ctx: LocalRunContext[Any] = LocalRunContext(
            auth=_build_auth(token), task_id=f"http-{skill_name}"
        )
        try:
            result = await agent.invoke_json(skill_name, ctx, body.arguments)
        except SkillNotFound as exc:
            raise HTTPException(404, f"unknown skill: {skill_name}") from exc
        except SkillInputError as exc:
            raise HTTPException(400, str(exc)) from exc
        except MissingScopes as exc:
            raise HTTPException(403, str(exc)) from exc
        return {
            "result": result,
            "events": [{"kind": e.kind, "payload": e.payload} for e in ctx.events],
        }

    return app
|
||||||
|
|
||||||
|
|
||||||
|
def serve(agent: A2AAgent, *, host: str = "0.0.0.0", port: int = 8000) -> None:
    """Serve the agent over HTTP with uvicorn; blocks until shutdown."""
    import uvicorn

    application = build_app(agent)
    uvicorn.run(application, host=host, port=port, log_level="info")
|
||||||
446
a2a_pack/workspace.py
Normal file
446
a2a_pack/workspace.py
Normal file
@@ -0,0 +1,446 @@
|
|||||||
|
"""Workspace capability negotiation.
|
||||||
|
|
||||||
|
Agents never receive a filesystem path. They negotiate a *view* by intent::
|
||||||
|
|
||||||
|
view = await ctx.workspace.open_view(
|
||||||
|
purpose="Fix failing payment test",
|
||||||
|
hints=["payment", "checkout"],
|
||||||
|
file_types=["python"],
|
||||||
|
max_files=10,
|
||||||
|
mode=WorkspaceMode.READ_WRITE_OVERLAY,
|
||||||
|
)
|
||||||
|
for path in view.files:
|
||||||
|
content = await view.read(path)
|
||||||
|
|
||||||
|
The runtime resolves the request (semantic search + dependency graph + git
|
||||||
|
metadata + policy + optional human approval) and returns a bounded grant.
|
||||||
|
Writes are staged as :class:`WorkspacePatch` objects, never applied directly
|
||||||
|
to the host filesystem from inside the sandbox.
|
||||||
|
"""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import re
|
||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from datetime import datetime
|
||||||
|
from enum import Enum
|
||||||
|
from typing import Literal, Sequence
|
||||||
|
|
||||||
|
from pydantic import BaseModel, ConfigDict, NonNegativeInt, PositiveInt
|
||||||
|
|
||||||
|
|
||||||
|
class WorkspaceMode(str, Enum):
    """Access mode requested for a workspace grant."""

    READ_ONLY = "read_only"
    # writes are staged as patches, never applied in place
    READ_WRITE_OVERLAY = "read_write_overlay"
    # discouraged; needs explicit policy approval
    READ_WRITE_DIRECT = "read_write_direct"
|
||||||
|
|
||||||
|
|
||||||
|
class FileType(str, Enum):
    """Coarse file classification used for search filters and grants."""

    PYTHON = "python"
    TYPESCRIPT = "typescript"
    JAVASCRIPT = "javascript"
    YAML = "yaml"
    JSON = "json"
    TOML = "toml"
    MARKDOWN = "markdown"
    SQL = "sql"
    SHELL = "shell"
    OTHER = "other"  # fallback when no known extension matches
|
||||||
|
|
||||||
|
|
||||||
|
class FileMatch(BaseModel):
    """One result row from :meth:`WorkspaceClient.search`."""

    model_config = ConfigDict(extra="forbid", frozen=True)

    path: str
    file_type: FileType
    score: float = 0.0  # relevance; higher sorts first
    summary: str | None = None
    size_bytes: NonNegativeInt = 0
|
||||||
|
|
||||||
|
|
||||||
|
class WorkspaceGrant(BaseModel):
    """An approved access grant over a fixed, bounded set of files."""

    model_config = ConfigDict(extra="forbid", frozen=True)

    grant_id: str
    purpose: str
    files: tuple[FileMatch, ...]
    mode: WorkspaceMode
    reason: str
    expires_at: datetime | None = None
    requires_human_approval: bool = False
|
||||||
|
|
||||||
|
|
||||||
|
class WorkspacePatch(BaseModel):
    """A staged write; applied only when the runtime/approver commits it."""

    model_config = ConfigDict(extra="forbid", frozen=True)

    grant_id: str
    path: str
    operation: Literal["create", "update", "delete"]
    # file bytes for create/update; None for delete
    content: bytes | None = None
|
||||||
|
|
||||||
|
|
||||||
|
class WorkspaceAccess(BaseModel):
    """Class-level workspace policy.

    Use :meth:`none` for agents that never touch a workspace, or
    :meth:`dynamic` to allow capability negotiation within bounds.
    """

    model_config = ConfigDict(extra="forbid", frozen=True)

    enabled: bool = False
    max_files: NonNegativeInt = 0
    allowed_modes: tuple[WorkspaceMode, ...] = ()
    require_reason: bool = True
    deny_patterns: tuple[str, ...] = ()
    require_human_approval: bool = False
    max_total_size_bytes: PositiveInt = 100 * 1024 * 1024

    @classmethod
    def none(cls) -> "WorkspaceAccess":
        """Policy that disables all workspace access."""
        return cls(enabled=False)

    @classmethod
    def dynamic(
        cls,
        *,
        max_files: int = 25,
        allowed_modes: Sequence[WorkspaceMode] = (WorkspaceMode.READ_ONLY,),
        require_reason: bool = True,
        deny_patterns: Sequence[str] = (),
        require_human_approval: bool = False,
        max_total_size_bytes: int = 100 * 1024 * 1024,
    ) -> "WorkspaceAccess":
        """Policy that permits negotiation, bounded by the given limits."""
        return cls(
            enabled=True,
            max_files=max_files,
            allowed_modes=tuple(allowed_modes),
            require_reason=require_reason,
            deny_patterns=tuple(deny_patterns),
            require_human_approval=require_human_approval,
            max_total_size_bytes=max_total_size_bytes,
        )
|
||||||
|
|
||||||
|
|
||||||
|
class WorkspaceDenied(PermissionError):
    """A workspace request violated the agent's declared policy."""
|
||||||
|
|
||||||
|
|
||||||
|
class WorkspaceView(ABC):
    """A bounded view over a granted set of files.

    Returned by :meth:`WorkspaceClient.open_view`. Reads always resolve
    against the granted view; writes come back as :class:`WorkspacePatch`
    objects that the runtime commits (or rejects) outside the sandbox.
    """

    grant: WorkspaceGrant

    @property
    def files(self) -> tuple[FileMatch, ...]:
        """Files covered by the underlying grant."""
        return self.grant.files

    @abstractmethod
    async def read(self, path: str) -> bytes: ...

    @abstractmethod
    async def write(self, path: str, content: bytes) -> WorkspacePatch: ...

    @abstractmethod
    async def delete(self, path: str) -> WorkspacePatch: ...

    @abstractmethod
    async def patches(self) -> tuple[WorkspacePatch, ...]: ...
|
||||||
|
|
||||||
|
|
||||||
|
class WorkspaceClient(ABC):
    """Negotiation surface handed to the agent via ``ctx.workspace``.

    Concrete implementations come from the runtime; agent code must
    program against this interface only.
    """

    @abstractmethod
    async def search(
        self,
        *,
        query: str,
        types: Sequence[FileType] = (),
        limit: int = 20,
    ) -> list[FileMatch]: ...

    @abstractmethod
    async def request_access(
        self,
        *,
        files: Sequence[FileMatch | str],
        mode: WorkspaceMode,
        reason: str,
        purpose: str = "",
    ) -> WorkspaceGrant: ...

    @abstractmethod
    async def open_view(
        self,
        *,
        purpose: str,
        hints: Sequence[str] = (),
        file_types: Sequence[FileType] = (),
        max_files: int = 10,
        mode: WorkspaceMode = WorkspaceMode.READ_ONLY,
        reason: str | None = None,
    ) -> WorkspaceView: ...

    @abstractmethod
    async def list_grants(self) -> list[WorkspaceGrant]: ...
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Local in-memory implementation, for dev/tests.
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
class LocalWorkspaceView(WorkspaceView):
    """In-memory :class:`WorkspaceView` backed by a :class:`LocalWorkspaceClient`."""

    def __init__(
        self,
        grant: WorkspaceGrant,
        client: "LocalWorkspaceClient",
    ) -> None:
        self.grant = grant
        self._client = client
        self._patches: list[WorkspacePatch] = []
        self._granted_paths = {f.path for f in grant.files}

    def _check(self, path: str) -> None:
        # Every operation must stay inside the granted file set.
        if path not in self._granted_paths:
            raise WorkspaceDenied(
                f"path {path!r} not in grant {self.grant.grant_id}"
            )

    def _stage(self, patch: WorkspacePatch) -> WorkspacePatch:
        # Record the staged write and hand it back to the caller.
        self._patches.append(patch)
        return patch

    async def read(self, path: str) -> bytes:
        self._check(path)
        return self._client._files[path]

    async def write(self, path: str, content: bytes) -> WorkspacePatch:
        if self.grant.mode is WorkspaceMode.READ_ONLY:
            raise WorkspaceDenied(f"grant is read-only: {self.grant.grant_id}")
        self._check(path)
        op: Literal["create", "update"] = (
            "update" if path in self._client._files else "create"
        )
        return self._stage(
            WorkspacePatch(
                grant_id=self.grant.grant_id,
                path=path,
                operation=op,
                content=content,
            )
        )

    async def delete(self, path: str) -> WorkspacePatch:
        if self.grant.mode is WorkspaceMode.READ_ONLY:
            raise WorkspaceDenied(f"grant is read-only: {self.grant.grant_id}")
        self._check(path)
        return self._stage(
            WorkspacePatch(
                grant_id=self.grant.grant_id,
                path=path,
                operation="delete",
                content=None,
            )
        )

    async def patches(self) -> tuple[WorkspacePatch, ...]:
        return tuple(self._patches)
|
||||||
|
|
||||||
|
|
||||||
|
class LocalWorkspaceClient(WorkspaceClient):
    """In-memory workspace for local dev and tests.

    Search is naive substring match; ranking is keyword-overlap. Real
    runtime implementations replace this with embeddings/dep-graph search.
    Policy enforcement (:class:`WorkspaceAccess`) IS applied here so tests
    cover the rejection paths.
    """

    # Extension -> file type for _detect(); the first endswith() hit wins,
    # so insertion order matters for overlapping suffixes.
    _EXT_TO_TYPE: dict[str, FileType] = {
        ".py": FileType.PYTHON,
        ".ts": FileType.TYPESCRIPT,
        ".tsx": FileType.TYPESCRIPT,
        ".js": FileType.JAVASCRIPT,
        ".jsx": FileType.JAVASCRIPT,
        ".yaml": FileType.YAML,
        ".yml": FileType.YAML,
        ".json": FileType.JSON,
        ".toml": FileType.TOML,
        ".md": FileType.MARKDOWN,
        ".sql": FileType.SQL,
        ".sh": FileType.SHELL,
    }

    def __init__(
        self,
        files: dict[str, bytes],
        *,
        access: WorkspaceAccess,
    ) -> None:
        """Seed the workspace with ``files`` and enforce the ``access`` policy."""
        # Copy so later mutation of the caller's dict doesn't leak in.
        self._files: dict[str, bytes] = dict(files)
        self._access = access
        self._grants: dict[str, WorkspaceGrant] = {}
        self._counter = 0  # monotonic suffix for grant ids

    def _detect(self, path: str) -> FileType:
        """Classify ``path`` by extension; OTHER when nothing matches."""
        for ext, ft in self._EXT_TO_TYPE.items():
            if path.endswith(ext):
                return ft
        return FileType.OTHER

    def _denied(self, path: str) -> bool:
        """Return True if ``path`` fully matches any policy deny glob."""
        for pat in self._access.deny_patterns:
            regex = re.compile(_glob_to_regex(pat))
            if regex.fullmatch(path):
                return True
        return False

    async def search(
        self,
        *,
        query: str,
        types: Sequence[FileType] = (),
        limit: int = 20,
    ) -> list[FileMatch]:
        """Rank files by how often the query's words occur in path + content.

        Denied paths and type-filtered files are skipped; zero-score files
        are dropped. Raises :class:`WorkspaceDenied` when the workspace is
        disabled by policy.
        """
        if not self._access.enabled:
            raise WorkspaceDenied("workspace disabled by policy")
        type_set = set(types)
        # Split the query into lowercase word terms.
        terms = [t.lower() for t in re.split(r"\W+", query) if t]
        out: list[FileMatch] = []
        for path, data in self._files.items():
            if self._denied(path):
                continue
            ft = self._detect(path)
            if type_set and ft not in type_set:
                continue
            # Path plus lossily-decoded content forms one haystack.
            haystack = (path + "\n" + data.decode("utf-8", errors="ignore")).lower()
            score = sum(haystack.count(t) for t in terms)
            if score == 0:
                continue
            out.append(
                FileMatch(
                    path=path,
                    file_type=ft,
                    score=float(score),
                    size_bytes=len(data),
                )
            )
        out.sort(key=lambda m: -m.score)
        return out[:limit]

    async def request_access(
        self,
        *,
        files: Sequence[FileMatch | str],
        mode: WorkspaceMode,
        reason: str,
        purpose: str = "",
    ) -> WorkspaceGrant:
        """Validate a request against policy and mint a grant.

        Checks, in order: workspace enabled, mode allowed, reason present
        (if required), paths exist, file count, deny patterns, total size.
        Raises :class:`WorkspaceDenied` on the first violation.
        """
        if not self._access.enabled:
            raise WorkspaceDenied("workspace disabled by policy")
        if mode not in self._access.allowed_modes:
            raise WorkspaceDenied(f"mode {mode.value!r} not in allowed_modes")
        if self._access.require_reason and not reason.strip():
            raise WorkspaceDenied("reason required by policy")
        # Normalize bare path strings into FileMatch rows.
        normalized: list[FileMatch] = []
        for f in files:
            if isinstance(f, str):
                if f not in self._files:
                    raise WorkspaceDenied(f"unknown path: {f!r}")
                normalized.append(
                    FileMatch(
                        path=f,
                        file_type=self._detect(f),
                        size_bytes=len(self._files[f]),
                    )
                )
            else:
                if f.path not in self._files:
                    raise WorkspaceDenied(f"unknown path: {f.path!r}")
                normalized.append(f)
        if len(normalized) > self._access.max_files:
            raise WorkspaceDenied(
                f"requested {len(normalized)} files, max_files={self._access.max_files}"
            )
        for m in normalized:
            if self._denied(m.path):
                raise WorkspaceDenied(f"path denied by policy: {m.path}")
        total = sum(m.size_bytes for m in normalized)
        if total > self._access.max_total_size_bytes:
            raise WorkspaceDenied(
                f"total size {total} exceeds max_total_size_bytes"
            )
        self._counter += 1
        grant = WorkspaceGrant(
            grant_id=f"grant-{self._counter}",
            purpose=purpose,
            files=tuple(normalized),
            mode=mode,
            reason=reason,
            requires_human_approval=self._access.require_human_approval,
        )
        self._grants[grant.grant_id] = grant
        return grant

    async def open_view(
        self,
        *,
        purpose: str,
        hints: Sequence[str] = (),
        file_types: Sequence[FileType] = (),
        max_files: int = 10,
        mode: WorkspaceMode = WorkspaceMode.READ_ONLY,
        reason: str | None = None,
    ) -> WorkspaceView:
        """Search by intent, request access to the top hits, return a view.

        Over-fetches (3x max_files) before truncating so low-scoring
        candidates can be discarded.
        """
        query = " ".join([purpose, *hints])
        matches = await self.search(query=query, types=file_types, limit=max_files * 3)
        chosen = matches[:max_files]
        grant = await self.request_access(
            files=chosen,
            mode=mode,
            reason=reason or purpose,
            purpose=purpose,
        )
        return LocalWorkspaceView(grant, self)

    async def list_grants(self) -> list[WorkspaceGrant]:
        """All grants minted by this client, in creation order."""
        return list(self._grants.values())
|
||||||
|
|
||||||
|
|
||||||
|
def _glob_to_regex(pattern: str) -> str:
|
||||||
|
"""Translate a simple ``foo/**/*.py`` style glob to a regex."""
|
||||||
|
out: list[str] = []
|
||||||
|
i = 0
|
||||||
|
while i < len(pattern):
|
||||||
|
c = pattern[i]
|
||||||
|
if c == "*":
|
||||||
|
if i + 1 < len(pattern) and pattern[i + 1] == "*":
|
||||||
|
out.append(".*")
|
||||||
|
i += 2
|
||||||
|
if i < len(pattern) and pattern[i] == "/":
|
||||||
|
i += 1
|
||||||
|
else:
|
||||||
|
out.append("[^/]*")
|
||||||
|
i += 1
|
||||||
|
elif c == "?":
|
||||||
|
out.append("[^/]")
|
||||||
|
i += 1
|
||||||
|
elif c == ".":
|
||||||
|
out.append(r"\.")
|
||||||
|
i += 1
|
||||||
|
else:
|
||||||
|
out.append(re.escape(c))
|
||||||
|
i += 1
|
||||||
|
return "".join(out)
|
||||||
101
examples/research_agent.py
Normal file
101
examples/research_agent.py
Normal file
@@ -0,0 +1,101 @@
|
|||||||
|
"""Example agent: full declarative surface + workspace negotiation."""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
|
||||||
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
from a2a_pack import (
|
||||||
|
A2AAgent,
|
||||||
|
EgressPolicy,
|
||||||
|
FileType,
|
||||||
|
JWTAuth,
|
||||||
|
Lifecycle,
|
||||||
|
LocalWorkspaceClient,
|
||||||
|
Resources,
|
||||||
|
RunContext,
|
||||||
|
State,
|
||||||
|
WorkspaceAccess,
|
||||||
|
WorkspaceMode,
|
||||||
|
skill,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class ResearchConfig(BaseModel):
    """Static configuration for :class:`ResearchAgent`."""

    model: str = "qwen2.5-coder"
|
||||||
|
|
||||||
|
|
||||||
|
class ResearchSession(BaseModel):
    """Per-session state model declared via ``state_model``."""

    # NOTE(review): nothing in this example writes to history yet.
    history: list[str] = []
|
||||||
|
|
||||||
|
|
||||||
|
class ResearchAgent(A2AAgent[ResearchConfig, JWTAuth]):
    """Example agent exercising the full declarative surface of the SDK."""

    name = "research-agent"
    description = "Researches questions across a session, with workspace access"

    config_model = ResearchConfig
    auth_model = JWTAuth

    # Runtime declaration: one instance per session, with in-memory state.
    lifecycle = Lifecycle.SESSION
    state = State.SESSION
    state_model = ResearchSession
    resources = Resources(cpu="1", memory="1Gi", max_runtime_seconds=900)
    # Egress is limited to the in-cluster LiteLLM service.
    egress = EgressPolicy(
        allow_internal_services=("litellm.llm.svc.cluster.local",),
    )
    tools_used = ("litellm",)
    required_secrets = ("LITELLM_API_KEY",)
    capabilities = {"streaming": True, "artifacts": True}

    # Bounded, read-mostly workspace negotiation; secret files are denied.
    workspace_access = WorkspaceAccess.dynamic(
        max_files=8,
        allowed_modes=(WorkspaceMode.READ_ONLY, WorkspaceMode.READ_WRITE_OVERLAY),
        deny_patterns=("secrets/**", "**/.env"),
    )

    @skill(scopes=["research:run"], stream=True, timeout_seconds=120)
    async def research(
        self,
        ctx: RunContext[JWTAuth],
        query: str,
        depth: int = 3,
    ) -> str:
        """Negotiate a small read-only view and stream which files are read.

        NOTE(review): ``depth`` only appears in the result string; this
        example does no actual multi-step research.
        """
        await ctx.emit_progress(f"researching {query!r} for {ctx.auth.sub}")

        # Capability negotiation by intent: no paths are requested directly.
        view = await ctx.workspace.open_view(
            purpose=f"Research support for: {query}",
            hints=[query],
            file_types=[FileType.PYTHON, FileType.MARKDOWN],
            max_files=4,
            mode=WorkspaceMode.READ_ONLY,
        )
        for fm in view.files:
            await ctx.emit_text_delta(f"reading {fm.path}\n")

        return f"Researched {query!r} at depth {depth} ({len(view.files)} files referenced)"
|
||||||
|
|
||||||
|
|
||||||
|
async def main() -> None:
    """Demo entry point: print the agent card, then run one local invocation."""
    agent = ResearchAgent()
    print(agent.card().model_dump_json(indent=2))
    print()

    seed_files = {
        "src/quantum.py": b"# notes on quantum computing",
        "docs/intro.md": b"# Quantum primer",
        "secrets/.env": b"DO_NOT_LEAK=1",
    }
    workspace = LocalWorkspaceClient(
        files=seed_files,
        access=ResearchAgent.workspace_access,
    )
    result = await agent.local_invoke(
        "research",
        auth=JWTAuth(sub="alice", scopes=["research:run"]),
        workspace=workspace,
        query="quantum computing",
    )
    print("RESULT:", result)
|
||||||
|
|
||||||
|
|
||||||
|
# Run the demo when executed directly as a script.
if __name__ == "__main__":
    asyncio.run(main())
|
||||||
39
pyproject.toml
Normal file
39
pyproject.toml
Normal file
@@ -0,0 +1,39 @@
|
|||||||
|
[build-system]
|
||||||
|
requires = ["hatchling"]
|
||||||
|
build-backend = "hatchling.build"
|
||||||
|
|
||||||
|
[project]
|
||||||
|
name = "a2a-pack"
|
||||||
|
version = "0.1.0"
|
||||||
|
description = "Developer SDK + CLI for building, packaging, and deploying A2A agents"
|
||||||
|
requires-python = ">=3.11"
|
||||||
|
dependencies = [
|
||||||
|
"pydantic>=2.6",
|
||||||
|
"fastapi>=0.110",
|
||||||
|
"uvicorn[standard]>=0.27",
|
||||||
|
"typer>=0.12",
|
||||||
|
"rich>=13",
|
||||||
|
"PyYAML>=6",
|
||||||
|
"jinja2>=3",
|
||||||
|
"httpx>=0.27",
|
||||||
|
]
|
||||||
|
|
||||||
|
[project.optional-dependencies]
|
||||||
|
dev = [
|
||||||
|
"pytest>=8",
|
||||||
|
"pytest-asyncio>=0.23",
|
||||||
|
"httpx>=0.27",
|
||||||
|
]
|
||||||
|
|
||||||
|
[project.scripts]
|
||||||
|
a2a = "a2a_pack.cli.main:app"
|
||||||
|
|
||||||
|
[tool.hatch.build.targets.wheel]
|
||||||
|
packages = ["a2a_pack"]
|
||||||
|
|
||||||
|
[tool.hatch.build.targets.wheel.shared-data]
|
||||||
|
"a2a_pack/cli/templates" = "a2a_pack/cli/templates"
|
||||||
|
|
||||||
|
[tool.pytest.ini_options]
|
||||||
|
asyncio_mode = "auto"
|
||||||
|
testpaths = ["tests"]
|
||||||
468
tests/test_agent.py
Normal file
468
tests/test_agent.py
Normal file
@@ -0,0 +1,468 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
from a2a_pack import (
|
||||||
|
A2AAgent,
|
||||||
|
AgentCard,
|
||||||
|
APIKeyAuth,
|
||||||
|
EgressPolicy,
|
||||||
|
JWTAuth,
|
||||||
|
Lifecycle,
|
||||||
|
LocalRunContext,
|
||||||
|
MissingScopes,
|
||||||
|
NoAuth,
|
||||||
|
Resources,
|
||||||
|
RunContext,
|
||||||
|
Sandbox,
|
||||||
|
SkillInputError,
|
||||||
|
SkillInvocationError,
|
||||||
|
SkillNotFound,
|
||||||
|
State,
|
||||||
|
skill,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class _GreeterConfig(BaseModel):
    # Single optional knob so tests can exercise default vs. explicit config.
    suffix: str = "!"


class _Greeter(A2AAgent[_GreeterConfig, NoAuth]):
    """Minimal test agent: one happy-path skill and one that always fails."""

    name = "greeter"
    description = "Says hi"
    config_model = _GreeterConfig
    auth_model = NoAuth

    @skill(description="Greet someone")
    async def greet(self, ctx: RunContext[NoAuth], who: str, loud: bool = False) -> str:
        # Emit a progress event first so tests can assert on ctx.events.
        await ctx.emit_progress(f"greeting {who}")
        out = f"hello {who}{self.config.suffix}"
        return out.upper() if loud else out

    @skill(name="boom", description="Fails on purpose")
    async def _boom(self, ctx: RunContext[NoAuth]) -> str:
        # Used to verify handler exceptions are wrapped in SkillInvocationError.
        raise ValueError("nope")
|
||||||
|
|
||||||
|
|
||||||
|
def _ctx() -> LocalRunContext[NoAuth]:
    """Build a fresh, unauthenticated local run context."""
    anonymous = NoAuth()
    return LocalRunContext(auth=anonymous)
|
||||||
|
|
||||||
|
|
||||||
|
# --- subclass / decorator validation ---
|
||||||
|
|
||||||
|
def test_subclass_without_name_rejected():
    # Defining a subclass with no `name` must fail at class-creation time.
    with pytest.raises(TypeError, match="name must be set"):

        class _Bad(A2AAgent):
            description = "missing name"


def test_skill_requires_async():
    # @skill only accepts coroutine functions; a sync handler is a TypeError.
    with pytest.raises(TypeError, match="async"):

        class _Sync(A2AAgent):
            name = "sync"

            @skill(description="sync handler")
            def hi(self, ctx: RunContext[NoAuth]) -> str:  # type: ignore[misc]
                return "hi"


def test_skill_requires_run_context_param():
    # Every skill handler must accept a RunContext parameter.
    with pytest.raises(TypeError, match="RunContext"):

        class _NoCtx(A2AAgent):
            name = "noctx"

            @skill(description="missing ctx")
            async def hi(self, who: str) -> str:
                return who


def test_skill_rejects_var_args():
    # *args cannot be described in an input schema, so it is rejected.
    with pytest.raises(TypeError, match=r"\*args"):

        class _Va(A2AAgent):
            name = "va"

            @skill()
            async def hi(self, ctx: RunContext[NoAuth], *args: str) -> str:
                return ",".join(args)


def test_skill_rejects_var_kwargs():
    # **kwargs is likewise rejected for the same schema reason.
    with pytest.raises(TypeError, match=r"\*\*kwargs"):

        class _Vk(A2AAgent):
            name = "vk"

            @skill()
            async def hi(self, ctx: RunContext[NoAuth], **kwargs: str) -> str:
                return str(kwargs)


def test_skill_rejects_untyped_param():
    # Parameters need annotations so input validation/schemas can be derived.
    with pytest.raises(TypeError, match="missing a type annotation"):

        class _U(A2AAgent):
            name = "u"

            @skill()
            async def hi(self, ctx: RunContext[NoAuth], who) -> str:  # type: ignore[no-untyped-def]
                return who


def test_skill_rejects_reserved_name():
    # "context" is a reserved parameter name and may not be used by skills.
    with pytest.raises(TypeError, match="reserved"):

        class _R(A2AAgent):
            name = "r"

            @skill()
            async def hi(self, ctx: RunContext[NoAuth], context: str) -> str:
                return context


def test_duplicate_skill_name_rejected():
    # Two handlers registered under the same skill name are a class error.
    with pytest.raises(TypeError, match="duplicate skill name"):

        class _Dup(A2AAgent):
            name = "dup"

            @skill(name="x")
            async def a(self, ctx: RunContext[NoAuth]) -> str:
                return "a"

            @skill(name="x")
            async def b(self, ctx: RunContext[NoAuth]) -> str:
                return "b"
|
||||||
|
|
||||||
|
|
||||||
|
# --- card / metadata ---
|
||||||
|
|
||||||
|
def test_skills_collected_with_metadata():
    """Both decorated handlers are registered under their skill names."""
    agent = _Greeter()
    assert {"greet", "boom"} == set(agent.skills)
    assert agent.skills["greet"].description == "Greet someone"


def test_card_omits_ctx_param():
    """The published input schema hides ctx and only requires `who`."""
    card = _Greeter().card()
    assert isinstance(card, AgentCard)
    greet_skill = next(s for s in card.skills if s.name == "greet")
    schema = greet_skill.input_schema
    assert "ctx" not in schema["properties"]
    assert schema["required"] == ["who"]
    assert schema["additionalProperties"] is False
|
||||||
|
|
||||||
|
|
||||||
|
def test_card_includes_deploy_metadata():
    """Deploy-oriented class attributes are copied onto the agent card."""

    class _Cfg(BaseModel):
        pass

    class _Meta(A2AAgent[_Cfg, NoAuth]):
        name = "meta"
        description = "metadata showcase"
        required_secrets = ("OPENAI_KEY",)
        required_env = ("REGION",)
        capabilities = {"streaming": True}
        input_modes = ("application/json", "text/plain")
        output_modes = ("application/json",)

        @skill()
        async def noop(self, ctx: RunContext[NoAuth]) -> str:
            return "ok"

    card = _Meta().card()
    # Tuples declared on the class come back as lists on the card.
    assert card.required_secrets == ["OPENAI_KEY"]
    assert card.required_env == ["REGION"]
    assert card.capabilities == {"streaming": True}
    assert card.input_modes == ["application/json", "text/plain"]
|
||||||
|
|
||||||
|
|
||||||
|
# --- config hydration ---
|
||||||
|
|
||||||
|
def test_default_config_constructed_when_none():
    """Omitting config builds the model from its field defaults."""
    agent = _Greeter()
    assert agent.config.suffix == "!"


def test_explicit_config_used():
    """A pre-built config model instance is adopted as-is."""
    agent = _Greeter(_GreeterConfig(suffix="?!"))
    assert agent.config.suffix == "?!"


def test_config_accepts_dict():
    """Plain dicts are hydrated into the declared config model."""
    agent = _Greeter({"suffix": "?"})
    assert agent.config.suffix == "?"
|
||||||
|
|
||||||
|
|
||||||
|
def test_config_dict_validation_errors_propagate():
    # NOTE(review): Exception is deliberately broad here; if A2AAgent.__init__
    # does not wrap hydration failures, pinning pydantic.ValidationError would
    # make this test stricter — confirm against the constructor before changing.
    with pytest.raises(Exception):
        _Greeter({"suffix": 123, "extra": True})  # type: ignore[arg-type]
|
||||||
|
|
||||||
|
|
||||||
|
# --- invocation ---
|
||||||
|
|
||||||
|
async def test_invoke_passes_ctx_and_returns_value():
    """A successful invoke returns the handler result and records progress."""
    agent = _Greeter()
    ctx = _ctx()
    result = await agent.invoke("greet", ctx, who="bob")
    assert result == "hello bob!"
    assert any(event.kind == "progress" for event in ctx.events)


async def test_invoke_validates_input_types():
    """Arguments are validated against the skill's annotations."""
    with pytest.raises(SkillInputError):
        await _Greeter().invoke("greet", _ctx(), who=123)  # type: ignore[arg-type]


async def test_invoke_rejects_unknown_param():
    """Parameters not in the skill signature are refused."""
    with pytest.raises(SkillInputError, match="unknown parameter"):
        await _Greeter().invoke("greet", _ctx(), who="bob", extra="nope")


async def test_invoke_missing_required_param():
    """Omitting a required parameter is an input error."""
    with pytest.raises(SkillInputError, match="missing required"):
        await _Greeter().invoke("greet", _ctx())


async def test_invoke_unknown_skill_raises():
    """Invoking an unregistered skill name fails fast."""
    with pytest.raises(SkillNotFound):
        await _Greeter().invoke("nope", _ctx())


async def test_invoke_handler_error_wrapped():
    """Exceptions raised by handlers surface as SkillInvocationError."""
    with pytest.raises(SkillInvocationError, match="ValueError"):
        await _Greeter().invoke("boom", _ctx())
|
||||||
|
|
||||||
|
|
||||||
|
class _Pair(BaseModel):
    # Simple JSON-serialisable return type for the invoke_json test below.
    a: int
    b: int


class _PairCfg(BaseModel):
    # Empty config: this agent has no tunables.
    pass


class _PairAgent(A2AAgent[_PairCfg, NoAuth]):
    """Agent whose only skill returns a pydantic model rather than a plain value."""

    name = "json-out"
    description = ""

    @skill()
    async def make(self, ctx: RunContext[NoAuth]) -> _Pair:
        return _Pair(a=1, b=2)
|
||||||
|
|
||||||
|
|
||||||
|
async def test_invoke_json_returns_serializable():
    """invoke_json converts model return values into plain JSON dicts."""
    agent = _PairAgent()
    payload = await agent.invoke_json("make", LocalRunContext(auth=NoAuth()), {})
    assert payload == {"a": 1, "b": 2}
|
||||||
|
|
||||||
|
|
||||||
|
# --- scopes ---
|
||||||
|
|
||||||
|
async def test_scope_enforcement_blocks_caller():
    """A skill's declared scopes must be present on the caller's auth."""

    class _Cfg(BaseModel):
        pass

    class _S(A2AAgent[_Cfg, JWTAuth]):
        name = "scoped-agent"
        description = ""
        auth_model = JWTAuth

        @skill(scopes=["admin"])
        async def secret_op(self, ctx: RunContext[JWTAuth]) -> str:
            return "ok"

    # A caller holding only "read" is rejected before the handler runs.
    bad = LocalRunContext(auth=JWTAuth(sub="alice", scopes=["read"]))
    with pytest.raises(MissingScopes):
        await _S().invoke("secret_op", bad)

    # The same skill succeeds once the required scope is present.
    good = LocalRunContext(auth=JWTAuth(sub="alice", scopes=["admin"]))
    assert await _S().invoke("secret_op", good) == "ok"


async def test_scope_enforcement_allows_no_scope_skill():
    # Skills without declared scopes remain callable with any context.
    g = _Greeter()
    assert await g.invoke("greet", _ctx(), who="bob") == "hello bob!"
|
||||||
|
|
||||||
|
|
||||||
|
# --- streaming helpers ---
|
||||||
|
|
||||||
|
async def test_stream_helpers_emit_typed_events():
    """Text deltas and errors land in ctx.events in emission order."""
    ctx = _ctx()
    for chunk in ("hello ", "world"):
        await ctx.emit_text_delta(chunk)
    await ctx.emit_error("uh oh", code="E001")
    assert [e.kind for e in ctx.events] == ["text_delta", "text_delta", "error"]
    assert ctx.events[-1].payload == {"message": "uh oh", "code": "E001"}
|
||||||
|
|
||||||
|
|
||||||
|
# --- health ---
|
||||||
|
|
||||||
|
async def test_default_health_is_true():
    """Agents report healthy unless they override health()."""
    assert await _Greeter().health() is True


# --- local_invoke ---

async def test_local_invoke_default_no_auth():
    """local_invoke supplies a NoAuth context when none is given."""
    agent = _Greeter()
    greeting = await agent.local_invoke("greet", who="bob")
    assert greeting == "hello bob!"
|
||||||
|
|
||||||
|
|
||||||
|
async def test_local_invoke_with_explicit_auth():
    """An explicit auth object is threaded through to the handler's ctx."""

    class _Cfg(BaseModel):
        pass

    class _A(A2AAgent[_Cfg, JWTAuth]):
        name = "auth-test"
        description = ""
        auth_model = JWTAuth

        @skill()
        async def whoami(self, ctx: RunContext[JWTAuth]) -> str:
            # Echo identity fields to prove ctx.auth is the object passed in.
            return f"{ctx.auth.sub}@{ctx.auth.org_id}"

    out = await _A().local_invoke(
        "whoami", auth=JWTAuth(sub="alice", org_id="acme")
    )
    assert out == "alice@acme"
|
||||||
|
|
||||||
|
|
||||||
|
# --- artifacts / inheritance ---
|
||||||
|
|
||||||
|
async def test_artifact_round_trip():
    """Artifacts written via ctx are stored, referenced, and emitted as events."""

    class _Cfg(BaseModel):
        pass

    class _A(A2AAgent[_Cfg, NoAuth]):
        name = "art"
        description = ""

        @skill()
        async def write(self, ctx: RunContext[NoAuth], body: str) -> str:
            ref = await ctx.write_artifact("note.txt", body.encode(), "text/plain")
            await ctx.emit_artifact(ref)
            return ref.uri

    ctx = _ctx()
    uri = await _A().invoke("write", ctx, body="hello")
    # Local contexts hand out in-memory URIs and keep the raw bytes around.
    assert uri.startswith("memory://")
    assert ctx.artifacts["note.txt"] == b"hello"
    assert any(e.kind == "artifact" for e in ctx.events)
|
||||||
|
|
||||||
|
|
||||||
|
def test_skill_inheritance_preserves_parent_skills():
    """Subclassing keeps inherited skills and adds newly decorated ones."""

    class _Loud(_Greeter):
        name = "loud-greeter"

        @skill(description="Shout")
        async def shout(self, ctx: RunContext[NoAuth], what: str) -> str:
            return what.upper()

    skills = _Loud().skills
    assert set(skills) == {"greet", "boom", "shout"}
|
||||||
|
|
||||||
|
|
||||||
|
# --- runtime metadata ---
|
||||||
|
|
||||||
|
|
||||||
|
class _SessionState(BaseModel):
    # Minimal session-state schema used by the stateful-agent tests below.
    history: list[str] = []
|
||||||
|
|
||||||
|
|
||||||
|
def test_default_runtime_is_ephemeral_no_state_microsandbox():
    """Unconfigured agents default to the most conservative runtime."""
    runtime = _Greeter.runtime()
    assert runtime.lifecycle is Lifecycle.EPHEMERAL
    assert runtime.state is State.NONE
    assert runtime.sandbox is Sandbox.MICROSANDBOX  # safe-by-default
    assert runtime.concurrency == 1
|
||||||
|
|
||||||
|
|
||||||
|
def test_state_requires_state_model():
    # Declaring session state without a schema is a class-definition error.
    with pytest.raises(TypeError, match="state_model"):

        class _Bad(A2AAgent):
            name = "bad"
            description = ""
            state = State.SESSION


def test_ephemeral_lifecycle_incompatible_with_session_state():
    # Ephemeral processes cannot hold per-session state, even with a schema.
    with pytest.raises(TypeError, match="ephemeral.*session"):

        class _Bad(A2AAgent):
            name = "bad"
            description = ""
            lifecycle = Lifecycle.EPHEMERAL
            state = State.SESSION
            state_model = _SessionState
|
||||||
|
|
||||||
|
|
||||||
|
def test_runtime_metadata_propagates_to_card():
    """Runtime, egress, and per-skill policy declarations all reach the card."""

    class _ChatCfg(BaseModel):
        pass

    class _Chat(A2AAgent[_ChatCfg, JWTAuth]):
        name = "chat"
        description = "stateful chat agent"
        auth_model = JWTAuth
        lifecycle = Lifecycle.SESSION
        state = State.SESSION
        state_model = _SessionState
        resources = Resources(cpu="2", memory="4Gi", gpu=0, max_runtime_seconds=1800)
        concurrency = 4
        egress = EgressPolicy(
            allow_hosts=("api.openai.com",),
            allow_internal_services=("litellm.llm.svc.cluster.local",),
        )
        tools_used = ("litellm", "minio")

        @skill(timeout_seconds=30, idempotent=True, max_retries=2, cost_class="cheap")
        async def reply(self, ctx: RunContext[JWTAuth], message: str) -> str:
            return f"echo {message}"

    card = _Chat().card()
    # Agent-level runtime metadata.
    assert card.runtime.lifecycle is Lifecycle.SESSION
    assert card.runtime.state is State.SESSION
    assert card.runtime.sandbox is Sandbox.MICROSANDBOX
    assert card.runtime.resources.cpu == "2"
    assert card.runtime.concurrency == 4
    assert card.runtime.egress.allow_hosts == ("api.openai.com",)
    assert card.runtime.tools_used == ("litellm", "minio")
    # The session-state schema is published alongside the runtime metadata.
    assert card.state_schema is not None
    assert "history" in card.state_schema["properties"]

    # Per-skill execution policy declared via @skill kwargs.
    skill_card = card.skills[0]
    assert skill_card.policy.timeout_seconds == 30
    assert skill_card.policy.idempotent is True
    assert skill_card.policy.max_retries == 2
    assert skill_card.policy.cost_class == "cheap"
|
||||||
|
|
||||||
|
|
||||||
|
def test_skill_metadata_propagates_to_card():
    """scopes/stream/tags from @skill appear on the card's skill entry."""

    class _ScopedConfig(BaseModel):
        pass

    class _Scoped(A2AAgent[_ScopedConfig, APIKeyAuth]):
        name = "scoped"
        description = "scope test"
        config_model = _ScopedConfig
        auth_model = APIKeyAuth

        @skill(scopes=["a:read", "a:write"], stream=True, tags=["x"])
        async def do(self, ctx: RunContext[APIKeyAuth]) -> str:
            return "ok"

    card = _Scoped().card()
    s = card.skills[0]
    assert s.scopes == ["a:read", "a:write"]
    assert s.stream is True
    assert s.tags == ["x"]
|
||||||
199
tests/test_workspace.py
Normal file
199
tests/test_workspace.py
Normal file
@@ -0,0 +1,199 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
from a2a_pack import (
|
||||||
|
A2AAgent,
|
||||||
|
FileType,
|
||||||
|
LocalWorkspaceClient,
|
||||||
|
NoAuth,
|
||||||
|
RunContext,
|
||||||
|
WorkspaceAccess,
|
||||||
|
WorkspaceDenied,
|
||||||
|
WorkspaceMode,
|
||||||
|
skill,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# In-memory fixture tree. Includes a secrets file and a dotfile so the
# deny-pattern tests have realistic targets to exclude.
_FILES: dict[str, bytes] = {
    "src/auth/login.py": b"def login(jwt): ... # JWT auth middleware",
    "src/auth/middleware.py": b"# auth middleware for JWT validation",
    "src/payments/checkout.py": b"def checkout(): ... # payment flow",
    "tests/test_auth.py": b"def test_login_jwt(): ...",
    "configs/app.toml": b"[auth]\njwt = true",
    "secrets/.env": b"DB_PASSWORD=oops",
    "README.md": b"# project",
}
|
||||||
|
|
||||||
|
|
||||||
|
class _Cfg(BaseModel):
    # No tunables needed for the workspace tests.
    pass


class _CoderAgent(A2AAgent[_Cfg, NoAuth]):
    """Agent that negotiates scoped workspace views before touching files."""

    name = "coder"
    description = "Edits code by negotiated views"
    # Dynamic access: each run must request a bounded view; secrets/env files
    # are denied outright and every request must carry a reason.
    workspace_access = WorkspaceAccess.dynamic(
        max_files=5,
        allowed_modes=(
            WorkspaceMode.READ_ONLY,
            WorkspaceMode.READ_WRITE_OVERLAY,
        ),
        require_reason=True,
        deny_patterns=("secrets/**", ".env", "**/.env"),
    )

    @skill()
    async def find_and_patch_auth(self, ctx: RunContext[NoAuth]) -> int:
        # Ask for at most 3 relevant Python files in overlay (staged-write) mode.
        view = await ctx.workspace.open_view(
            purpose="Fix JWT login bug",
            hints=["auth", "jwt", "login"],
            file_types=[FileType.PYTHON],
            max_files=3,
            mode=WorkspaceMode.READ_WRITE_OVERLAY,
        )
        for fm in view.files:
            content = await view.read(fm.path)
            await view.write(fm.path, content + b"\n# patched\n")
        # Report how many files were granted so tests can bound it.
        return len(view.files)
|
||||||
|
|
||||||
|
|
||||||
|
def _client() -> LocalWorkspaceClient:
    """Fresh in-memory workspace honouring the coder agent's access policy."""
    policy = _CoderAgent.workspace_access
    return LocalWorkspaceClient(_FILES, access=policy)
|
||||||
|
|
||||||
|
|
||||||
|
async def test_open_view_grants_relevant_files_only():
    """A dynamic view yields at least one match, capped at the requested max."""
    granted = await _CoderAgent().local_invoke("find_and_patch_auth", workspace=_client())
    assert 1 <= granted <= 3


async def test_search_excludes_denied_patterns():
    """Search never surfaces paths blocked by deny_patterns."""
    hits = await _client().search(query="DB_PASSWORD env secret", limit=20)
    for match in hits:
        assert "secrets/" not in match.path
        assert not match.path.endswith(".env")
|
||||||
|
|
||||||
|
|
||||||
|
async def test_request_access_rejects_denied_path():
    """Paths matching deny_patterns can never be granted."""
    client = _client()
    with pytest.raises(WorkspaceDenied, match="denied by policy"):
        await client.request_access(
            files=["secrets/.env"],
            mode=WorkspaceMode.READ_ONLY,
            reason="trying to read secrets",
        )


async def test_request_access_rejects_disallowed_mode():
    """Modes outside the policy's allowed_modes are refused."""
    client = _client()
    with pytest.raises(WorkspaceDenied, match="not in allowed_modes"):
        await client.request_access(
            files=["src/auth/login.py"],
            mode=WorkspaceMode.READ_WRITE_DIRECT,
            reason="needs direct write",
        )


async def test_request_access_requires_reason():
    """An empty reason string is rejected when require_reason is set."""
    client = _client()
    with pytest.raises(WorkspaceDenied, match="reason required"):
        await client.request_access(
            files=["src/auth/login.py"],
            mode=WorkspaceMode.READ_ONLY,
            reason="",
        )


async def test_request_access_enforces_max_files():
    """Requests for more files than max_files are rejected outright."""
    client = _client()
    candidates = [p for p in _FILES if not p.startswith("secrets") and not p.endswith(".env")]
    assert len(candidates) > 5  # confirm fixture
    with pytest.raises(WorkspaceDenied, match="max_files"):
        await client.request_access(
            files=candidates[:6],
            mode=WorkspaceMode.READ_ONLY,
            reason="too many",
        )
|
||||||
|
|
||||||
|
|
||||||
|
async def test_writes_are_staged_as_patches_not_applied():
    """Overlay-mode writes become patch records; the backing store is untouched."""
    ws = _client()
    grant = await ws.request_access(
        files=["src/auth/login.py"],
        mode=WorkspaceMode.READ_WRITE_OVERLAY,
        reason="patch",
    )
    from a2a_pack.workspace import LocalWorkspaceView

    view = LocalWorkspaceView(grant, ws)
    patch = await view.write("src/auth/login.py", b"new content")
    assert patch.operation == "update"
    assert patch.content == b"new content"
    # original file untouched in the in-memory store
    assert ws._files["src/auth/login.py"].startswith(b"def login")
    patches = await view.patches()
    assert len(patches) == 1


async def test_view_rejects_writes_in_read_only_mode():
    """A view granted READ_ONLY refuses write attempts."""
    ws = _client()
    grant = await ws.request_access(
        files=["src/auth/login.py"],
        mode=WorkspaceMode.READ_ONLY,
        reason="reading",
    )
    from a2a_pack.workspace import LocalWorkspaceView

    view = LocalWorkspaceView(grant, ws)
    with pytest.raises(WorkspaceDenied, match="read-only"):
        await view.write("src/auth/login.py", b"x")
|
||||||
|
|
||||||
|
|
||||||
|
async def test_view_rejects_path_outside_grant():
    """Even reads are confined to the exact files named in the grant."""
    client = _client()
    grant = await client.request_access(
        files=["src/auth/login.py"],
        mode=WorkspaceMode.READ_WRITE_OVERLAY,
        reason="patching",
    )
    from a2a_pack.workspace import LocalWorkspaceView

    view = LocalWorkspaceView(grant, client)
    with pytest.raises(WorkspaceDenied, match="not in grant"):
        await view.read("src/payments/checkout.py")
|
||||||
|
|
||||||
|
|
||||||
|
async def test_workspace_disabled_by_default():
    """Agents without workspace_access get a ctx.workspace that denies use."""
    from a2a_pack import SkillInvocationError

    class _Plain(A2AAgent):
        name = "plain"
        description = ""

        @skill()
        async def touch(self, ctx: RunContext[NoAuth]) -> str:
            await ctx.workspace.search(query="x")
            return "ok"

    agent = _Plain()
    with pytest.raises(SkillInvocationError) as ei:
        await agent.local_invoke("touch")
    # The underlying cause is a PermissionError, preserved via exception chaining.
    assert isinstance(ei.value.__cause__, PermissionError)
|
||||||
|
|
||||||
|
|
||||||
|
def test_workspace_access_propagates_to_card():
    """The card advertises the declared workspace policy verbatim."""
    wa = _CoderAgent().card().workspace_access
    assert wa.enabled is True
    assert wa.max_files == 5
    assert WorkspaceMode.READ_WRITE_OVERLAY in wa.allowed_modes
    assert "secrets/**" in wa.deny_patterns


def test_default_sandbox_is_microsandbox():
    """Declaring workspace access does not loosen the sandbox default."""
    from a2a_pack import Sandbox

    assert _CoderAgent.runtime().sandbox is Sandbox.MICROSANDBOX
|
||||||
Reference in New Issue
Block a user