Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions agent/transports/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,3 +45,7 @@ def _discover_transports() -> None:
import agent.transports.chat_completions # noqa: F401
except ImportError:
pass
try:
import agent.transports.bedrock # noqa: F401
except ImportError:
pass
154 changes: 154 additions & 0 deletions agent/transports/bedrock.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,154 @@
"""AWS Bedrock Converse API transport.

Delegates to the existing adapter functions in agent/bedrock_adapter.py.
Bedrock uses its own boto3 client (not the OpenAI SDK), so the transport
owns format conversion and normalization, while client construction and
boto3 calls stay on AIAgent.
"""

from typing import Any, Dict, List, Optional

from agent.transports.base import ProviderTransport
from agent.transports.types import NormalizedResponse, ToolCall, Usage


class BedrockTransport(ProviderTransport):
    """Transport for api_mode='bedrock_converse'.

    Thin adapter layer: format conversion and normalization delegate to
    agent/bedrock_adapter.py, while client construction and the actual
    boto3 converse() call stay on AIAgent.
    """

    # Bedrock stopReason -> OpenAI finish_reason. Hoisted to a class-level
    # constant so the dict is built once, not on every map_finish_reason call.
    _FINISH_REASON_MAP = {
        "end_turn": "stop",
        "tool_use": "tool_calls",
        "max_tokens": "length",
        "stop_sequence": "stop",
        "guardrail_intervened": "content_filter",
        "content_filtered": "content_filter",
    }

    @property
    def api_mode(self) -> str:
        """Identifier used by the transport registry for dispatch."""
        return "bedrock_converse"

    def convert_messages(self, messages: List[Dict[str, Any]], **kwargs) -> Any:
        """Convert OpenAI-format messages to Bedrock Converse format."""
        from agent.bedrock_adapter import convert_messages_to_converse
        return convert_messages_to_converse(messages)

    def convert_tools(self, tools: List[Dict[str, Any]]) -> Any:
        """Convert OpenAI tool schemas to Bedrock Converse toolConfig."""
        from agent.bedrock_adapter import convert_tools_to_converse
        return convert_tools_to_converse(tools)

    def build_kwargs(
        self,
        model: str,
        messages: List[Dict[str, Any]],
        tools: Optional[List[Dict[str, Any]]] = None,
        **params,
    ) -> Dict[str, Any]:
        """Build Bedrock converse() kwargs.

        Message/tool conversion happens inside build_converse_kwargs, so
        callers pass OpenAI-format messages and tools directly.

        params:
            max_tokens: int — output token limit (default 4096)
            temperature: float | None
            guardrail_config: dict | None — Bedrock guardrails
            region: str — AWS region (default 'us-east-1')
        """
        from agent.bedrock_adapter import build_converse_kwargs

        region = params.get("region", "us-east-1")
        guardrail = params.get("guardrail_config")

        kwargs = build_converse_kwargs(
            model=model,
            messages=messages,
            tools=tools,
            max_tokens=params.get("max_tokens", 4096),
            temperature=params.get("temperature"),
            guardrail_config=guardrail,
        )
        # Sentinel keys for dispatch — agent pops these before the boto3 call,
        # so they never reach the Bedrock API itself.
        kwargs["__bedrock_converse__"] = True
        kwargs["__bedrock_region__"] = region
        return kwargs

    def normalize_response(self, response: Any, **kwargs) -> NormalizedResponse:
        """Normalize a Bedrock response to NormalizedResponse.

        Handles two shapes:
        1. Raw boto3 dict (from direct converse() calls)
        2. Already-normalized SimpleNamespace with .choices (from dispatch site)
        """
        from agent.bedrock_adapter import normalize_converse_response

        # Normalize to an OpenAI-compatible SimpleNamespace first.
        if hasattr(response, "choices") and response.choices:
            # Already normalized at dispatch site.
            ns = response
        else:
            # Raw boto3 dict.
            ns = normalize_converse_response(response)

        choice = ns.choices[0]
        msg = choice.message
        # Fall back to "stop" when the normalized choice carries no reason.
        finish_reason = choice.finish_reason or "stop"

        tool_calls = None
        if msg.tool_calls:
            tool_calls = [
                ToolCall(
                    id=tc.id,
                    name=tc.function.name,
                    arguments=tc.function.arguments,
                )
                for tc in msg.tool_calls
            ]

        usage = None
        if hasattr(ns, "usage") and ns.usage:
            u = ns.usage
            # `or 0` guards against explicit None token counts.
            usage = Usage(
                prompt_tokens=getattr(u, "prompt_tokens", 0) or 0,
                completion_tokens=getattr(u, "completion_tokens", 0) or 0,
                total_tokens=getattr(u, "total_tokens", 0) or 0,
            )

        # Either attribute name may carry model reasoning, depending on
        # which layer produced the normalized message.
        reasoning = getattr(msg, "reasoning", None) or getattr(msg, "reasoning_content", None)

        return NormalizedResponse(
            content=msg.content,
            tool_calls=tool_calls,
            finish_reason=finish_reason,
            reasoning=reasoning,
            usage=usage,
        )

    def validate_response(self, response: Any) -> bool:
        """Check Bedrock response structure.

        Accepts either a raw Bedrock dict (must contain 'output') or an
        already-normalized SimpleNamespace with non-empty .choices — the
        same check chat_completions uses.
        """
        if response is None:
            return False
        # Raw Bedrock dict response — check for the 'output' key.
        if isinstance(response, dict):
            return "output" in response
        # Already-normalized SimpleNamespace.
        if hasattr(response, "choices"):
            return bool(response.choices)
        return False

    def map_finish_reason(self, raw_reason: str) -> str:
        """Map a Bedrock stop reason to an OpenAI finish_reason.

        The adapter already does this mapping inside
        normalize_converse_response, so this is only used for direct access
        to raw responses. Unknown reasons default to "stop".
        """
        return self._FINISH_REASON_MAP.get(raw_reason, "stop")


# Auto-register on import — _discover_transports() in agent/transports/__init__.py
# imports this module, which makes the transport resolvable by its api_mode key.
from agent.transports import register_transport # noqa: E402

register_transport("bedrock_converse", BedrockTransport)
43 changes: 30 additions & 13 deletions run_agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -6583,6 +6583,15 @@ def _get_chat_completions_transport(self):
self._chat_completions_transport = t
return t

def _get_bedrock_transport(self):
    """Return the cached BedrockTransport instance (lazy singleton)."""
    cached = getattr(self, "_bedrock_transport", None)
    if cached is not None:
        return cached
    # First use: resolve from the registry and memoize on the agent.
    from agent.transports import get_transport
    transport = get_transport("bedrock_converse")
    self._bedrock_transport = transport
    return transport

def _prepare_anthropic_messages_for_api(self, api_messages: list) -> list:
if not any(
isinstance(msg, dict) and self._content_has_image_parts(msg.get("content"))
Expand Down Expand Up @@ -6722,21 +6731,17 @@ def _build_api_kwargs(self, api_messages: list) -> dict:
# AWS Bedrock native Converse API — bypasses the OpenAI client entirely.
# The adapter handles message/tool conversion and boto3 calls directly.
if self.api_mode == "bedrock_converse":
from agent.bedrock_adapter import build_converse_kwargs
_bt = self._get_bedrock_transport()
region = getattr(self, "_bedrock_region", None) or "us-east-1"
guardrail = getattr(self, "_bedrock_guardrail_config", None)
return {
"__bedrock_converse__": True,
"__bedrock_region__": region,
**build_converse_kwargs(
model=self.model,
messages=api_messages,
tools=self.tools,
max_tokens=self.max_tokens or 4096,
temperature=None, # Let the model use its default
guardrail_config=guardrail,
),
}
return _bt.build_kwargs(
model=self.model,
messages=api_messages,
tools=self.tools,
max_tokens=self.max_tokens or 4096,
region=region,
guardrail_config=guardrail,
)

if self.api_mode == "codex_responses":
_ct = self._get_codex_transport()
Expand Down Expand Up @@ -9250,6 +9255,14 @@ def _stop_spinner():
error_details.append("response is None")
else:
error_details.append("response.content invalid (not a non-empty list)")
elif self.api_mode == "bedrock_converse":
_btv = self._get_bedrock_transport()
if not _btv.validate_response(response):
response_invalid = True
if response is None:
error_details.append("response is None")
else:
error_details.append("Bedrock response invalid (no output or choices)")
else:
_ctv = self._get_chat_completions_transport()
if not _ctv.validate_response(response):
Expand Down Expand Up @@ -9413,6 +9426,10 @@ def _stop_spinner():
elif self.api_mode == "anthropic_messages":
_tfr = self._get_anthropic_transport()
finish_reason = _tfr.map_finish_reason(response.stop_reason)
elif self.api_mode == "bedrock_converse":
# Bedrock response is already normalized at dispatch — finish_reason
# is already in OpenAI format via normalize_converse_response()
finish_reason = response.choices[0].finish_reason if hasattr(response, "choices") and response.choices else "stop"
else:
finish_reason = response.choices[0].finish_reason
assistant_message = response.choices[0].message
Expand Down
Loading
Loading