5 changes: 2 additions & 3 deletions python/.env.example
@@ -33,7 +33,6 @@ ANTHROPIC_MODEL=""
OLLAMA_ENDPOINT=""
OLLAMA_MODEL=""
# Observability
ENABLE_OTEL=true
ENABLE_INSTRUMENTATION=true
ENABLE_SENSITIVE_DATA=true
OTLP_ENDPOINT="http://localhost:4317/"
# APPLICATIONINSIGHTS_CONNECTION_STRING="..."
OTEL_EXPORTER_OTLP_ENDPOINT="http://localhost:4317/"
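The renaming tracks this PR's switch from "observability" to "instrumentation" naming, and `OTEL_EXPORTER_OTLP_ENDPOINT` is the standard OpenTelemetry exporter variable. A minimal sketch of how an application might read the renamed settings (illustrative only, not framework code):

```python
import os

# Illustrative: read the renamed observability settings from the environment.
enable_instrumentation = os.getenv("ENABLE_INSTRUMENTATION", "false").lower() == "true"
enable_sensitive_data = os.getenv("ENABLE_SENSITIVE_DATA", "false").lower() == "true"
# OTEL_EXPORTER_OTLP_ENDPOINT is the standard OpenTelemetry exporter variable.
otlp_endpoint = os.getenv("OTEL_EXPORTER_OTLP_ENDPOINT", "http://localhost:4317/")

print(enable_instrumentation, enable_sensitive_data, otlp_endpoint)
```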
6 changes: 5 additions & 1 deletion python/packages/a2a/agent_framework_a2a/_agent.py
@@ -5,7 +5,7 @@
import re
import uuid
from collections.abc import AsyncIterable, Sequence
from typing import Any, cast
from typing import Any, Final, cast

import httpx
from a2a.client import Client, ClientConfig, ClientFactory, minimal_agent_card
@@ -38,6 +38,7 @@
UriContent,
prepend_agent_framework_to_user_agent,
)
from agent_framework.observability import use_agent_instrumentation

__all__ = ["A2AAgent"]

@@ -58,6 +59,7 @@ def _get_uri_data(uri: str) -> str:
return match.group("base64_data")


@use_agent_instrumentation
class A2AAgent(BaseAgent):
"""Agent2Agent (A2A) protocol implementation.

@@ -69,6 +71,8 @@ class A2AAgent(BaseAgent):
Can be initialized with a URL, AgentCard, or existing A2A Client instance.
"""

AGENT_PROVIDER_NAME: Final[str] = "A2A"

def __init__(
self,
*,
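The A2A agent now opts into agent-level tracing with the bare decorator and advertises a class-level provider name; ChatAgent further down in this PR applies the same decorator parameterized as `use_agent_instrumentation(capture_usage=False)`. A minimal sketch of the pattern for another agent, assuming `BaseAgent` is importable from the package root; the class name and provider string below are made up:

```python
from typing import Final

from agent_framework import BaseAgent  # assumed top-level export
from agent_framework.observability import use_agent_instrumentation


@use_agent_instrumentation  # same bare form as A2AAgent above
class ExampleInstrumentedAgent(BaseAgent):
    """Hypothetical agent illustrating the decorator plus provider-name pattern."""

    # Mirrors A2AAgent.AGENT_PROVIDER_NAME = "A2A"; the value here is illustrative.
    AGENT_PROVIDER_NAME: Final[str] = "example-provider"

    # run()/run_stream() implementations omitted; this sketch only shows the instrumentation wiring.
```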
4 changes: 2 additions & 2 deletions python/packages/ag-ui/agent_framework_ag_ui/_client.py
@@ -23,7 +23,7 @@
from agent_framework._middleware import use_chat_middleware
from agent_framework._tools import use_function_invocation
from agent_framework._types import BaseContent, Contents
from agent_framework.observability import use_observability
from agent_framework.observability import use_instrumentation

from ._event_converters import AGUIEventConverter
from ._http_service import AGUIHttpService
@@ -89,7 +89,7 @@ async def response_wrapper(self, *args: Any, **kwargs: Any) -> ChatResponse:

@_apply_server_function_call_unwrap
@use_function_invocation
@use_observability
@use_instrumentation
@use_chat_middleware
class AGUIChatClient(BaseChatClient):
"""Chat client for communicating with AG-UI compliant servers.
Changes to the Anthropic chat client
@@ -35,7 +35,7 @@
)
from agent_framework._pydantic import AFBaseSettings
from agent_framework.exceptions import ServiceInitializationError
from agent_framework.observability import use_observability
from agent_framework.observability import use_instrumentation
from anthropic import AsyncAnthropic
from anthropic.types.beta import (
BetaContentBlock,
Expand Down Expand Up @@ -110,7 +110,7 @@ class AnthropicSettings(AFBaseSettings):


@use_function_invocation
@use_observability
@use_instrumentation
@use_chat_middleware
class AnthropicClient(BaseChatClient):
"""Anthropic Chat client."""
Changes to the Azure AI Agent chat client
@@ -43,7 +43,7 @@
use_function_invocation,
)
from agent_framework.exceptions import ServiceInitializationError, ServiceResponseException
from agent_framework.observability import use_observability
from agent_framework.observability import use_instrumentation
from azure.ai.agents.aio import AgentsClient
from azure.ai.agents.models import (
Agent,
Expand Down Expand Up @@ -107,7 +107,7 @@


@use_function_invocation
@use_observability
@use_instrumentation
@use_chat_middleware
class AzureAIAgentClient(BaseChatClient):
"""Azure AI Agent Chat client."""
91 changes: 79 additions & 12 deletions python/packages/azure-ai/agent_framework_azure_ai/_client.py
@@ -15,7 +15,7 @@
use_function_invocation,
)
from agent_framework.exceptions import ServiceInitializationError, ServiceInvalidRequestError
from agent_framework.observability import use_observability
from agent_framework.observability import use_instrumentation
from agent_framework.openai._responses_client import OpenAIBaseResponsesClient
from azure.ai.projects.aio import AIProjectClient
from azure.ai.projects.models import (
Expand Down Expand Up @@ -49,7 +49,7 @@


@use_function_invocation
@use_observability
@use_instrumentation
@use_chat_middleware
class AzureAIClient(OpenAIBaseResponsesClient):
"""Azure AI Agent client."""
@@ -164,27 +164,94 @@ def __init__(
# Track whether we should close client connection
self._should_close_client = should_close_client

async def setup_azure_ai_observability(self, enable_sensitive_data: bool | None = None) -> None:
"""Use this method to setup tracing in your Azure AI Project.
async def configure_azure_monitor(
self,
enable_sensitive_data: bool = False,
**kwargs: Any,
) -> None:
"""Setup observability with Azure Monitor (Azure AI Foundry integration).

This method configures Azure Monitor for telemetry collection using the
connection string from the Azure AI project client.

This will take the connection string from the project project_client.
It will override any connection string that is set in the environment variables.
It will disable any OTLP endpoint that might have been set.
Args:
enable_sensitive_data: Enable sensitive data logging (prompts, responses).
Should only be enabled in development/test environments. Default is False.
**kwargs: Additional arguments passed to configure_azure_monitor().
Common options include:
- enable_live_metrics (bool): Enable Azure Monitor Live Metrics
- credential (TokenCredential): Azure credential for Entra ID auth
- resource (Resource): Custom OpenTelemetry resource
See https://learn.microsoft.com/python/api/azure-monitor-opentelemetry/azure.monitor.opentelemetry.configure_azure_monitor
for full list of options.

Raises:
ImportError: If azure-monitor-opentelemetry-exporter is not installed.

Examples:
.. code-block:: python

from agent_framework.azure import AzureAIClient
from azure.ai.projects.aio import AIProjectClient
from azure.identity.aio import DefaultAzureCredential

async with (
DefaultAzureCredential() as credential,
AIProjectClient(
endpoint="https://your-project.api.azureml.ms", credential=credential
) as project_client,
AzureAIClient(project_client=project_client) as client,
):
# Setup observability with defaults
await client.configure_azure_monitor()

# With live metrics enabled
await client.configure_azure_monitor(enable_live_metrics=True)

# With sensitive data logging (dev/test only)
await client.configure_azure_monitor(enable_sensitive_data=True)

Note:
This method retrieves the Application Insights connection string from the
Azure AI project client automatically. You must have Application Insights
configured in your Azure AI project for this to work.
"""
# Get connection string from project client
try:
conn_string = await self.project_client.telemetry.get_application_insights_connection_string()
except ResourceNotFoundError:
logger.warning(
"No Application Insights connection string found for the Azure AI Project, "
"please call setup_observability() manually."
"No Application Insights connection string found for the Azure AI Project. "
"Please ensure Application Insights is configured in your Azure AI project, "
"or call configure_otel_providers() manually with custom exporters."
)
return
from agent_framework.observability import setup_observability

setup_observability(
applicationinsights_connection_string=conn_string, enable_sensitive_data=enable_sensitive_data
# Import Azure Monitor with proper error handling
try:
from azure.monitor.opentelemetry import configure_azure_monitor
except ImportError as exc:
raise ImportError(
"azure-monitor-opentelemetry is required for Azure Monitor integration. "
"Install it with: pip install azure-monitor-opentelemetry"
) from exc

from agent_framework.observability import create_metric_views, create_resource, enable_instrumentation

# Create resource if not provided in kwargs
if "resource" not in kwargs:
kwargs["resource"] = create_resource()

# Configure Azure Monitor with connection string and kwargs
configure_azure_monitor(
connection_string=conn_string,
views=create_metric_views(),
**kwargs,
)

# Complete setup with core observability
enable_instrumentation(enable_sensitive_data=enable_sensitive_data)

async def __aenter__(self) -> "Self":
"""Async context manager entry."""
return self
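The new method fetches the connection string from the project client and then delegates to `azure.monitor.opentelemetry.configure_azure_monitor` plus the framework's `enable_instrumentation`. When a connection string is already at hand, the manual wiring is roughly the following sketch (based on the calls above; the connection string is a placeholder):

```python
from agent_framework.observability import (
    create_metric_views,
    create_resource,
    enable_instrumentation,
)
from azure.monitor.opentelemetry import configure_azure_monitor

# Placeholder value; normally retrieved via
# project_client.telemetry.get_application_insights_connection_string().
CONNECTION_STRING = "InstrumentationKey=00000000-0000-0000-0000-000000000000"

# Route OpenTelemetry traces, metrics, and logs to Azure Monitor.
configure_azure_monitor(
    connection_string=CONNECTION_STRING,
    views=create_metric_views(),
    resource=create_resource(),
)

# Enable Agent Framework instrumentation; keep sensitive data off outside dev/test.
enable_instrumentation(enable_sensitive_data=False)
```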
8 changes: 4 additions & 4 deletions python/packages/core/agent_framework/_agents.py
@@ -34,7 +34,7 @@
ToolMode,
)
from .exceptions import AgentExecutionException, AgentInitializationError
from .observability import use_agent_observability
from .observability import use_agent_instrumentation

if sys.version_info >= (3, 12):
from typing import override # type: ignore # pragma: no cover
@@ -516,8 +516,8 @@ def _prepare_context_providers(


@use_agent_middleware
@use_agent_observability
class ChatAgent(BaseAgent):
@use_agent_instrumentation(capture_usage=False) # type: ignore[arg-type,misc]
class ChatAgent(BaseAgent): # type: ignore[misc]
"""A Chat Client Agent.

This is the primary agent implementation that uses a chat client to interact
@@ -583,7 +583,7 @@ def get_weather(location: str) -> str:
print(update.text, end="")
"""

AGENT_SYSTEM_NAME: ClassVar[str] = "microsoft.agent_framework"
AGENT_PROVIDER_NAME: ClassVar[str] = "microsoft.agent_framework"

def __init__(
self,
3 changes: 2 additions & 1 deletion python/packages/core/agent_framework/_clients.py
@@ -8,7 +8,6 @@
from pydantic import BaseModel

from ._logging import get_logger
from ._mcp import MCPTool
from ._memory import AggregateContextProvider, ContextProvider
from ._middleware import (
ChatMiddleware,
@@ -426,6 +425,8 @@ async def _normalize_tools(
else [tools]
)
for tool in tools_list: # type: ignore[reportUnknownType]
from ._mcp import MCPTool

if isinstance(tool, MCPTool):
if not tool.is_connected:
await tool.connect()
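The `MCPTool` import moves from module level into `_normalize_tools`, so `_mcp` is only loaded when a tool is actually normalized; the PR does not state the motivation, but deferring an import this way is the usual fix for an import cycle or an optional dependency. A generic sketch of the pattern:

```python
def is_mcp_tool(tool: object) -> bool:
    """Illustrative helper showing the deferred-import pattern used in _normalize_tools."""
    # Importing inside the function delays module loading until the first call,
    # which sidesteps a potential circular import at module-import time.
    from agent_framework._mcp import MCPTool

    return isinstance(tool, MCPTool)
```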
12 changes: 7 additions & 5 deletions python/packages/core/agent_framework/_memory.py
@@ -6,11 +6,13 @@
from collections.abc import MutableSequence, Sequence
from contextlib import AsyncExitStack
from types import TracebackType
from typing import Any, Final, cast
from typing import TYPE_CHECKING, Any, Final, cast

from ._tools import ToolProtocol
from ._types import ChatMessage

if TYPE_CHECKING:
from ._tools import ToolProtocol

if sys.version_info >= (3, 12):
from typing import override # type: ignore # pragma: no cover
else:
@@ -54,7 +56,7 @@ def __init__(
self,
instructions: str | None = None,
messages: Sequence[ChatMessage] | None = None,
tools: Sequence[ToolProtocol] | None = None,
tools: Sequence["ToolProtocol"] | None = None,
):
"""Create a new Context object.

@@ -65,7 +67,7 @@
"""
self.instructions = instructions
self.messages: Sequence[ChatMessage] = messages or []
self.tools: Sequence[ToolProtocol] = tools or []
self.tools: Sequence["ToolProtocol"] = tools or []


# region ContextProvider
@@ -247,7 +249,7 @@ async def invoking(self, messages: ChatMessage | MutableSequence[ChatMessage], *
contexts = await asyncio.gather(*[provider.invoking(messages, **kwargs) for provider in self.providers])
instructions: str = ""
return_messages: list[ChatMessage] = []
tools: list[ToolProtocol] = []
tools: list["ToolProtocol"] = []
for ctx in contexts:
if ctx.instructions:
instructions += ctx.instructions
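`ToolProtocol` is now imported only under `TYPE_CHECKING` and referenced through string annotations, so the import is visible to static type checkers but never executes at runtime. A small standalone sketch of the same pattern, reusing the framework's `ToolProtocol` as the example type:

```python
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Seen by mypy/pyright only; skipped at runtime, so it cannot create a runtime import cycle.
    from agent_framework._tools import ToolProtocol


def count_tools(tools: "list[ToolProtocol]") -> int:
    # The quoted annotation is resolved lazily, matching the Sequence["ToolProtocol"] hints above.
    return len(tools)
```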
6 changes: 6 additions & 0 deletions python/packages/core/agent_framework/_serialization.py
@@ -339,11 +339,17 @@ def to_dict(self, *, exclude: set[str] | None = None, exclude_none: bool = True)
continue
# Handle dicts containing SerializationProtocol values
if isinstance(value, dict):
from datetime import date, datetime, time

serialized_dict: dict[str, Any] = {}
for k, v in value.items():
if isinstance(v, SerializationProtocol):
serialized_dict[k] = v.to_dict(exclude=exclude, exclude_none=exclude_none)
continue
# Convert datetime objects to strings
if isinstance(v, (datetime, date, time)):
serialized_dict[k] = str(v)
continue
# Check if the value is JSON serializable
if is_serializable(v):
serialized_dict[k] = v
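With this branch in place, `datetime`, `date`, and `time` values nested inside dict fields are stringified with `str()` instead of tripping up JSON serialization later. A standalone illustration of the conversion the new code performs:

```python
import json
from datetime import date, datetime, time

payload = {
    "created_at": datetime(2024, 5, 1, 12, 30),
    "day": date(2024, 5, 1),
    "at": time(12, 30),
    "name": "example",
}

# Mirrors what the new to_dict branch does for these types: fall back to str().
serialized = {k: str(v) if isinstance(v, (datetime, date, time)) else v for k, v in payload.items()}
print(json.dumps(serialized))
# {"created_at": "2024-05-01 12:30:00", "day": "2024-05-01", "at": "12:30:00", "name": "example"}
```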
7 changes: 4 additions & 3 deletions python/packages/core/agent_framework/_types.py
@@ -1816,13 +1816,14 @@ def prepare_function_call_results(content: Contents | Any | list[Contents | Any])
"""Prepare the values of the function call results."""
if isinstance(content, Contents):
# For BaseContent objects, use to_dict and serialize to JSON
return json.dumps(content.to_dict(exclude={"raw_representation", "additional_properties"}))
# Use default=str to handle datetime and other non-JSON-serializable objects
return json.dumps(content.to_dict(exclude={"raw_representation", "additional_properties"}), default=str)

dumpable = _prepare_function_call_results_as_dumpable(content)
if isinstance(dumpable, str):
return dumpable
# fallback
return json.dumps(dumpable)
# fallback - use default=str to handle datetime and other non-JSON-serializable objects
return json.dumps(dumpable, default=str)


# region Chat Response constants
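`default=str` tells `json.dumps` to call `str()` on anything it cannot encode natively (datetimes, UUIDs, Decimals, and so on) instead of raising `TypeError`, which would otherwise happen for function-call results carrying such values. A quick illustration:

```python
import json
import uuid
from datetime import datetime
from decimal import Decimal

result = {"id": uuid.uuid4(), "when": datetime(2024, 5, 1), "amount": Decimal("9.99")}

# Without default=str this raises: TypeError: Object of type UUID is not JSON serializable.
print(json.dumps(result, default=str))
```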
4 changes: 2 additions & 2 deletions python/packages/core/agent_framework/azure/_chat_client.py
@@ -21,7 +21,7 @@
use_function_invocation,
)
from agent_framework.exceptions import ServiceInitializationError
from agent_framework.observability import use_observability
from agent_framework.observability import use_instrumentation
from agent_framework.openai._chat_client import OpenAIBaseChatClient

from ._shared import (
@@ -41,7 +41,7 @@


@use_function_invocation
@use_observability
@use_instrumentation
@use_chat_middleware
class AzureOpenAIChatClient(AzureOpenAIConfigMixin, OpenAIBaseChatClient):
"""Azure OpenAI Chat completion class."""
Changes to the Azure OpenAI Responses client
@@ -10,7 +10,7 @@

from agent_framework import use_chat_middleware, use_function_invocation
from agent_framework.exceptions import ServiceInitializationError
from agent_framework.observability import use_observability
from agent_framework.observability import use_instrumentation
from agent_framework.openai._responses_client import OpenAIBaseResponsesClient

from ._shared import (
@@ -22,7 +22,7 @@


@use_function_invocation
@use_observability
@use_instrumentation
@use_chat_middleware
class AzureOpenAIResponsesClient(AzureOpenAIConfigMixin, OpenAIBaseResponsesClient):
"""Azure Responses completion class."""