Add basic OTEL tracing for SingleThreadedAgentRuntime and WorkerAgentRuntime (#459)

* Downgrade protobuf from v5 to v4

* Add some telemetry blocks for the single-threaded agent runtime

* Rename

* Add comments

* Update uv sync

* Address build complaints

* Fix mypy

* Add tracing for worker

* Add tracing to worker

* Fix

* Fix up

* Update

* Simplify

* Update

* Add test dep for otel sdk

* Minor fix

* Add telemetry docs

* Simple

* Fix mypy

* Add test; fix

* Fix merge

* Update telemetry.md

---------

Co-authored-by: Ryan Sweet <rysweet@microsoft.com>
Co-authored-by: Jack Gerrits <jackgerrits@users.noreply.github.com>
Aamir 2024-09-11 16:47:55 -07:00 committed by GitHub
parent 5dce10d530
commit 461b8a8bbd
10 changed files with 698 additions and 255 deletions

View File

@ -0,0 +1,48 @@
# Telemetry
AGNext has native support for [OpenTelemetry](https://opentelemetry.io/). This allows you to collect telemetry data from your application and send it to a telemetry backend of your choosing.
These are the components that are currently instrumented:
- Runtime (Single Threaded Agent Runtime, Worker Agent Runtime)
## Instrumenting your application
To instrument your application, you will need an SDK and an exporter. You may already have these if your application is already instrumented with OpenTelemetry.
```bash
pip install opentelemetry-sdk
```
Depending on your OpenTelemetry collector, you can use gRPC or HTTP to export your telemetry.
```bash
# Pick one of the following
pip install opentelemetry-exporter-otlp-proto-http
pip install opentelemetry-exporter-otlp-proto-grpc
```
Next, we need to get a tracer provider:
```python
from opentelemetry import trace
from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor

def configure_otlp_tracing(endpoint: str | None = None) -> trace.TracerProvider:
    # Configure tracing: batch-export spans to the OTLP collector at `endpoint`
    # (falls back to the exporter's default endpoint when None).
    tracer_provider = TracerProvider(resource=Resource({"service.name": "my-service"}))
    processor = BatchSpanProcessor(OTLPSpanExporter(endpoint=endpoint))
    tracer_provider.add_span_processor(processor)
    trace.set_tracer_provider(tracer_provider)
    return tracer_provider
```
Now you can pass the tracer provider when creating your runtime:
```python
provider = configure_otlp_tracing()
single_threaded_runtime = SingleThreadedAgentRuntime(tracer_provider=provider)
worker_runtime = WorkerAgentRuntime(tracer_provider=provider)
```
And that's it! Your application is now instrumented with OpenTelemetry, and you can view the collected traces in your telemetry backend.
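If you just want to inspect spans locally without running a collector, the SDK's console exporter is enough. A minimal sketch, assuming the same `SingleThreadedAgentRuntime` import path used by the tests in this change:
```python
from autogen_core.application import SingleThreadedAgentRuntime
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import ConsoleSpanExporter, SimpleSpanProcessor

# Print spans to stdout instead of shipping them to a collector.
tracer_provider = TracerProvider()
tracer_provider.add_span_processor(SimpleSpanProcessor(ConsoleSpanExporter()))

runtime = SingleThreadedAgentRuntime(tracer_provider=tracer_provider)
# Register agents, start the runtime, and publish messages as usual; spans named like
# "autogen create ...", "autogen publish ...", and "autogen process ..." are printed as they end.
```
`SimpleSpanProcessor` exports each span synchronously as it ends, which is convenient for local debugging but not what you would use in production.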

View File

@ -22,7 +22,8 @@ dependencies = [
"grpcio~=1.62.0",
"protobuf~=4.25.1",
"tiktoken",
"azure-core"
"azure-core",
"opentelemetry-api~=1.27.0"
]
[tool.uv]
@ -59,6 +60,7 @@ dev-dependencies = [
"types-protobuf",
"types-requests",
"wikipedia",
"opentelemetry-sdk>=1.27.0",
]

View File

@ -11,6 +11,8 @@ from dataclasses import dataclass
from enum import Enum
from typing import Any, Awaitable, Callable, Dict, List, Mapping, ParamSpec, Set, Type, TypeVar, cast
from opentelemetry.trace import NoOpTracerProvider, TracerProvider
from ..base import (
Agent,
AgentId,
@ -28,6 +30,7 @@ from ..base import (
from ..base.exceptions import MessageDroppedException
from ..base.intervention import DropMessage, InterventionHandler
from ._helpers import SubscriptionManager, get_impl
from .telemetry import EnvelopeMetadata, get_telemetry_envelope_metadata, trace_block
logger = logging.getLogger("autogen_core")
event_logger = logging.getLogger("autogen_core.events")
@ -46,6 +49,7 @@ class PublishMessageEnvelope:
cancellation_token: CancellationToken
sender: AgentId | None
topic_id: TopicId
metadata: EnvelopeMetadata | None = None
@dataclass(kw_only=True)
@ -58,6 +62,7 @@ class SendMessageEnvelope:
recipient: AgentId
future: Future[Any]
cancellation_token: CancellationToken
metadata: EnvelopeMetadata | None = None
@dataclass(kw_only=True)
@ -68,6 +73,7 @@ class ResponseMessageEnvelope:
future: Future[Any]
sender: AgentId
recipient: AgentId | None
metadata: EnvelopeMetadata | None = None
P = ParamSpec("P")
@ -139,7 +145,13 @@ class RunContext:
class SingleThreadedAgentRuntime(AgentRuntime):
def __init__(self, *, intervention_handlers: List[InterventionHandler] | None = None) -> None:
def __init__(
self,
*,
intervention_handlers: List[InterventionHandler] | None = None,
tracer_provider: TracerProvider | None = None,
) -> None:
self._tracer = (tracer_provider if tracer_provider else NoOpTracerProvider()).get_tracer(__name__)
self._message_queue: List[PublishMessageEnvelope | SendMessageEnvelope | ResponseMessageEnvelope] = []
# (namespace, type) -> List[AgentId]
self._agent_factories: Dict[
@ -188,26 +200,28 @@ class SingleThreadedAgentRuntime(AgentRuntime):
# )
# )
future = asyncio.get_event_loop().create_future()
if recipient.type not in self._known_agent_names:
future.set_exception(Exception("Recipient not found"))
with trace_block(self._tracer, "create", recipient, parent=None):
future = asyncio.get_event_loop().create_future()
if recipient.type not in self._known_agent_names:
future.set_exception(Exception("Recipient not found"))
content = message.__dict__ if hasattr(message, "__dict__") else message
logger.info(f"Sending message of type {type(message).__name__} to {recipient.type}: {content}")
content = message.__dict__ if hasattr(message, "__dict__") else message
logger.info(f"Sending message of type {type(message).__name__} to {recipient.type}: {content}")
self._message_queue.append(
SendMessageEnvelope(
message=message,
recipient=recipient,
future=future,
cancellation_token=cancellation_token,
sender=sender,
self._message_queue.append(
SendMessageEnvelope(
message=message,
recipient=recipient,
future=future,
cancellation_token=cancellation_token,
sender=sender,
metadata=get_telemetry_envelope_metadata(),
)
)
)
cancellation_token.link_future(future)
cancellation_token.link_future(future)
return await future
return await future
async def publish_message(
self,
@ -217,26 +231,31 @@ class SingleThreadedAgentRuntime(AgentRuntime):
sender: AgentId | None = None,
cancellation_token: CancellationToken | None = None,
) -> None:
if cancellation_token is None:
cancellation_token = CancellationToken()
content = message.__dict__ if hasattr(message, "__dict__") else message
logger.info(f"Publishing message of type {type(message).__name__} to all subscribers: {content}")
with trace_block(self._tracer, "create", topic_id, parent=None):
if cancellation_token is None:
cancellation_token = CancellationToken()
content = message.__dict__ if hasattr(message, "__dict__") else message
logger.info(f"Publishing message of type {type(message).__name__} to all subscribers: {content}")
# event_logger.info(
# MessageEvent(
# payload=message,
# sender=sender,
# receiver=None,
# kind=MessageKind.PUBLISH,
# delivery_stage=DeliveryStage.SEND,
# )
# )
# event_logger.info(
# MessageEvent(
# payload=message,
# sender=sender,
# receiver=None,
# kind=MessageKind.PUBLISH,
# delivery_stage=DeliveryStage.SEND,
# )
# )
self._message_queue.append(
PublishMessageEnvelope(
message=message, cancellation_token=cancellation_token, sender=sender, topic_id=topic_id
self._message_queue.append(
PublishMessageEnvelope(
message=message,
cancellation_token=cancellation_token,
sender=sender,
topic_id=topic_id,
metadata=get_telemetry_envelope_metadata(),
)
)
)
async def save_state(self) -> Mapping[str, Any]:
state: Dict[str, Dict[str, Any]] = {}
@ -251,122 +270,131 @@ class SingleThreadedAgentRuntime(AgentRuntime):
(await self._get_agent(agent_id)).load_state(state[str(agent_id)])
async def _process_send(self, message_envelope: SendMessageEnvelope) -> None:
recipient = message_envelope.recipient
# todo: check if recipient is in the known namespaces
# assert recipient in self._agents
with trace_block(self._tracer, "send", message_envelope.recipient, parent=message_envelope.metadata):
recipient = message_envelope.recipient
# todo: check if recipient is in the known namespaces
# assert recipient in self._agents
try:
# TODO use id
sender_name = message_envelope.sender.type if message_envelope.sender is not None else "Unknown"
logger.info(
f"Calling message handler for {recipient} with message type {type(message_envelope.message).__name__} sent by {sender_name}"
)
# event_logger.info(
# MessageEvent(
# payload=message_envelope.message,
# sender=message_envelope.sender,
# receiver=recipient,
# kind=MessageKind.DIRECT,
# delivery_stage=DeliveryStage.DELIVER,
# )
# )
recipient_agent = await self._get_agent(recipient)
message_context = MessageContext(
sender=message_envelope.sender,
topic_id=None,
is_rpc=True,
cancellation_token=message_envelope.cancellation_token,
)
with MessageHandlerContext.populate_context(recipient_agent.id):
response = await recipient_agent.on_message(
message_envelope.message,
ctx=message_context,
try:
# TODO use id
sender_name = message_envelope.sender.type if message_envelope.sender is not None else "Unknown"
logger.info(
f"Calling message handler for {recipient} with message type {type(message_envelope.message).__name__} sent by {sender_name}"
)
except BaseException as e:
message_envelope.future.set_exception(e)
self._outstanding_tasks.decrement()
return
self._message_queue.append(
ResponseMessageEnvelope(
message=response,
future=message_envelope.future,
sender=message_envelope.recipient,
recipient=message_envelope.sender,
)
)
self._outstanding_tasks.decrement()
async def _process_publish(self, message_envelope: PublishMessageEnvelope) -> None:
responses: List[Awaitable[Any]] = []
recipients = await self._subscription_manager.get_subscribed_recipients(message_envelope.topic_id)
for agent_id in recipients:
# Avoid sending the message back to the sender
if message_envelope.sender is not None and agent_id == message_envelope.sender:
continue
sender_agent = (
await self._get_agent(message_envelope.sender) if message_envelope.sender is not None else None
)
sender_name = str(sender_agent.id) if sender_agent is not None else "Unknown"
logger.info(
f"Calling message handler for {agent_id.type} with message type {type(message_envelope.message).__name__} published by {sender_name}"
)
# event_logger.info(
# MessageEvent(
# payload=message_envelope.message,
# sender=message_envelope.sender,
# receiver=agent,
# kind=MessageKind.PUBLISH,
# delivery_stage=DeliveryStage.DELIVER,
# )
# )
message_context = MessageContext(
sender=message_envelope.sender,
topic_id=message_envelope.topic_id,
is_rpc=False,
cancellation_token=message_envelope.cancellation_token,
)
agent = await self._get_agent(agent_id)
with MessageHandlerContext.populate_context(agent.id):
future = agent.on_message(
message_envelope.message,
ctx=message_context,
# event_logger.info(
# MessageEvent(
# payload=message_envelope.message,
# sender=message_envelope.sender,
# receiver=recipient,
# kind=MessageKind.DIRECT,
# delivery_stage=DeliveryStage.DELIVER,
# )
# )
recipient_agent = await self._get_agent(recipient)
message_context = MessageContext(
sender=message_envelope.sender,
topic_id=None,
is_rpc=True,
cancellation_token=message_envelope.cancellation_token,
)
responses.append(future)
try:
_all_responses = await asyncio.gather(*responses)
except BaseException as e:
# Ignore cancelled errors from logs
if isinstance(e, CancelledError):
with MessageHandlerContext.populate_context(recipient_agent.id):
response = await recipient_agent.on_message(
message_envelope.message,
ctx=message_context,
)
except BaseException as e:
message_envelope.future.set_exception(e)
self._outstanding_tasks.decrement()
return
logger.error("Error processing publish message", exc_info=True)
finally:
self._message_queue.append(
ResponseMessageEnvelope(
message=response,
future=message_envelope.future,
sender=message_envelope.recipient,
recipient=message_envelope.sender,
metadata=get_telemetry_envelope_metadata(),
)
)
self._outstanding_tasks.decrement()
# TODO if responses are given for a publish
async def _process_publish(self, message_envelope: PublishMessageEnvelope) -> None:
with trace_block(self._tracer, "publish", message_envelope.topic_id, parent=message_envelope.metadata):
responses: List[Awaitable[Any]] = []
recipients = await self._subscription_manager.get_subscribed_recipients(message_envelope.topic_id)
for agent_id in recipients:
# Avoid sending the message back to the sender
if message_envelope.sender is not None and agent_id == message_envelope.sender:
continue
sender_agent = (
await self._get_agent(message_envelope.sender) if message_envelope.sender is not None else None
)
sender_name = str(sender_agent.id) if sender_agent is not None else "Unknown"
logger.info(
f"Calling message handler for {agent_id.type} with message type {type(message_envelope.message).__name__} published by {sender_name}"
)
# event_logger.info(
# MessageEvent(
# payload=message_envelope.message,
# sender=message_envelope.sender,
# receiver=agent,
# kind=MessageKind.PUBLISH,
# delivery_stage=DeliveryStage.DELIVER,
# )
# )
message_context = MessageContext(
sender=message_envelope.sender,
topic_id=message_envelope.topic_id,
is_rpc=False,
cancellation_token=message_envelope.cancellation_token,
)
agent = await self._get_agent(agent_id)
async def _on_message(agent: Agent, message_context: MessageContext) -> Any:
with trace_block(self._tracer, "process", agent.id, parent=None):
return await agent.on_message(
message_envelope.message,
ctx=message_context,
)
with MessageHandlerContext.populate_context(agent.id):
future = _on_message(agent, message_context)
responses.append(future)
try:
_all_responses = await asyncio.gather(*responses)
except BaseException as e:
# Ignore cancelled errors from logs
if isinstance(e, CancelledError):
self._outstanding_tasks.decrement()
return
logger.error("Error processing publish message", exc_info=True)
finally:
self._outstanding_tasks.decrement()
# TODO if responses are given for a publish
async def _process_response(self, message_envelope: ResponseMessageEnvelope) -> None:
content = (
message_envelope.message.__dict__
if hasattr(message_envelope.message, "__dict__")
else message_envelope.message
)
logger.info(
f"Resolving response with message type {type(message_envelope.message).__name__} for recipient {message_envelope.recipient} from {message_envelope.sender.type}: {content}"
)
# event_logger.info(
# MessageEvent(
# payload=message_envelope.message,
# sender=message_envelope.sender,
# receiver=message_envelope.recipient,
# kind=MessageKind.RESPOND,
# delivery_stage=DeliveryStage.DELIVER,
# )
# )
self._outstanding_tasks.decrement()
message_envelope.future.set_result(message_envelope.message)
with trace_block(self._tracer, "ack", message_envelope.recipient, parent=message_envelope.metadata):
content = (
message_envelope.message.__dict__
if hasattr(message_envelope.message, "__dict__")
else message_envelope.message
)
logger.info(
f"Resolving response with message type {type(message_envelope.message).__name__} for recipient {message_envelope.recipient} from {message_envelope.sender.type}: {content}"
)
# event_logger.info(
# MessageEvent(
# payload=message_envelope.message,
# sender=message_envelope.sender,
# receiver=message_envelope.recipient,
# kind=MessageKind.RESPOND,
# delivery_stage=DeliveryStage.DELIVER,
# )
# )
self._outstanding_tasks.decrement()
message_envelope.future.set_result(message_envelope.message)
async def process_next(self) -> None:
"""Process the next message in the queue."""
@ -375,21 +403,24 @@ class SingleThreadedAgentRuntime(AgentRuntime):
# Yield control to the event loop to allow other tasks to run
await asyncio.sleep(0)
return
message_envelope = self._message_queue.pop(0)
match message_envelope:
case SendMessageEnvelope(message=message, sender=sender, recipient=recipient, future=future):
if self._intervention_handlers is not None:
for handler in self._intervention_handlers:
try:
temp_message = await handler.on_send(message, sender=sender, recipient=recipient)
except BaseException as e:
future.set_exception(e)
return
if temp_message is DropMessage or isinstance(temp_message, DropMessage):
future.set_exception(MessageDroppedException())
return
with trace_block(
self._tracer, "intercept", handler.__class__.__name__, parent=message_envelope.metadata
):
try:
temp_message = await handler.on_send(message, sender=sender, recipient=recipient)
except BaseException as e:
future.set_exception(e)
return
if temp_message is DropMessage or isinstance(temp_message, DropMessage):
future.set_exception(MessageDroppedException())
return
message_envelope.message = temp_message
self._outstanding_tasks.increment()
task = asyncio.create_task(self._process_send(message_envelope))
@ -401,15 +432,19 @@ class SingleThreadedAgentRuntime(AgentRuntime):
):
if self._intervention_handlers is not None:
for handler in self._intervention_handlers:
try:
temp_message = await handler.on_publish(message, sender=sender)
except BaseException as e:
# TODO: we should raise the intervention exception to the publisher.
logger.error(f"Exception raised in intervention handler: {e}", exc_info=True)
return
if temp_message is DropMessage or isinstance(temp_message, DropMessage):
# TODO log message dropped
return
with trace_block(
self._tracer, "intercept", handler.__class__.__name__, parent=message_envelope.metadata
):
try:
temp_message = await handler.on_publish(message, sender=sender)
except BaseException as e:
# TODO: we should raise the intervention exception to the publisher.
logger.error(f"Exception raised in intervention handler: {e}", exc_info=True)
return
if temp_message is DropMessage or isinstance(temp_message, DropMessage):
# TODO log message dropped
return
message_envelope.message = temp_message
self._outstanding_tasks.increment()
task = asyncio.create_task(self._process_publish(message_envelope))

View File

@ -16,6 +16,7 @@ from typing import (
DefaultDict,
Dict,
List,
Literal,
Mapping,
ParamSpec,
Set,
@ -26,6 +27,7 @@ from typing import (
import grpc
from grpc.aio import StreamStreamCall
from opentelemetry.trace import NoOpTracerProvider, TracerProvider
from typing_extensions import Self
from autogen_core.base import JSON_DATA_CONTENT_TYPE
@ -48,6 +50,7 @@ from ..base import (
from ..components import TypeSubscription
from ._helpers import SubscriptionManager, get_impl
from .protos import agent_worker_pb2, agent_worker_pb2_grpc
from .telemetry import get_telemetry_grpc_metadata, trace_block
if TYPE_CHECKING:
from .protos.agent_worker_pb2_grpc import AgentRpcAsyncStub
@ -153,7 +156,8 @@ class HostConnection:
class WorkerAgentRuntime(AgentRuntime):
def __init__(self) -> None:
def __init__(self, tracer_provider: TracerProvider | None = None) -> None:
self._tracer = (tracer_provider if tracer_provider else NoOpTracerProvider()).get_tracer(__name__)
self._per_type_subscribers: DefaultDict[tuple[str, str], Set[AgentId]] = defaultdict(set)
self._agent_factories: Dict[
str, Callable[[], Agent | Awaitable[Agent]] | Callable[[AgentRuntime, AgentId], Agent | Awaitable[Agent]]
@ -228,6 +232,18 @@ class WorkerAgentRuntime(AgentRuntime):
def _known_agent_names(self) -> Set[str]:
return set(self._agent_factories.keys())
async def _send_message(
self,
runtime_message: agent_worker_pb2.Message,
send_type: Literal["send", "publish"],
recipient: AgentId | TopicId,
telemetry_metadata: Mapping[str, str],
) -> None:
if self._host_connection is None:
raise RuntimeError("Host connection is not set.")
with trace_block(self._tracer, send_type, recipient, parent=telemetry_metadata):
await self._host_connection.send(runtime_message)
async def send_message(
self,
message: Any,
@ -240,36 +256,40 @@ class WorkerAgentRuntime(AgentRuntime):
raise ValueError("Runtime must be running when sending message.")
if self._host_connection is None:
raise RuntimeError("Host connection is not set.")
# create a new future for the result
future = asyncio.get_event_loop().create_future()
async with self._pending_requests_lock:
self._next_request_id += 1
request_id = self._next_request_id
request_id_str = str(request_id)
self._pending_requests[request_id_str] = future
sender = cast(AgentId, sender)
data_type = MESSAGE_TYPE_REGISTRY.type_name(message)
serialized_message = MESSAGE_TYPE_REGISTRY.serialize(
message, type_name=data_type, data_content_type=JSON_DATA_CONTENT_TYPE
)
runtime_message = agent_worker_pb2.Message(
request=agent_worker_pb2.RpcRequest(
request_id=request_id_str,
target=agent_worker_pb2.AgentId(type=recipient.type, key=recipient.key),
source=agent_worker_pb2.AgentId(type=sender.type, key=sender.key),
payload=agent_worker_pb2.Payload(
data_type=data_type,
data=serialized_message,
data_content_type=JSON_DATA_CONTENT_TYPE,
),
with trace_block(self._tracer, "create", recipient, parent=None, attributes={"message_type": data_type}):
# create a new future for the result
future = asyncio.get_event_loop().create_future()
async with self._pending_requests_lock:
self._next_request_id += 1
request_id = self._next_request_id
request_id_str = str(request_id)
self._pending_requests[request_id_str] = future
sender = cast(AgentId, sender)
serialized_message = MESSAGE_TYPE_REGISTRY.serialize(
message, type_name=data_type, data_content_type=JSON_DATA_CONTENT_TYPE
)
)
# TODO: Find a way to handle timeouts/errors
task = asyncio.create_task(self._host_connection.send(runtime_message))
self._background_tasks.add(task)
task.add_done_callback(self._raise_on_exception)
task.add_done_callback(self._background_tasks.discard)
return await future
telemetry_metadata = get_telemetry_grpc_metadata()
runtime_message = agent_worker_pb2.Message(
request=agent_worker_pb2.RpcRequest(
request_id=request_id_str,
target=agent_worker_pb2.AgentId(type=recipient.type, key=recipient.key),
source=agent_worker_pb2.AgentId(type=sender.type, key=sender.key),
metadata=telemetry_metadata,
payload=agent_worker_pb2.Payload(
data_type=data_type,
data=serialized_message,
data_content_type=JSON_DATA_CONTENT_TYPE,
),
)
)
# TODO: Find a way to handle timeouts/errors
task = asyncio.create_task(self._send_message(runtime_message, "send", recipient, telemetry_metadata))
self._background_tasks.add(task)
task.add_done_callback(self._raise_on_exception)
task.add_done_callback(self._background_tasks.discard)
return await future
async def publish_message(
self,
@ -284,24 +304,28 @@ class WorkerAgentRuntime(AgentRuntime):
if self._host_connection is None:
raise RuntimeError("Host connection is not set.")
message_type = MESSAGE_TYPE_REGISTRY.type_name(message)
serialized_message = MESSAGE_TYPE_REGISTRY.serialize(
message, type_name=message_type, data_content_type=JSON_DATA_CONTENT_TYPE
)
runtime_message = agent_worker_pb2.Message(
event=agent_worker_pb2.Event(
topic_type=topic_id.type,
topic_source=topic_id.source,
payload=agent_worker_pb2.Payload(
data_type=message_type,
data=serialized_message,
data_content_type=JSON_DATA_CONTENT_TYPE,
),
with trace_block(self._tracer, "create", topic_id, parent=None, attributes={"message_type": message_type}):
serialized_message = MESSAGE_TYPE_REGISTRY.serialize(
message, type_name=message_type, data_content_type=JSON_DATA_CONTENT_TYPE
)
)
task = asyncio.create_task(self._host_connection.send(runtime_message))
self._background_tasks.add(task)
task.add_done_callback(self._raise_on_exception)
task.add_done_callback(self._background_tasks.discard)
telemetry_metadata = get_telemetry_grpc_metadata()
runtime_message = agent_worker_pb2.Message(
event=agent_worker_pb2.Event(
topic_type=topic_id.type,
topic_source=topic_id.source,
metadata=telemetry_metadata,
payload=agent_worker_pb2.Payload(
data_type=message_type,
data=serialized_message,
data_content_type=JSON_DATA_CONTENT_TYPE,
),
)
)
task = asyncio.create_task(self._send_message(runtime_message, "publish", topic_id, telemetry_metadata))
self._background_tasks.add(task)
task.add_done_callback(self._raise_on_exception)
task.add_done_callback(self._background_tasks.discard)
async def save_state(self) -> Mapping[str, Any]:
raise NotImplementedError("Saving state is not yet implemented.")
@ -344,13 +368,21 @@ class WorkerAgentRuntime(AgentRuntime):
# Call the target agent.
try:
with MessageHandlerContext.populate_context(target_agent.id):
result = await target_agent.on_message(message, ctx=message_context)
with trace_block(
self._tracer,
"process",
target_agent.id,
parent=request.metadata,
attributes={"request_id": request.request_id},
):
result = await target_agent.on_message(message, ctx=message_context)
except BaseException as e:
response_message = agent_worker_pb2.Message(
response=agent_worker_pb2.RpcResponse(
request_id=request.request_id,
error=str(e),
)
metadata=get_telemetry_grpc_metadata(),
),
)
# Send the error response.
await self._host_connection.send(response_message)
@ -371,6 +403,7 @@ class WorkerAgentRuntime(AgentRuntime):
data=serialized_result,
data_content_type=JSON_DATA_CONTENT_TYPE,
),
metadata=get_telemetry_grpc_metadata(),
)
)
@ -378,24 +411,27 @@ class WorkerAgentRuntime(AgentRuntime):
await self._host_connection.send(response_message)
async def _process_response(self, response: agent_worker_pb2.RpcResponse) -> None:
# Deserialize the result.
result = MESSAGE_TYPE_REGISTRY.deserialize(
response.payload.data,
type_name=response.payload.data_type,
data_content_type=response.payload.data_content_type,
)
# Get the future and set the result.
future = self._pending_requests.pop(response.request_id)
if len(response.error) > 0:
future.set_exception(Exception(response.error))
else:
future.set_result(result)
with trace_block(
self._tracer, "ack", None, parent=response.metadata, attributes={"request_id": response.request_id}
):
# Deserialize the result.
result = MESSAGE_TYPE_REGISTRY.deserialize(
response.payload.data,
type_name=response.payload.data_type,
data_content_type=response.payload.data_content_type,
)
# Get the future and set the result.
future = self._pending_requests.pop(response.request_id)
if len(response.error) > 0:
future.set_exception(Exception(response.error))
else:
future.set_result(result)
async def _process_event(self, event: agent_worker_pb2.Event) -> None:
topic_id = TopicId(event.topic_type, event.topic_source)
message = MESSAGE_TYPE_REGISTRY.deserialize(
event.payload.data, type_name=event.payload.data_type, data_content_type=event.payload.data_content_type
)
topic_id = TopicId(event.topic_type, event.topic_source)
# Get the recipients for the topic.
recipients = await self._subscription_manager.get_subscribed_recipients(topic_id)
# Send the message to each recipient.
@ -410,13 +446,18 @@ class WorkerAgentRuntime(AgentRuntime):
)
agent = await self._get_agent(agent_id)
with MessageHandlerContext.populate_context(agent.id):
future = agent.on_message(message, ctx=message_context)
async def send_message(agent: Agent, message_context: MessageContext) -> Any:
with trace_block(self._tracer, "process", agent.id, parent=event.metadata):
await agent.on_message(message, ctx=message_context)
future = send_message(agent, message_context)
responses.append(future)
# Wait for all responses.
try:
await asyncio.gather(*responses)
except BaseException as e:
logger.error("Error handling event", exc_info=e)
# Wait for all responses.
try:
await asyncio.gather(*responses)
except BaseException as e:
logger.error("Error handling event", exc_info=e)
async def register(
self,

View File

@ -23,7 +23,7 @@ if TYPE_CHECKING:
"AgentRpcAsyncStub",
"AgentRpcStub",
"Message",
"AgentId",
"AgentId"
]
else:
__all__ = ["RpcRequest", "RpcResponse", "Event", "RegisterAgentType", "AgentRpcStub", "Message", "AgentId"]

View File

@ -0,0 +1,15 @@
from ._tracing import (
EnvelopeMetadata,
TelemetryMetadataContainer,
get_telemetry_envelope_metadata,
get_telemetry_grpc_metadata,
trace_block,
)
__all__ = [
"EnvelopeMetadata",
"get_telemetry_envelope_metadata",
"get_telemetry_grpc_metadata",
"TelemetryMetadataContainer",
"trace_block",
]

View File

@ -0,0 +1,225 @@
import contextlib
import logging
from dataclasses import dataclass
from typing import Dict, Iterator, List, Literal, Mapping, Optional, Sequence, Union
from opentelemetry.context import Context
from opentelemetry.propagate import extract
from opentelemetry.trace import Link, Span, SpanKind, Tracer
from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator
from opentelemetry.util import types
from ...base import AgentId, TopicId
NAMESPACE = "autogen"
logger = logging.getLogger("autogen_core")
event_logger = logging.getLogger("autogen_core.events")
@dataclass(kw_only=True)
class EnvelopeMetadata:
"""Metadata for an envelope."""
traceparent: Optional[str] = None
tracestate: Optional[str] = None
def _get_carrier_for_envelope_metadata(envelope_metadata: EnvelopeMetadata) -> Dict[str, str]:
carrier: Dict[str, str] = {}
if envelope_metadata.traceparent is not None:
carrier["traceparent"] = envelope_metadata.traceparent
if envelope_metadata.tracestate is not None:
carrier["tracestate"] = envelope_metadata.tracestate
return carrier
def get_telemetry_envelope_metadata() -> EnvelopeMetadata:
"""
Retrieves the telemetry envelope metadata.
Returns:
EnvelopeMetadata: The envelope metadata containing the traceparent and tracestate.
"""
carrier: Dict[str, str] = {}
TraceContextTextMapPropagator().inject(carrier)
return EnvelopeMetadata(
traceparent=carrier.get("traceparent"),
tracestate=carrier.get("tracestate"),
)
def _get_carrier_for_remote_call_metadata(remote_call_metadata: Mapping[str, str]) -> Dict[str, str]:
carrier: Dict[str, str] = {}
traceparent = remote_call_metadata.get("traceparent")
tracestate = remote_call_metadata.get("tracestate")
if traceparent:
carrier["traceparent"] = traceparent
if tracestate:
carrier["tracestate"] = tracestate
return carrier
def get_telemetry_grpc_metadata(existingMetadata: Optional[Mapping[str, str]] = None) -> Dict[str, str]:
"""
Retrieves the telemetry gRPC metadata.
Args:
existingMetadata (Optional[Mapping[str, str]]): The existing metadata to include in the gRPC metadata.
Returns:
Mapping[str, str]: The gRPC metadata containing the traceparent and tracestate.
"""
carrier: Dict[str, str] = {}
TraceContextTextMapPropagator().inject(carrier)
traceparent = carrier.get("traceparent")
tracestate = carrier.get("tracestate")
metadata: Dict[str, str] = {}
if existingMetadata is not None:
for key, value in existingMetadata.items():
metadata[key] = value
if traceparent is not None:
metadata["traceparent"] = traceparent
if tracestate is not None:
metadata["tracestate"] = tracestate
return metadata
TelemetryMetadataContainer = Optional[EnvelopeMetadata] | Mapping[str, str]
def _get_telemetry_context(metadata: TelemetryMetadataContainer) -> Context:
"""
Retrieves the telemetry context from the given metadata.
Args:
metadata (Optional[EnvelopeMetadata]): The metadata containing the telemetry context.
Returns:
Context: The telemetry context extracted from the metadata, or an empty context if the metadata is None.
"""
if metadata is None:
return Context()
elif isinstance(metadata, EnvelopeMetadata):
return extract(_get_carrier_for_envelope_metadata(metadata))
elif hasattr(metadata, "__getitem__"):
return extract(_get_carrier_for_remote_call_metadata(metadata))
else:
raise ValueError(f"Unknown metadata type: {type(metadata)}")
MessagingDestination = Union[AgentId, TopicId, str, None]
# TODO: Once we figure out how the destinations are stringified, we can use that convention
# https://github.com/microsoft/agnext/issues/399
def _get_destination_str(destination: MessagingDestination) -> str:
if isinstance(destination, AgentId):
return f"{destination.type}.({destination.key})-A"
elif isinstance(destination, TopicId):
return f"{destination.type}.({destination.source})-T"
elif isinstance(destination, str):
return destination
elif destination is None:
return ""
else:
raise ValueError(f"Unknown destination type: {type(destination)}")
MessagingOperation = Literal["create", "send", "publish", "receive", "intercept", "process", "ack"]
def _get_span_name(
operation: MessagingOperation,
destination: Optional[MessagingDestination],
) -> str:
"""
Returns the span name based on the given operation and destination.
Semantic Conventions - https://opentelemetry.io/docs/specs/semconv/messaging/messaging-spans/#span-name
Parameters:
operation (MessagingOperation): The messaging operation.
destination (Optional[MessagingDestination]): The messaging destination.
Returns:
str: The span name.
"""
span_parts: List[str] = [operation]
destination_str = _get_destination_str(destination)
if destination_str:
span_parts.append(destination_str)
span_name = " ".join(span_parts)
return f"{NAMESPACE} {span_name}"
def _get_span_kind(operation: MessagingOperation) -> SpanKind:
"""
Determines the span kind based on the given messaging operation.
Semantic Conventions - https://opentelemetry.io/docs/specs/semconv/messaging/messaging-spans/#span-kind
Parameters:
operation (MessagingOperation): The messaging operation.
Returns:
SpanKind: The span kind based on the messaging operation.
"""
if operation in ["create", "send", "publish"]:
return SpanKind.PRODUCER
elif operation in ["receive", "intercept", "process", "ack"]:
return SpanKind.CONSUMER
else:
return SpanKind.CLIENT
@contextlib.contextmanager
def trace_block(
tracer: Tracer,
operation: MessagingOperation,
destination: MessagingDestination,
parent: Optional[TelemetryMetadataContainer],
*,
kind: Optional[SpanKind] = None,
attributes: Optional[types.Attributes] = None,
links: Optional[Sequence[Link]] = None,
start_time: Optional[int] = None,
record_exception: bool = True,
set_status_on_exception: bool = True,
end_on_exit: bool = True,
) -> Iterator[Span]:
"""
Thin wrapper on top of start_as_current_span.
1. It helps us follow semantic conventions
2. It helps us get contexts from metadata so we can get nested spans
Args:
tracer (Tracer): The tracer to use for tracing.
operation (MessagingOperation): The messaging operation being performed.
destination (MessagingDestination): The messaging destination being used.
parent (Optional[TelemetryMetadataContainer]): The parent telemetry metadata context.
kind (SpanKind, optional): The kind of span. If not provided, it maps to PRODUCER or CONSUMER depending on the operation.
attributes (Optional[types.Attributes], optional): Additional attributes for the span. Defaults to None.
links (Optional[Sequence[Link]], optional): Links to other spans. Defaults to None.
start_time (Optional[int], optional): The start time of the span. Defaults to None.
record_exception (bool, optional): Whether to record exceptions. Defaults to True.
set_status_on_exception (bool, optional): Whether to set the status on exception. Defaults to True.
end_on_exit (bool, optional): Whether to end the span on exit. Defaults to True.
Yields:
Iterator[Span]: The span object.
"""
span_name = _get_span_name(operation, destination)
span_kind = kind or _get_span_kind(operation)
context = _get_telemetry_context(parent) if parent else None
with tracer.start_as_current_span(
span_name,
context,
span_kind,
attributes,
links,
start_time,
record_exception,
set_status_on_exception,
end_on_exit,
) as span:
yield span

View File

@ -1,12 +1,25 @@
import asyncio
import pytest
from autogen_core.application import SingleThreadedAgentRuntime
from autogen_core.components import TypeSubscription, DefaultTopicId, DefaultSubscription
from autogen_core.base import AgentId, AgentInstantiationContext
from autogen_core.base import TopicId
from autogen_core.base import Subscription
from autogen_core.base import SubscriptionInstantiationContext
from autogen_core.base import (
AgentId,
AgentInstantiationContext,
Subscription,
SubscriptionInstantiationContext,
TopicId,
)
from autogen_core.components import DefaultSubscription, DefaultTopicId, TypeSubscription
from test_utils import CascadingAgent, CascadingMessageType, LoopbackAgent, MessageType, NoopAgent
from test_utils.telemetry_test_utils import TestExporter, get_test_tracer_provider
from opentelemetry.sdk.trace import TracerProvider
test_exporter = TestExporter()
@pytest.fixture
def tracer_provider() -> TracerProvider:
test_exporter.clear()
return get_test_tracer_provider(test_exporter)
@pytest.mark.asyncio
@ -29,8 +42,8 @@ async def test_agent_names_must_be_unique() -> None:
@pytest.mark.asyncio
async def test_register_receives_publish() -> None:
runtime = SingleThreadedAgentRuntime()
async def test_register_receives_publish(tracer_provider: TracerProvider) -> None:
runtime = SingleThreadedAgentRuntime(tracer_provider=tracer_provider)
await runtime.register("name", LoopbackAgent)
runtime.start()
@ -49,6 +62,11 @@ async def test_register_receives_publish() -> None:
other_long_running_agent: LoopbackAgent = await runtime.try_get_underlying_agent_instance(AgentId("name", key="other"), type=LoopbackAgent)
assert other_long_running_agent.num_calls == 0
exported_spans = test_exporter.get_exported_spans()
assert len(exported_spans) == 3
span_names = [span.name for span in exported_spans]
assert span_names == ["autogen create default.(default)-T", "autogen process name.(default)-A", "autogen publish default.(default)-T"]
@pytest.mark.asyncio
async def test_register_receives_publish_cascade() -> None:

View File

@ -0,0 +1,29 @@
from typing import List, Sequence
from opentelemetry.sdk.trace import ReadableSpan, TracerProvider
from opentelemetry.sdk.trace.export import SimpleSpanProcessor, SpanExporter, SpanExportResult
class TestExporter(SpanExporter):
def __init__(self) -> None:
self.exported_spans: List[ReadableSpan] = []
def export(self, spans: Sequence[ReadableSpan]) -> SpanExportResult:
self.exported_spans.extend(spans)
return SpanExportResult.SUCCESS
def shutdown(self) -> None:
pass
def clear(self) -> None:
"""Clears the list of exported spans."""
self.exported_spans.clear()
def get_exported_spans(self) -> List[ReadableSpan]:
"""Returns the list of exported spans."""
return self.exported_spans
def get_test_tracer_provider(exporter: TestExporter) -> TracerProvider:
tracer_provider = TracerProvider()
tracer_provider.add_span_processor(SimpleSpanProcessor(exporter))
return tracer_provider

python/uv.lock generated
View File

@ -16,12 +16,9 @@ resolution-markers = [
"(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_system != 'Darwin') or (python_full_version < '3.11' and platform_system != 'Darwin' and platform_system != 'Linux')",
"(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_system != 'Darwin') or (python_full_version == '3.11.*' and platform_system != 'Darwin' and platform_system != 'Linux')",
"(python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_machine != 'aarch64' and platform_system != 'Darwin') or (python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_system != 'Darwin' and platform_system != 'Linux')",
"python_full_version >= '3.12.4' and python_full_version < '3.13' and platform_system == 'Darwin'",
"python_full_version >= '3.12.4' and python_full_version < '3.13' and platform_machine == 'aarch64' and platform_system == 'Linux'",
"(python_full_version >= '3.12.4' and python_full_version < '3.13' and platform_machine != 'aarch64' and platform_system != 'Darwin') or (python_full_version >= '3.12.4' and python_full_version < '3.13' and platform_system != 'Darwin' and platform_system != 'Linux')",
"python_full_version >= '3.13' and platform_system == 'Darwin'",
"python_full_version >= '3.13' and platform_machine == 'aarch64' and platform_system == 'Linux'",
"(python_full_version >= '3.13' and platform_machine != 'aarch64' and platform_system != 'Darwin') or (python_full_version >= '3.13' and platform_system != 'Darwin' and platform_system != 'Linux')",
"python_full_version >= '3.12.4' and platform_system == 'Darwin'",
"python_full_version >= '3.12.4' and platform_machine == 'aarch64' and platform_system == 'Linux'",
"(python_full_version >= '3.12.4' and platform_machine != 'aarch64' and platform_system != 'Darwin') or (python_full_version >= '3.12.4' and platform_system != 'Darwin' and platform_system != 'Linux')",
]
[manifest]
@ -284,6 +281,7 @@ dependencies = [
{ name = "azure-core" },
{ name = "grpcio" },
{ name = "openai" },
{ name = "opentelemetry-api" },
{ name = "pillow" },
{ name = "protobuf" },
{ name = "pydantic" },
@ -309,6 +307,7 @@ dev = [
{ name = "markdownify" },
{ name = "myst-nb" },
{ name = "nbqa" },
{ name = "opentelemetry-sdk" },
{ name = "pip" },
{ name = "polars" },
{ name = "python-dotenv" },
@ -333,6 +332,7 @@ requires-dist = [
{ name = "azure-core" },
{ name = "grpcio", specifier = "~=1.62.0" },
{ name = "openai", specifier = ">=1.3" },
{ name = "opentelemetry-api", specifier = "~=1.27.0" },
{ name = "pillow" },
{ name = "protobuf", specifier = "~=4.25.1" },
{ name = "pydantic", specifier = ">=1.10,<3" },
@ -358,6 +358,7 @@ dev = [
{ name = "markdownify" },
{ name = "myst-nb" },
{ name = "nbqa" },
{ name = "opentelemetry-sdk", specifier = ">=1.27.0" },
{ name = "pip" },
{ name = "polars" },
{ name = "python-dotenv" },
@ -2445,7 +2446,6 @@ version = "12.1.3.1"
source = { registry = "https://pypi.org/simple" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/37/6d/121efd7382d5b0284239f4ab1fc1590d86d34ed4a4a2fdb13b30ca8e5740/nvidia_cublas_cu12-12.1.3.1-py3-none-manylinux1_x86_64.whl", hash = "sha256:ee53ccca76a6fc08fb9701aa95b6ceb242cdaab118c3bb152af4e579af792728", size = 410594774 },
{ url = "https://files.pythonhosted.org/packages/c5/ef/32a375b74bea706c93deea5613552f7c9104f961b21df423f5887eca713b/nvidia_cublas_cu12-12.1.3.1-py3-none-win_amd64.whl", hash = "sha256:2b964d60e8cf11b5e1073d179d85fa340c120e99b3067558f3cf98dd69d02906", size = 439918445 },
]
[[package]]
@ -2454,7 +2454,6 @@ version = "12.1.105"
source = { registry = "https://pypi.org/simple" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/7e/00/6b218edd739ecfc60524e585ba8e6b00554dd908de2c9c66c1af3e44e18d/nvidia_cuda_cupti_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:e54fde3983165c624cb79254ae9818a456eb6e87a7fd4d56a2352c24ee542d7e", size = 14109015 },
{ url = "https://files.pythonhosted.org/packages/d0/56/0021e32ea2848c24242f6b56790bd0ccc8bf99f973ca790569c6ca028107/nvidia_cuda_cupti_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:bea8236d13a0ac7190bd2919c3e8e6ce1e402104276e6f9694479e48bb0eb2a4", size = 10154340 },
]
[[package]]
@ -2463,7 +2462,6 @@ version = "12.1.105"
source = { registry = "https://pypi.org/simple" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/b6/9f/c64c03f49d6fbc56196664d05dba14e3a561038a81a638eeb47f4d4cfd48/nvidia_cuda_nvrtc_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:339b385f50c309763ca65456ec75e17bbefcbbf2893f462cb8b90584cd27a1c2", size = 23671734 },
{ url = "https://files.pythonhosted.org/packages/ad/1d/f76987c4f454eb86e0b9a0e4f57c3bf1ac1d13ad13cd1a4da4eb0e0c0ce9/nvidia_cuda_nvrtc_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:0a98a522d9ff138b96c010a65e145dc1b4850e9ecb75a0172371793752fd46ed", size = 19331863 },
]
[[package]]
@ -2472,7 +2470,6 @@ version = "12.1.105"
source = { registry = "https://pypi.org/simple" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/eb/d5/c68b1d2cdfcc59e72e8a5949a37ddb22ae6cade80cd4a57a84d4c8b55472/nvidia_cuda_runtime_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:6e258468ddf5796e25f1dc591a31029fa317d97a0a94ed93468fc86301d61e40", size = 823596 },
{ url = "https://files.pythonhosted.org/packages/9f/e2/7a2b4b5064af56ea8ea2d8b2776c0f2960d95c88716138806121ae52a9c9/nvidia_cuda_runtime_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:dfb46ef84d73fababab44cf03e3b83f80700d27ca300e537f85f636fac474344", size = 821226 },
]
[[package]]
@ -2484,7 +2481,6 @@ dependencies = [
]
wheels = [
{ url = "https://files.pythonhosted.org/packages/9f/fd/713452cd72343f682b1c7b9321e23829f00b842ceaedcda96e742ea0b0b3/nvidia_cudnn_cu12-9.1.0.70-py3-none-manylinux2014_x86_64.whl", hash = "sha256:165764f44ef8c61fcdfdfdbe769d687e06374059fbb388b6c89ecb0e28793a6f", size = 664752741 },
{ url = "https://files.pythonhosted.org/packages/3f/d0/f90ee6956a628f9f04bf467932c0a25e5a7e706a684b896593c06c82f460/nvidia_cudnn_cu12-9.1.0.70-py3-none-win_amd64.whl", hash = "sha256:6278562929433d68365a07a4a1546c237ba2849852c0d4b2262a486e805b977a", size = 679925892 },
]
[[package]]
@ -2493,7 +2489,6 @@ version = "11.0.2.54"
source = { registry = "https://pypi.org/simple" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/86/94/eb540db023ce1d162e7bea9f8f5aa781d57c65aed513c33ee9a5123ead4d/nvidia_cufft_cu12-11.0.2.54-py3-none-manylinux1_x86_64.whl", hash = "sha256:794e3948a1aa71fd817c3775866943936774d1c14e7628c74f6f7417224cdf56", size = 121635161 },
{ url = "https://files.pythonhosted.org/packages/f7/57/7927a3aa0e19927dfed30256d1c854caf991655d847a4e7c01fe87e3d4ac/nvidia_cufft_cu12-11.0.2.54-py3-none-win_amd64.whl", hash = "sha256:d9ac353f78ff89951da4af698f80870b1534ed69993f10a4cf1d96f21357e253", size = 121344196 },
]
[[package]]
@ -2502,7 +2497,6 @@ version = "10.3.2.106"
source = { registry = "https://pypi.org/simple" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/44/31/4890b1c9abc496303412947fc7dcea3d14861720642b49e8ceed89636705/nvidia_curand_cu12-10.3.2.106-py3-none-manylinux1_x86_64.whl", hash = "sha256:9d264c5036dde4e64f1de8c50ae753237c12e0b1348738169cd0f8a536c0e1e0", size = 56467784 },
{ url = "https://files.pythonhosted.org/packages/5c/97/4c9c7c79efcdf5b70374241d48cf03b94ef6707fd18ea0c0f53684931d0b/nvidia_curand_cu12-10.3.2.106-py3-none-win_amd64.whl", hash = "sha256:75b6b0c574c0037839121317e17fd01f8a69fd2ef8e25853d826fec30bdba74a", size = 55995813 },
]
[[package]]
@ -2516,7 +2510,6 @@ dependencies = [
]
wheels = [
{ url = "https://files.pythonhosted.org/packages/bc/1d/8de1e5c67099015c834315e333911273a8c6aaba78923dd1d1e25fc5f217/nvidia_cusolver_cu12-11.4.5.107-py3-none-manylinux1_x86_64.whl", hash = "sha256:8a7ec542f0412294b15072fa7dab71d31334014a69f953004ea7a118206fe0dd", size = 124161928 },
{ url = "https://files.pythonhosted.org/packages/b8/80/8fca0bf819122a631c3976b6fc517c1b10741b643b94046bd8dd451522c5/nvidia_cusolver_cu12-11.4.5.107-py3-none-win_amd64.whl", hash = "sha256:74e0c3a24c78612192a74fcd90dd117f1cf21dea4822e66d89e8ea80e3cd2da5", size = 121643081 },
]
[[package]]
@ -2528,7 +2521,6 @@ dependencies = [
]
wheels = [
{ url = "https://files.pythonhosted.org/packages/65/5b/cfaeebf25cd9fdec14338ccb16f6b2c4c7fa9163aefcf057d86b9cc248bb/nvidia_cusparse_cu12-12.1.0.106-py3-none-manylinux1_x86_64.whl", hash = "sha256:f3b50f42cf363f86ab21f720998517a659a48131e8d538dc02f8768237bd884c", size = 195958278 },
{ url = "https://files.pythonhosted.org/packages/0f/95/48fdbba24c93614d1ecd35bc6bdc6087bd17cbacc3abc4b05a9c2a1ca232/nvidia_cusparse_cu12-12.1.0.106-py3-none-win_amd64.whl", hash = "sha256:b798237e81b9719373e8fae8d4f091b70a0cf09d9d85c95a557e11df2d8e9a5a", size = 195414588 },
]
[[package]]
@ -2547,7 +2539,6 @@ source = { registry = "https://pypi.org/simple" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/58/8c/69c9e39cd6bfa813852a94e9bd3c075045e2707d163e9dc2326c82d2c330/nvidia_nvjitlink_cu12-12.6.68-py3-none-manylinux2014_aarch64.whl", hash = "sha256:b3fd0779845f68b92063ab1393abab1ed0a23412fc520df79a8190d098b5cd6b", size = 19253287 },
{ url = "https://files.pythonhosted.org/packages/a8/48/a9775d377cb95585fb188b469387f58ba6738e268de22eae2ad4cedb2c41/nvidia_nvjitlink_cu12-12.6.68-py3-none-manylinux2014_x86_64.whl", hash = "sha256:125a6c2a44e96386dda634e13d944e60b07a0402d391a070e8fb4104b34ea1ab", size = 19725597 },
{ url = "https://files.pythonhosted.org/packages/00/d5/02af3b39427ed71e8c40b6912271499ec186a72405bcb7e4ca26ff70678c/nvidia_nvjitlink_cu12-12.6.68-py3-none-win_amd64.whl", hash = "sha256:a55744c98d70317c5e23db14866a8cc2b733f7324509e941fc96276f9f37801d", size = 161730369 },
]
[[package]]
@ -2556,7 +2547,6 @@ version = "12.1.105"
source = { registry = "https://pypi.org/simple" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/da/d3/8057f0587683ed2fcd4dbfbdfdfa807b9160b809976099d36b8f60d08f03/nvidia_nvtx_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:dc21cf308ca5691e7c04d962e213f8a4aa9bbfa23d95412f452254c2caeb09e5", size = 99138 },
{ url = "https://files.pythonhosted.org/packages/b8/d7/bd7cb2d95ac6ac6e8d05bfa96cdce69619f1ef2808e072919044c2d47a8c/nvidia_nvtx_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:65f4d98982b31b60026e0e6de73fbdfc09d08a96f4656dd3665ca616a11e1e82", size = 66307 },
]
[[package]]
@ -2607,6 +2597,46 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/c0/da/977ded879c29cbd04de313843e76868e6e13408a94ed6b987245dc7c8506/openpyxl-3.1.5-py2.py3-none-any.whl", hash = "sha256:5282c12b107bffeef825f4617dc029afaf41d0ea60823bbb665ef3079dc79de2", size = 250910 },
]
[[package]]
name = "opentelemetry-api"
version = "1.27.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "deprecated" },
{ name = "importlib-metadata" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c9/83/93114b6de85a98963aec218a51509a52ed3f8de918fe91eb0f7299805c3f/opentelemetry_api-1.27.0.tar.gz", hash = "sha256:ed673583eaa5f81b5ce5e86ef7cdaf622f88ef65f0b9aab40b843dcae5bef342", size = 62693 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/fb/1f/737dcdbc9fea2fa96c1b392ae47275165a7c641663fbb08a8d252968eed2/opentelemetry_api-1.27.0-py3-none-any.whl", hash = "sha256:953d5871815e7c30c81b56d910c707588000fff7a3ca1c73e6531911d53065e7", size = 63970 },
]
[[package]]
name = "opentelemetry-sdk"
version = "1.27.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "opentelemetry-api" },
{ name = "opentelemetry-semantic-conventions" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/0d/9a/82a6ac0f06590f3d72241a587cb8b0b751bd98728e896cc4cbd4847248e6/opentelemetry_sdk-1.27.0.tar.gz", hash = "sha256:d525017dea0ccce9ba4e0245100ec46ecdc043f2d7b8315d56b19aff0904fa6f", size = 145019 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c1/bd/a6602e71e315055d63b2ff07172bd2d012b4cba2d4e00735d74ba42fc4d6/opentelemetry_sdk-1.27.0-py3-none-any.whl", hash = "sha256:365f5e32f920faf0fd9e14fdfd92c086e317eaa5f860edba9cdc17a380d9197d", size = 110505 },
]
[[package]]
name = "opentelemetry-semantic-conventions"
version = "0.48b0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "deprecated" },
{ name = "opentelemetry-api" },
]
sdist = { url = "https://files.pythonhosted.org/packages/0a/89/1724ad69f7411772446067cdfa73b598694c8c91f7f8c922e344d96d81f9/opentelemetry_semantic_conventions-0.48b0.tar.gz", hash = "sha256:12d74983783b6878162208be57c9effcb89dc88691c64992d70bb89dc00daa1a", size = 89445 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/b7/7a/4f0063dbb0b6c971568291a8bc19a4ca70d3c185db2d956230dd67429dfc/opentelemetry_semantic_conventions-0.48b0-py3-none-any.whl", hash = "sha256:a0de9f45c413a8669788a38569c7e0a11ce6ce97861a628cca785deecdc32a1f", size = 149685 },
]
[[package]]
name = "orjson"
version = "3.10.7"
@ -4036,7 +4066,7 @@ name = "sqlalchemy"
version = "2.0.32"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "greenlet", marker = "(python_full_version < '3.13' and platform_machine == 'AMD64') or (python_full_version < '3.13' and platform_machine == 'WIN32') or (python_full_version < '3.13' and platform_machine == 'aarch64') or (python_full_version < '3.13' and platform_machine == 'amd64') or (python_full_version < '3.13' and platform_machine == 'ppc64le') or (python_full_version < '3.13' and platform_machine == 'win32') or (python_full_version < '3.13' and platform_machine == 'x86_64')" },
{ name = "greenlet", marker = "platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64'" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/af/6f/967e987683908af816aa3072c1a6997ac9933cf38d66b0474fb03f253323/SQLAlchemy-2.0.32.tar.gz", hash = "sha256:c1b88cc8b02b6a5f0efb0345a03672d4c897dc7d92585176f88c67346f565ea8", size = 9546691 }
@ -4381,7 +4411,7 @@ dependencies = [
{ name = "nvidia-nccl-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" },
{ name = "nvidia-nvtx-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" },
{ name = "sympy" },
{ name = "triton", marker = "python_full_version < '3.13' and platform_machine == 'x86_64' and platform_system == 'Linux'" },
{ name = "triton", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" },
{ name = "typing-extensions" },
]
wheels = [
@ -4502,7 +4532,7 @@ name = "triton"
version = "3.0.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "filelock", marker = "(python_full_version < '3.13' and platform_machine != 'aarch64' and platform_system != 'Darwin') or (python_full_version < '3.13' and platform_system != 'Darwin' and platform_system != 'Linux')" },
{ name = "filelock", marker = "(platform_machine != 'aarch64' and platform_system != 'Darwin') or (platform_system != 'Darwin' and platform_system != 'Linux')" },
]
wheels = [
{ url = "https://files.pythonhosted.org/packages/45/27/14cc3101409b9b4b9241d2ba7deaa93535a217a211c86c4cc7151fb12181/triton-3.0.0-1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e1efef76935b2febc365bfadf74bcb65a6f959a9872e5bddf44cc9e0adce1e1a", size = 209376304 },