Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
29 changes: 29 additions & 0 deletions logfire/_internal/integrations/llm_providers/openai.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
from openai.types.chat.chat_completion import ChatCompletion
from openai.types.chat.chat_completion_chunk import ChatCompletionChunk
from openai.types.completion import Completion
from openai.types.conversations import Conversation
from openai.types.create_embedding_response import CreateEmbeddingResponse
from openai.types.images_response import ImagesResponse
from openai.types.responses import Response
Expand All @@ -19,6 +20,7 @@

from ...utils import handle_internal_errors, log_internal_error
from .semconv import (
CONVERSATION_ID,
OPERATION_NAME,
PROVIDER_NAME,
REQUEST_FREQUENCY_PENALTY,
Expand Down Expand Up @@ -122,6 +124,12 @@ def get_endpoint_config(options: FinalRequestOptions) -> EndpointConfig:
}
_extract_request_parameters(json_data, span_data)

if (conversation := json_data.get('conversation')) is not None:
if isinstance(conversation, str):
span_data[CONVERSATION_ID] = conversation
elif isinstance(conversation, dict) and 'id' in conversation:
span_data[CONVERSATION_ID] = conversation['id']

return EndpointConfig(
message_template='Responses API with {gen_ai.request.model!r}',
span_data=span_data,
Expand Down Expand Up @@ -164,6 +172,17 @@ def get_endpoint_config(options: FinalRequestOptions) -> EndpointConfig:
message_template='Image Generation with {request_data[model]!r}',
span_data=span_data,
)
elif url == '/conversations':
span_data = {
'request_data': json_data,
'url': url,
PROVIDER_NAME: 'openai',
OPERATION_NAME: 'conversation',
}
return EndpointConfig(
message_template='OpenAI Conversation Create',
span_data=span_data,
)
else:
span_data = {
'request_data': json_data,
Expand Down Expand Up @@ -225,6 +244,10 @@ def get_response_data(self) -> Any:
def get_attributes(self, span_data: dict[str, Any]) -> dict[str, Any]:
response = self.get_response_data()
span_data['events'] = span_data['events'] + responses_output_events(response)

if response.conversation:
span_data[CONVERSATION_ID] = response.conversation.id

return span_data


Expand Down Expand Up @@ -311,6 +334,9 @@ def on_response(response: ResponseT, span: LogfireSpan) -> ResponseT:
elif isinstance(response, ImagesResponse):
span.set_attribute('response_data', {'images': response.data})
elif isinstance(response, Response): # pragma: no branch
if response.conversation:
span.set_attribute(CONVERSATION_ID, response.conversation.id)

try:
events = json.loads(span.attributes['events']) # type: ignore
except Exception:
Expand All @@ -319,6 +345,9 @@ def on_response(response: ResponseT, span: LogfireSpan) -> ResponseT:
events += responses_output_events(response)
span.set_attribute('events', events)

if isinstance(response, Conversation):
span.set_attribute(CONVERSATION_ID, response.id)

return response


Expand Down
131 changes: 131 additions & 0 deletions tests/otel_integrations/test_openai.py
Original file line number Diff line number Diff line change
Expand Up @@ -360,6 +360,23 @@ def request_handler(request: httpx.Request) -> httpx.Response:
200,
json={'id': 'thread_abc123', 'object': 'thread', 'created_at': 1698107661, 'metadata': {}},
)
elif request.url == 'https://api.openai.com/v1/responses':
json_body = json.loads(request.content)
conversation_data = {'id': json_body['conversation']} if json_body.get('conversation') else None
return httpx.Response(
200,
json={
'id': 'resp_test_123',
'created_at': 1698107661,
'model': json_body.get('model', 'gpt-4'),
'object': 'response',
'output': [
{'type': 'message', 'role': 'assistant', 'content': [{'type': 'output_text', 'text': 'Nine'}]}
],
'conversation': conversation_data,
'usage': {'input_tokens': 10, 'output_tokens': 5, 'total_tokens': 15},
},
)
else: # pragma: no cover
raise ValueError(f'Unexpected request to {request.url!r}')

Expand Down Expand Up @@ -2803,3 +2820,117 @@ def test_openrouter_streaming_reasoning(exporter: TestExporter) -> None:
},
]
)


def test_get_endpoint_config_conversations() -> None:
    """Verify the endpoint config produced for the /conversations URL."""
    from unittest.mock import MagicMock

    from logfire._internal.integrations.llm_providers.openai import (
        get_endpoint_config,  # pyright: ignore[reportPrivateUsage]
    )

    request_options = MagicMock()
    request_options.url = '/conversations'
    request_options.json_data = {}

    endpoint_config = get_endpoint_config(request_options)
    span_data = endpoint_config.span_data
    assert endpoint_config.message_template == 'OpenAI Conversation Create'
    assert span_data['gen_ai.provider.name'] == 'openai'
    assert span_data['gen_ai.operation.name'] == 'conversation'


def test_get_endpoint_config_responses_with_conversation_string() -> None:
    """Verify a string `conversation` value on /responses is recorded as the conversation ID."""
    from unittest.mock import MagicMock

    from logfire._internal.integrations.llm_providers.openai import (
        get_endpoint_config,  # pyright: ignore[reportPrivateUsage]
    )

    request_options = MagicMock()
    request_options.url = '/responses'
    request_options.json_data = {'model': 'gpt-4', 'input': 'Hello', 'conversation': 'conv_123456'}

    endpoint_config = get_endpoint_config(request_options)
    assert endpoint_config.span_data['gen_ai.conversation.id'] == 'conv_123456'


def test_get_endpoint_config_responses_with_conversation_dict() -> None:
    """Verify a dict `conversation` value on /responses has its `id` recorded as the conversation ID."""
    from unittest.mock import MagicMock

    from logfire._internal.integrations.llm_providers.openai import (
        get_endpoint_config,  # pyright: ignore[reportPrivateUsage]
    )

    request_options = MagicMock()
    request_options.url = '/responses'
    request_options.json_data = {'model': 'gpt-4', 'input': 'Hello', 'conversation': {'id': 'conv_789012'}}

    endpoint_config = get_endpoint_config(request_options)
    assert endpoint_config.span_data['gen_ai.conversation.id'] == 'conv_789012'


def test_on_response_handles_conversation_creation_response() -> None:
    """Verify on_response records the ID of a newly created Conversation object."""
    from unittest.mock import MagicMock

    from openai.types.conversations import Conversation

    from logfire._internal.integrations.llm_providers.openai import (
        on_response,  # pyright: ignore[reportPrivateUsage]
    )

    # model_construct skips validation, so only the fields we assert on are needed.
    conversation = Conversation.model_construct(id='conv_created_456')
    span = MagicMock()
    span.attributes = {}

    on_response(conversation, span)

    span.set_attribute.assert_any_call('gen_ai.conversation.id', 'conv_created_456')


def test_streaming_handler_extracts_conversation_id() -> None:
    """Verify OpenaiResponsesStreamState.get_attributes surfaces the conversation ID."""
    from unittest.mock import MagicMock

    from logfire._internal.integrations.llm_providers.openai import (
        OpenaiResponsesStreamState,  # pyright: ignore[reportPrivateUsage]
    )

    completed_response = MagicMock()
    completed_response.output = []
    completed_response.conversation = MagicMock(id='conv_stream_789')

    # Inject the completed response directly into the stream state's internals.
    state = OpenaiResponsesStreamState()
    state._state._completed_response = completed_response

    attributes = state.get_attributes({'events': []})
    assert attributes['gen_ai.conversation.id'] == 'conv_stream_789'


def test_responses_api_with_conversation_id(instrumented_client: openai.Client, exporter: TestExporter) -> None:
    """End-to-end check that a Responses API span carries the conversation ID attribute."""
    api_response = instrumented_client.responses.create(
        model='gpt-4',
        input='What is four plus five?',
        conversation='conv_integration_test_123',
    )
    assert api_response.id == 'resp_test_123'
    assert api_response.conversation is not None
    assert api_response.conversation.id == 'conv_integration_test_123'

    exported = exporter.exported_spans_as_dict(parse_json_attributes=True)
    assert len(exported) == 1
    span_attributes = exported[0]['attributes']
    assert span_attributes['gen_ai.conversation.id'] == 'conv_integration_test_123'
Loading