Skip to content
Open
Show file tree
Hide file tree
Changes from 4 commits
Commits
Show all changes
30 commits
Select commit Hold shift + click to select a range
48e858a
added changes
yiphei Jan 7, 2026
5e431dc
added changes
yiphei Jan 7, 2026
06ce262
added changes
yiphei Jan 7, 2026
5a2b35d
fix attribute name
yiphei Jan 7, 2026
85c711f
Merge branch 'main' into support-custom-system
yiphei Jan 8, 2026
8df4eef
added changes
yiphei Jan 8, 2026
dd9702d
added changes
yiphei Jan 8, 2026
33c83d1
added changes
yiphei Jan 8, 2026
cd656b3
added changes
yiphei Jan 8, 2026
7f3d01f
added changes
yiphei Jan 8, 2026
3140257
added changes
yiphei Jan 8, 2026
0e079b2
Merge branch 'main' into support-custom-system
yiphei Jan 10, 2026
1bbd297
added changes
yiphei Jan 10, 2026
e8cbbaa
added changes
yiphei Jan 10, 2026
f33ddf3
added changes
yiphei Jan 10, 2026
36bba2f
refactored tests
yiphei Jan 10, 2026
fb2d2c0
Merge branch 'main' into support-custom-system
yiphei Jan 12, 2026
3cb7f36
added changes
yiphei Jan 12, 2026
d78496d
added changes
yiphei Jan 12, 2026
8b6085c
added changes
yiphei Jan 12, 2026
190fb88
Merge branch 'main' into support-custom-system
yiphei Jan 13, 2026
a200de6
added changes
yiphei Jan 13, 2026
fc506ab
added changes
yiphei Jan 13, 2026
ca42b29
added changes
yiphei Jan 13, 2026
85186ae
added changes
yiphei Jan 13, 2026
28e7ee0
Merge branch 'main' into support-custom-system
yiphei Jan 15, 2026
3f9255a
added changes
yiphei Jan 15, 2026
c702827
added changes
yiphei Jan 15, 2026
c0d1f2d
Merge branch 'main' into support-custom-system
yiphei Jan 19, 2026
a2aa776
Merge pull request #7 from yiphei/support-custom-system-pt2
yiphei Jan 19, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions logfire/_internal/integrations/llm_providers/llm_provider.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@ def instrument_llm_provider(
get_endpoint_config_fn: Callable[[Any], EndpointConfig],
on_response_fn: Callable[[Any, LogfireSpan], Any],
is_async_client_fn: Callable[[type[Any]], bool],
model_provider: str | None = None,
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Just call it `provider`.

) -> AbstractContextManager[None]:
"""Instruments the provided `client` (or clients) with `logfire`.

Expand Down Expand Up @@ -93,6 +94,8 @@ def _instrumentation_setup(*args: Any, **kwargs: Any) -> Any:
return None, None, kwargs

span_data['async'] = is_async
if model_provider is not None:
span_data['overridden_model_provider'] = model_provider
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Rather, just set `gen_ai.system` (and `gen_ai.provider.name`, the new attribute) here.


if kwargs.get('stream') and stream_state_cls:
stream_cls = kwargs['stream_cls']
Expand Down
8 changes: 5 additions & 3 deletions logfire/_internal/integrations/llm_providers/openai.py
Original file line number Diff line number Diff line change
Expand Up @@ -183,7 +183,9 @@ def on_response(response: ResponseT, span: LogfireSpan) -> ResponseT:
on_response(response.parse(), span) # type: ignore
return cast('ResponseT', response)

span.set_attribute('gen_ai.system', 'openai')
model_provider: str = cast(str, (span.attributes or {}).get('overridden_model_provider', "openai"))
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

span.attributes won't work when the span isn't recording

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@alexmojaki Then how should I check whether `overridden_model_provider` is set, or, as you proposed, check that `gen_ai.system` hasn't already been set upstream?

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

use getattr(span, 'attributes', None)


span.set_attribute('gen_ai.system', model_provider)

if isinstance(response_model := getattr(response, 'model', None), str):
span.set_attribute('gen_ai.response.model', response_model)
Expand All @@ -194,12 +196,12 @@ def on_response(response: ResponseT, span: LogfireSpan) -> ResponseT:
response_data = response.model_dump() # type: ignore
usage_data = extract_usage(
response_data,
provider_id='openai',
provider_id=model_provider,
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think leave this as is; since we're using the OpenAI client, we should be able to assume the shape of the usage.

api_flavor='responses' if isinstance(response, Response) else 'chat',
)
span.set_attribute(
'operation.cost',
float(calc_price(usage_data.usage, model_ref=response_model, provider_id='openai').total_price),
float(calc_price(usage_data.usage, model_ref=response_model, provider_id=model_provider).total_price),
)
except Exception:
pass
Expand Down
2 changes: 2 additions & 0 deletions logfire/_internal/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -1171,6 +1171,7 @@ def instrument_openai(
| None = None,
*,
suppress_other_instrumentation: bool = True,
model_provider: str = "openai",
) -> AbstractContextManager[None]:
"""Instrument an OpenAI client so that spans are automatically created for each request.

Expand Down Expand Up @@ -1237,6 +1238,7 @@ def instrument_openai(
get_endpoint_config,
on_response,
is_async_client,
model_provider,
)

def instrument_openai_agents(self) -> None:
Expand Down
Loading