-
Notifications
You must be signed in to change notification settings - Fork 214
feat: add logfire.url_from_eval(report) method
#1694
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: main
Are you sure you want to change the base?
Changes from 1 commit
d654bed
17de3ed
7d6cc6e
9e09e23
0b2ad3e
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -82,6 +82,7 @@ | |
| from flask.app import Flask | ||
| from opentelemetry.instrumentation.asgi.types import ClientRequestHook, ClientResponseHook, ServerRequestHook | ||
| from opentelemetry.metrics import _Gauge as Gauge | ||
| from pydantic_evals.reporting import EvaluationReport | ||
| from pymongo.monitoring import CommandFailedEvent, CommandStartedEvent, CommandSucceededEvent | ||
| from sqlalchemy import Engine | ||
| from sqlalchemy.ext.asyncio import AsyncEngine | ||
|
|
@@ -876,6 +877,22 @@ def force_flush(self, timeout_millis: int = 3_000) -> bool: # pragma: no cover | |
| """ | ||
| return self._config.force_flush(timeout_millis) | ||
|
|
||
def url_from_eval(self, report: 'EvaluationReport[Any, Any, Any]') -> 'str | None':
    """Build a Logfire "compare evaluations" URL for an evaluation report.

    Args:
        report: An evaluation report produced by `pydantic_evals`.

    Returns:
        The URL string, or `None` if the project URL or the report's
        trace/span IDs are not available.
    """
    base_url = self._config.project_url
    # All three pieces are required to construct the experiment link;
    # bail out with None if any of them is missing/empty.
    if not (base_url and report.trace_id and report.span_id):
        return None
    return f'{base_url}/evals/compare?experiment={report.trace_id}-{report.span_id}'
|
||
|
|
||
| def log_slow_async_callbacks(self, slow_duration: float = 0.1) -> AbstractContextManager[None]: | ||
| """Log a warning whenever a function running in the asyncio event loop blocks for too long. | ||
|
|
||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,64 @@ | ||
| from __future__ import annotations | ||
|
|
||
| import pytest | ||
|
|
||
| try: | ||
| from pydantic_evals.reporting import EvaluationReport | ||
| except Exception: | ||
| pytest.skip('pydantic_evals not importable (likely pydantic < 2.8)', allow_module_level=True) | ||
|
|
||
| import logfire | ||
| from logfire._internal.config import LogfireConfig | ||
|
|
||
|
|
||
def _make_report(trace_id: str | None = None, span_id: str | None = None) -> EvaluationReport:
    """Build a minimal, case-less report carrying only the given trace/span IDs."""
    return EvaluationReport(
        name='test',
        cases=[],
        trace_id=trace_id,
        span_id=span_id,
    )
|
|
||
|
|
||
def test_url_from_eval_with_project_url() -> None:
    # With a project URL configured and both IDs present, a full
    # "compare evaluations" link should be produced.
    cfg = LogfireConfig(send_to_logfire=False, console=False)
    cfg.project_url = 'https://logfire.pydantic.dev/my-org/my-project'
    lf = logfire.Logfire(config=cfg)

    url = lf.url_from_eval(_make_report(trace_id='abc123', span_id='def456'))
    assert url == 'https://logfire.pydantic.dev/my-org/my-project/evals/compare?experiment=abc123-def456'
|
|
||
|
|
||
def test_url_from_eval_no_project_url() -> None:
    # Without a project URL there is nothing to link to, even when the
    # report carries both IDs.
    lf = logfire.Logfire(config=LogfireConfig(send_to_logfire=False, console=False))

    assert lf.url_from_eval(_make_report(trace_id='abc123', span_id='def456')) is None
|
|
||
|
|
||
def test_url_from_eval_no_trace_id() -> None:
    # A missing trace ID makes the experiment reference incomplete,
    # so no URL should be returned.
    cfg = LogfireConfig(send_to_logfire=False, console=False)
    cfg.project_url = 'https://logfire.pydantic.dev/my-org/my-project'
    lf = logfire.Logfire(config=cfg)

    assert lf.url_from_eval(_make_report(span_id='def456')) is None
|
|
||
|
|
||
def test_url_from_eval_no_span_id() -> None:
    # A missing span ID makes the experiment reference incomplete,
    # so no URL should be returned.
    cfg = LogfireConfig(send_to_logfire=False, console=False)
    cfg.project_url = 'https://logfire.pydantic.dev/my-org/my-project'
    lf = logfire.Logfire(config=cfg)

    assert lf.url_from_eval(_make_report(trace_id='abc123')) is None
|
|
||
|
|
||
def test_url_from_eval_no_ids() -> None:
    # Neither ID present: the URL cannot be built even though the
    # project URL itself is configured.
    cfg = LogfireConfig(send_to_logfire=False, console=False)
    cfg.project_url = 'https://logfire.pydantic.dev/my-org/my-project'
    lf = logfire.Logfire(config=cfg)

    assert lf.url_from_eval(_make_report()) is None
Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.
Uh oh!
There was an error while loading. Please reload this page.