Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions logfire-api/logfire_api/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -191,6 +191,9 @@ def instrument_google_genai(self, *args, **kwargs) -> None: ...

def instrument_litellm(self, *args, **kwargs) -> None: ...

def instrument_langchain(self, *args, **kwargs) -> ContextManager[None]:
    """No-op shim used when the real `logfire` package is not installed.

    Accepts and ignores any arguments, and returns `nullcontext()` so that
    callers can still use the result as a context manager (mirroring the
    real `Logfire.instrument_langchain`, which returns a context manager
    that reverts the instrumentation on exit).
    """
    return nullcontext()

def instrument_aiohttp_client(self, *args, **kwargs) -> None: ...

def instrument_aiohttp_server(self, *args, **kwargs) -> None: ...
Expand Down Expand Up @@ -229,6 +232,7 @@ def shutdown(self, *args, **kwargs) -> None: ...
instrument_anthropic = DEFAULT_LOGFIRE_INSTANCE.instrument_anthropic
instrument_google_genai = DEFAULT_LOGFIRE_INSTANCE.instrument_google_genai
instrument_litellm = DEFAULT_LOGFIRE_INSTANCE.instrument_litellm
instrument_langchain = DEFAULT_LOGFIRE_INSTANCE.instrument_langchain
instrument_asyncpg = DEFAULT_LOGFIRE_INSTANCE.instrument_asyncpg
instrument_print = DEFAULT_LOGFIRE_INSTANCE.instrument_print
instrument_celery = DEFAULT_LOGFIRE_INSTANCE.instrument_celery
Expand Down
1 change: 1 addition & 0 deletions logfire-api/logfire_api/__init__.pyi
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@ instrument_openai_agents = DEFAULT_LOGFIRE_INSTANCE.instrument_openai_agents
instrument_anthropic = DEFAULT_LOGFIRE_INSTANCE.instrument_anthropic
instrument_google_genai = DEFAULT_LOGFIRE_INSTANCE.instrument_google_genai
instrument_litellm = DEFAULT_LOGFIRE_INSTANCE.instrument_litellm
instrument_langchain = DEFAULT_LOGFIRE_INSTANCE.instrument_langchain
instrument_print = DEFAULT_LOGFIRE_INSTANCE.instrument_print
instrument_asyncpg = DEFAULT_LOGFIRE_INSTANCE.instrument_asyncpg
instrument_httpx = DEFAULT_LOGFIRE_INSTANCE.instrument_httpx
Expand Down
4 changes: 4 additions & 0 deletions logfire-api/logfire_api/_internal/integrations/langchain.pyi
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
import logfire
from contextlib import AbstractContextManager

def instrument_langchain(logfire_instance: logfire.Logfire) -> AbstractContextManager[None]: ...
11 changes: 11 additions & 0 deletions logfire-api/logfire_api/_internal/main.pyi
Original file line number Diff line number Diff line change
Expand Up @@ -606,6 +606,17 @@ class Logfire:
[`openinference-instrumentation-litellm`](https://pypi.org/project/openinference-instrumentation-litellm/)
package, to which it passes `**kwargs`.
"""
def instrument_langchain(self) -> AbstractContextManager[None]:
    """Instrument LangChain to capture full execution hierarchy with tool definitions.

    This patches LangChain's BaseCallbackManager to inject a callback handler
    that captures the complete execution hierarchy including chains, tools,
    retrievers, and LLM calls with tool definitions.

    Returns:
        A context manager that will revert the instrumentation when exited.
        Use of this context manager is optional.
    """
def instrument_print(self) -> AbstractContextManager[None]:
"""Instrument the built-in `print` function so that calls to it are logged.

Expand Down
2 changes: 2 additions & 0 deletions logfire/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@
instrument_anthropic = DEFAULT_LOGFIRE_INSTANCE.instrument_anthropic
instrument_google_genai = DEFAULT_LOGFIRE_INSTANCE.instrument_google_genai
instrument_litellm = DEFAULT_LOGFIRE_INSTANCE.instrument_litellm
instrument_langchain = DEFAULT_LOGFIRE_INSTANCE.instrument_langchain
instrument_print = DEFAULT_LOGFIRE_INSTANCE.instrument_print
instrument_asyncpg = DEFAULT_LOGFIRE_INSTANCE.instrument_asyncpg
instrument_httpx = DEFAULT_LOGFIRE_INSTANCE.instrument_httpx
Expand Down Expand Up @@ -132,6 +133,7 @@ def loguru_handler() -> Any:
'instrument_anthropic',
'instrument_google_genai',
'instrument_litellm',
'instrument_langchain',
'instrument_print',
'instrument_asyncpg',
'instrument_httpx',
Expand Down
98 changes: 98 additions & 0 deletions logfire/_internal/exporters/processor_wrapper.py
Original file line number Diff line number Diff line change
Expand Up @@ -328,6 +328,64 @@ def _tweak_fastapi_span(span: ReadableSpanDict):
span['events'] = new_events[::-1]


def _normalize_content_block(block: dict[str, Any]) -> dict[str, Any]:
"""Normalize a content block to OTel GenAI schema.

Handles:
- Text: converts 'text' field to 'content' (OTel uses 'content')
- tool_use: converts to 'tool_call' (OTel standard)
- tool_result: converts to 'tool_call_response' (OTel standard)
"""
block_type = block.get('type', 'text')

if block_type == 'text':
return {
'type': 'text',
'content': block.get('content', block.get('text', '')),
}

if block_type == 'tool_use':
return {
'type': 'tool_call',
'id': block.get('id'),
'name': block.get('name'),
'arguments': block.get('input', block.get('arguments')),
}

if block_type == 'tool_result':
return {
'type': 'tool_call_response',
'id': block.get('tool_use_id', block.get('id')),
'response': block.get('content', block.get('response')),
}

return block


def _convert_to_otel_message(msg: dict[str, Any]) -> dict[str, Any]:
"""Convert a message dict to OTel GenAI message schema with role and parts."""
otel_msg: dict[str, Any] = {'role': msg.get('role', 'user'), 'parts': []}
content = msg.get('content')
if content:
if isinstance(content, str):
otel_msg['parts'].append({'type': 'text', 'content': content})
elif isinstance(content, list):
for block in cast(list[Any], content):
if isinstance(block, dict):
otel_msg['parts'].append(_normalize_content_block(cast('dict[str, Any]', block)))
if tool_calls := msg.get('tool_calls'):
for tc in tool_calls:
otel_msg['parts'].append(
{
'type': 'tool_call',
'id': tc.get('id'),
'name': tc.get('function', {}).get('name') or tc.get('name'),
'arguments': tc.get('function', {}).get('arguments') or tc.get('args'),
}
)
return otel_msg


def _transform_langchain_span(span: ReadableSpanDict):
"""Transform spans generated by LangSmith to work better in the Logfire UI.

Expand Down Expand Up @@ -387,6 +445,19 @@ def _transform_langchain_span(span: ReadableSpanDict):
# Remove gen_ai.system=langchain as this also interferes with costs in the UI.
attributes = {k: v for k, v in attributes.items() if k != 'gen_ai.system'}

# Extract finish reason from completion data
with suppress(Exception):
completion = parsed_attributes.get('gen_ai.completion', {})
stop_reason = (
completion.get('generations', [[{}]])[0][0]
.get('message', {})
.get('kwargs', {})
.get('response_metadata', {})
.get('stop_reason')
)
if stop_reason:
new_attributes['gen_ai.response.finish_reasons'] = json.dumps([stop_reason])

# Add `all_messages_events`
with suppress(Exception):
input_messages = parsed_attributes.get('input.value', parsed_attributes.get('gen_ai.prompt', {}))['messages']
Expand Down Expand Up @@ -422,6 +493,33 @@ def _transform_langchain_span(span: ReadableSpanDict):
new_attributes['all_messages_events'] = json.dumps(message_events)
properties['all_messages_events'] = {'type': 'array'}

# Extract OTel GenAI formatted messages
input_msgs: list[dict[str, Any]] = []
output_msgs: list[dict[str, Any]] = []
system_instructions: list[Any] = []
for msg in message_events:
role = msg.get('role')
if role == 'system':
content = msg.get('content', '')
if isinstance(content, str):
system_instructions.append({'type': 'text', 'content': content})
elif isinstance(content, list):
system_instructions.extend(cast(list[Any], content))
elif role == 'assistant':
output_msgs.append(_convert_to_otel_message(msg))
else:
input_msgs.append(_convert_to_otel_message(msg))

if input_msgs:
new_attributes['gen_ai.input.messages'] = json.dumps(input_msgs)
properties['gen_ai.input.messages'] = {'type': 'array'}
if output_msgs:
new_attributes['gen_ai.output.messages'] = json.dumps(output_msgs)
properties['gen_ai.output.messages'] = {'type': 'array'}
if system_instructions:
new_attributes['gen_ai.system_instructions'] = json.dumps(system_instructions)
properties['gen_ai.system_instructions'] = {'type': 'array'}

span['attributes'] = {
**attributes,
ATTRIBUTES_JSON_SCHEMA_KEY: attributes_json_schema(properties),
Expand Down
Loading
Loading