Merged
fix(integrations): ensure that GEN_AI_AGENT_NAME is properly set for GEN_AI spans under an invoke_agent span
constantinius authored and sentrivana committed Oct 29, 2025
commit db0772cab440c3cb253802698af1454a5e2eed81
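
At a high level, the LangChain part of this commit propagates the agent name to nested gen_ai spans by stashing it in a `langchain_agent` scope context inside the `invoke_agent` span and reading it back in the chat/tool callbacks; the Google GenAI part sets `GEN_AI_AGENT_NAME` directly on the chat spans. A minimal sketch of the scope-context pattern, written as a standalone wrapper (the names, the `run_agent` callable, and the try/finally placement are illustrative, not the literal integration code):

```python
# Sketch of the propagation pattern used by the langchain.py changes below:
# the invoke_agent span stores the agent name in a scope context so that
# callbacks creating nested gen_ai spans (chat, tool) can pick it up.
import sentry_sdk
from sentry_sdk.consts import OP, SPANDATA


def invoke_agent(agent_name, run_agent):
    # run_agent stands in for the wrapped AgentExecutor invocation.
    with sentry_sdk.start_span(
        op=OP.GEN_AI_INVOKE_AGENT,
        name=f"invoke_agent {agent_name}" if agent_name else "invoke_agent",
    ) as span:
        sentry_sdk.get_current_scope().set_context(
            "langchain_agent", {"agent_name": agent_name}
        )
        if agent_name:
            span.set_data(SPANDATA.GEN_AI_AGENT_NAME, agent_name)
        try:
            return run_agent()
        finally:
            # The integration removes the context once the agent run finishes.
            sentry_sdk.get_current_scope().remove_context("langchain_agent")


def tag_nested_span(span):
    # Mirrors what on_chat_model_start / on_tool_start do in this commit.
    agent_name = (
        sentry_sdk.get_current_scope()
        ._contexts.get("langchain_agent", {})
        .get("agent_name")
    )
    if agent_name:
        span.set_data(SPANDATA.GEN_AI_AGENT_NAME, agent_name)
```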
3 changes: 3 additions & 0 deletions sentry_sdk/integrations/google_genai/__init__.py
@@ -92,6 +92,7 @@ def new_generate_content_stream(self, *args, **kwargs):
chat_span.set_data(SPANDATA.GEN_AI_REQUEST_MODEL, model_name)
set_span_data_for_request(chat_span, integration, model_name, contents, kwargs)
chat_span.set_data(SPANDATA.GEN_AI_RESPONSE_STREAMING, True)
chat_span.set_data(SPANDATA.GEN_AI_AGENT_NAME, model_name)

try:
stream = f(self, *args, **kwargs)
@@ -165,6 +166,7 @@ async def new_async_generate_content_stream(self, *args, **kwargs):
chat_span.set_data(SPANDATA.GEN_AI_REQUEST_MODEL, model_name)
set_span_data_for_request(chat_span, integration, model_name, contents, kwargs)
chat_span.set_data(SPANDATA.GEN_AI_RESPONSE_STREAMING, True)
chat_span.set_data(SPANDATA.GEN_AI_AGENT_NAME, model_name)

try:
stream = await f(self, *args, **kwargs)
@@ -233,6 +235,7 @@ def new_generate_content(self, *args, **kwargs):
chat_span.set_data(SPANDATA.GEN_AI_OPERATION_NAME, "chat")
chat_span.set_data(SPANDATA.GEN_AI_SYSTEM, GEN_AI_SYSTEM)
chat_span.set_data(SPANDATA.GEN_AI_REQUEST_MODEL, model_name)
chat_span.set_data(SPANDATA.GEN_AI_AGENT_NAME, model_name)
set_span_data_for_request(
chat_span, integration, model_name, contents, kwargs
)
Comment on lines 235 to 241

Bug: _wrap_async_generate_content omits setting SPANDATA.GEN_AI_AGENT_NAME on the chat span.
Severity: HIGH | Confidence: 1.00

🔍 Detailed Analysis

The _wrap_async_generate_content function fails to set the SPANDATA.GEN_AI_AGENT_NAME on the chat span. This omission occurs when handling async non-streaming generate_content calls, leading to inconsistent span data compared to other wrapper functions which correctly set this attribute. Consequently, monitoring attributes for these specific calls will be incomplete.

💡 Suggested Fix

Add chat_span.set_data(SPANDATA.GEN_AI_AGENT_NAME, model_name) within the _wrap_async_generate_content function, specifically after setting SPANDATA.GEN_AI_REQUEST_MODEL.
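
For reference, a minimal sketch of where that call would sit, modeled on the synchronous `new_generate_content` wrapper shown in this diff (the span name, the model-name lookup, and the abbreviated body are assumptions, not the actual `_wrap_async_generate_content` implementation):

```python
from functools import wraps

import sentry_sdk
from sentry_sdk.consts import OP, SPANDATA

GEN_AI_SYSTEM = "gcp.gemini"  # stand-in for the integration's constant


def _wrap_async_generate_content(f):
    @wraps(f)
    async def new_async_generate_content(self, *args, **kwargs):
        model_name = kwargs.get("model", "unknown")  # hypothetical lookup
        with sentry_sdk.start_span(
            op=OP.GEN_AI_CHAT, name=f"chat {model_name}"
        ) as chat_span:
            chat_span.set_data(SPANDATA.GEN_AI_OPERATION_NAME, "chat")
            chat_span.set_data(SPANDATA.GEN_AI_SYSTEM, GEN_AI_SYSTEM)
            chat_span.set_data(SPANDATA.GEN_AI_REQUEST_MODEL, model_name)
            # Suggested addition: mirror the sync wrapper and record the
            # agent name right after the request model.
            chat_span.set_data(SPANDATA.GEN_AI_AGENT_NAME, model_name)
            return await f(self, *args, **kwargs)

    return new_async_generate_content
```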

🤖 Prompt for AI Agent
Review the code at the location below. A potential bug has been identified by an AI
agent.
Verify if this is a real issue. If it is, propose a fix; if not, explain why it's not
valid.

Location: sentry_sdk/integrations/google_genai/__init__.py#L235-L241

Potential issue: The `_wrap_async_generate_content` function fails to set the
`SPANDATA.GEN_AI_AGENT_NAME` on the chat span. This omission occurs when handling async
non-streaming `generate_content` calls, leading to inconsistent span data compared to
other wrapper functions which correctly set this attribute. Consequently, monitoring
attributes for these specific calls will be incomplete.

Did we get this right? 👍 / 👎 to inform future reviews.

31 changes: 30 additions & 1 deletion sentry_sdk/integrations/langchain.py
@@ -276,6 +276,14 @@ def on_chat_model_start(self, serialized, messages, *, run_id, **kwargs):
elif "openai" in ai_type:
span.set_data(SPANDATA.GEN_AI_SYSTEM, "openai")

agent_name = (
sentry_sdk.get_current_scope()
._contexts.get("langchain_agent", {})
.get("agent_name")
)
if agent_name:
span.set_data(SPANDATA.GEN_AI_AGENT_NAME, agent_name)

for key, attribute in DATA_FIELDS.items():
if key in all_params and all_params[key] is not None:
set_data_normalized(span, attribute, all_params[key], unpack=False)
@@ -428,6 +436,14 @@ def on_tool_start(self, serialized, input_str, *, run_id, **kwargs):
if tool_description is not None:
span.set_data(SPANDATA.GEN_AI_TOOL_DESCRIPTION, tool_description)

agent_name = (
sentry_sdk.get_current_scope()
._contexts.get("langchain_agent", {})
.get("agent_name")
)
if agent_name:
span.set_data(SPANDATA.GEN_AI_AGENT_NAME, agent_name)

if should_send_default_pii() and self.include_prompts:
set_data_normalized(
span,
@@ -756,6 +772,9 @@ def new_invoke(self, *args, **kwargs):
name=f"invoke_agent {agent_name}" if agent_name else "invoke_agent",
origin=LangchainIntegration.origin,
) as span:
sentry_sdk.get_current_scope().set_context(
"langchain_agent", {"agent_name": agent_name, "tools": tools}
)
if agent_name:
span.set_data(SPANDATA.GEN_AI_AGENT_NAME, agent_name)

@@ -794,6 +813,8 @@ def new_invoke(self, *args, **kwargs):
):
set_data_normalized(span, SPANDATA.GEN_AI_RESPONSE_TEXT, output)

sentry_sdk.get_current_scope().remove_context("langchain_agent")

return result

return new_invoke
@@ -814,11 +835,15 @@ def new_stream(self, *args, **kwargs):

span = start_span_function(
op=OP.GEN_AI_INVOKE_AGENT,
name=f"invoke_agent {agent_name}".strip(),
name=f"invoke_agent {agent_name}" if agent_name else "invoke_agent",
origin=LangchainIntegration.origin,
)
span.__enter__()

sentry_sdk.get_current_scope().set_context(
"langchain_agent", {"agent_name": agent_name, "tools": tools}
)

if agent_name:
span.set_data(SPANDATA.GEN_AI_AGENT_NAME, agent_name)

@@ -868,6 +893,8 @@ def new_iterator():
):
set_data_normalized(span, SPANDATA.GEN_AI_RESPONSE_TEXT, output)

sentry_sdk.get_current_scope().remove_context("langchain_agent")

span.__exit__(None, None, None)

async def new_iterator_async():
Expand All @@ -887,6 +914,8 @@ async def new_iterator_async():
):
set_data_normalized(span, SPANDATA.GEN_AI_RESPONSE_TEXT, output)

sentry_sdk.get_current_scope().remove_context("langchain_agent")

span.__exit__(None, None, None)

if str(type(result)) == "<class 'async_generator'>":