1+ import contextvars
12import itertools
23import warnings
34from collections import OrderedDict
45from functools import wraps
6+ import sys
57
68import sentry_sdk
79from sentry_sdk .ai .monitoring import set_ai_pipeline_name
7375}
7476
7577
# Per-context stack of active agent names, enabling re-entrant (nested)
# agent invocations: each invoke_agent pushes on entry and pops on exit,
# and LLM/tool spans read the top of the stack. Stored in a ContextVar so
# concurrent async tasks each observe their own stack; the default of
# None means "no agent has been pushed in this context yet".
_agent_stack = contextvars .ContextVar ("langchain_agent_stack" , default = None ) # type: contextvars.ContextVar[Optional[List[Optional[str]]]]
80+
81+
def _push_agent(agent_name):
    # type: (Optional[str]) -> None
    """Record *agent_name* as the innermost active agent.

    A brand-new list is written back to the contextvar on every push
    (never mutated in place) so that other async contexts still holding
    a reference to the previous list are unaffected.
    """
    current = _agent_stack.get()
    new_stack = [agent_name] if current is None else current + [agent_name]
    _agent_stack.set(new_stack)
93+
94+
def _pop_agent():
    # type: () -> Optional[str]
    """Remove and return the innermost agent name, or None if the stack is empty."""
    current = _agent_stack.get()
    if not current:
        # Nothing pushed in this context (None or empty list): no-op.
        return None
    # Write back a fresh, shorter list instead of mutating the stored one,
    # preserving isolation across concurrent async contexts.
    _agent_stack.set(current[:-1])
    return current[-1]
106+
107+
def _get_current_agent():
    # type: () -> Optional[str]
    """Peek at the innermost agent name without removing it; None when no agent is active."""
    current = _agent_stack.get()
    return current[-1] if current else None
115+
116+
76117class LangchainIntegration (Integration ):
77118 identifier = "langchain"
78119 origin = f"auto.ai.{ identifier } "
@@ -283,6 +324,10 @@ def on_chat_model_start(self, serialized, messages, *, run_id, **kwargs):
283324 elif "openai" in ai_type :
284325 span .set_data (SPANDATA .GEN_AI_SYSTEM , "openai" )
285326
327+ agent_name = _get_current_agent ()
328+ if agent_name :
329+ span .set_data (SPANDATA .GEN_AI_AGENT_NAME , agent_name )
330+
286331 for key , attribute in DATA_FIELDS .items ():
287332 if key in all_params and all_params [key ] is not None :
288333 set_data_normalized (span , attribute , all_params [key ], unpack = False )
@@ -435,6 +480,10 @@ def on_tool_start(self, serialized, input_str, *, run_id, **kwargs):
435480 if tool_description is not None :
436481 span .set_data (SPANDATA .GEN_AI_TOOL_DESCRIPTION , tool_description )
437482
483+ agent_name = _get_current_agent ()
484+ if agent_name :
485+ span .set_data (SPANDATA .GEN_AI_AGENT_NAME , agent_name )
486+
438487 if should_send_default_pii () and self .include_prompts :
439488 set_data_normalized (
440489 span ,
@@ -763,45 +812,50 @@ def new_invoke(self, *args, **kwargs):
763812 name = f"invoke_agent { agent_name } " if agent_name else "invoke_agent" ,
764813 origin = LangchainIntegration .origin ,
765814 ) as span :
766- if agent_name :
767- span .set_data (SPANDATA .GEN_AI_AGENT_NAME , agent_name )
768-
769- span .set_data (SPANDATA .GEN_AI_OPERATION_NAME , "invoke_agent" )
770- span .set_data (SPANDATA .GEN_AI_RESPONSE_STREAMING , False )
771-
772- _set_tools_on_span (span , tools )
773-
774- # Run the agent
775- result = f (self , * args , ** kwargs )
776-
777- input = result .get ("input" )
778- if (
779- input is not None
780- and should_send_default_pii ()
781- and integration .include_prompts
782- ):
783- normalized_messages = normalize_message_roles ([input ])
784- scope = sentry_sdk .get_current_scope ()
785- messages_data = truncate_and_annotate_messages (
786- normalized_messages , span , scope
787- )
788- if messages_data is not None :
789- set_data_normalized (
790- span ,
791- SPANDATA .GEN_AI_REQUEST_MESSAGES ,
792- messages_data ,
793- unpack = False ,
815+ _push_agent (agent_name )
816+ try :
817+ if agent_name :
818+ span .set_data (SPANDATA .GEN_AI_AGENT_NAME , agent_name )
819+
820+ span .set_data (SPANDATA .GEN_AI_OPERATION_NAME , "invoke_agent" )
821+ span .set_data (SPANDATA .GEN_AI_RESPONSE_STREAMING , False )
822+
823+ _set_tools_on_span (span , tools )
824+
825+ # Run the agent
826+ result = f (self , * args , ** kwargs )
827+
828+ input = result .get ("input" )
829+ if (
830+ input is not None
831+ and should_send_default_pii ()
832+ and integration .include_prompts
833+ ):
834+ normalized_messages = normalize_message_roles ([input ])
835+ scope = sentry_sdk .get_current_scope ()
836+ messages_data = truncate_and_annotate_messages (
837+ normalized_messages , span , scope
794838 )
839+ if messages_data is not None :
840+ set_data_normalized (
841+ span ,
842+ SPANDATA .GEN_AI_REQUEST_MESSAGES ,
843+ messages_data ,
844+ unpack = False ,
845+ )
795846
796- output = result .get ("output" )
797- if (
798- output is not None
799- and should_send_default_pii ()
800- and integration .include_prompts
801- ):
802- set_data_normalized (span , SPANDATA .GEN_AI_RESPONSE_TEXT , output )
847+ output = result .get ("output" )
848+ if (
849+ output is not None
850+ and should_send_default_pii ()
851+ and integration .include_prompts
852+ ):
853+ set_data_normalized (span , SPANDATA .GEN_AI_RESPONSE_TEXT , output )
803854
804- return result
855+ return result
856+ finally :
857+ # Ensure agent is popped even if an exception occurs
858+ _pop_agent ()
805859
806860 return new_invoke
807861
@@ -821,11 +875,13 @@ def new_stream(self, *args, **kwargs):
821875
822876 span = start_span_function (
823877 op = OP .GEN_AI_INVOKE_AGENT ,
824- name = f"invoke_agent { agent_name } " . strip () ,
878+ name = f"invoke_agent { agent_name } " if agent_name else "invoke_agent" ,
825879 origin = LangchainIntegration .origin ,
826880 )
827881 span .__enter__ ()
828882
883+ _push_agent (agent_name )
884+
829885 if agent_name :
830886 span .set_data (SPANDATA .GEN_AI_AGENT_NAME , agent_name )
831887
@@ -860,41 +916,57 @@ def new_stream(self, *args, **kwargs):
860916
861917 def new_iterator ():
862918 # type: () -> Iterator[Any]
863- for event in old_iterator :
864- yield event
865-
919+ exc_info = (None , None , None ) # type: tuple[Any, Any, Any]
866920 try :
867- output = event .get ("output" )
868- except Exception :
869- output = None
870-
871- if (
872- output is not None
873- and should_send_default_pii ()
874- and integration .include_prompts
875- ):
876- set_data_normalized (span , SPANDATA .GEN_AI_RESPONSE_TEXT , output )
921+ for event in old_iterator :
922+ yield event
877923
878- span .__exit__ (None , None , None )
924+ try :
925+ output = event .get ("output" )
926+ except Exception :
927+ output = None
928+
929+ if (
930+ output is not None
931+ and should_send_default_pii ()
932+ and integration .include_prompts
933+ ):
934+ set_data_normalized (span , SPANDATA .GEN_AI_RESPONSE_TEXT , output )
935+ except Exception :
936+ exc_info = sys .exc_info ()
937+ set_span_errored (span )
938+ raise
939+ finally :
940+ # Ensure cleanup happens even if iterator is abandoned or fails
941+ _pop_agent ()
942+ span .__exit__ (* exc_info )
879943
880944 async def new_iterator_async ():
881945 # type: () -> AsyncIterator[Any]
882- async for event in old_iterator :
883- yield event
884-
946+ exc_info = (None , None , None ) # type: tuple[Any, Any, Any]
885947 try :
886- output = event .get ("output" )
887- except Exception :
888- output = None
889-
890- if (
891- output is not None
892- and should_send_default_pii ()
893- and integration .include_prompts
894- ):
895- set_data_normalized (span , SPANDATA .GEN_AI_RESPONSE_TEXT , output )
948+ async for event in old_iterator :
949+ yield event
896950
897- span .__exit__ (None , None , None )
951+ try :
952+ output = event .get ("output" )
953+ except Exception :
954+ output = None
955+
956+ if (
957+ output is not None
958+ and should_send_default_pii ()
959+ and integration .include_prompts
960+ ):
961+ set_data_normalized (span , SPANDATA .GEN_AI_RESPONSE_TEXT , output )
962+ except Exception :
963+ exc_info = sys .exc_info ()
964+ set_span_errored (span )
965+ raise
966+ finally :
967+ # Ensure cleanup happens even if iterator is abandoned or fails
968+ _pop_agent ()
969+ span .__exit__ (* exc_info )
898970
899971 if str (type (result )) == "<class 'async_generator'>" :
900972 result = new_iterator_async ()
0 commit comments