1- import contextvars
21import itertools
32import sys
43import json
@@ -162,44 +161,6 @@ def _transform_langchain_message_content(content: "Any") -> "Any":
162161 return content
163162
164163
# Contextvar to track agent names in a stack for re-entrant agent support.
# The default is None rather than a shared empty list; the push/pop helpers
# copy the list before mutating so each async context keeps an isolated view.
_agent_stack: "contextvars.ContextVar[Optional[List[Optional[str]]]]" = (
    contextvars.ContextVar("langchain_agent_stack", default=None)
)
169-
170-
def _push_agent(agent_name: "Optional[str]") -> None:
    """Push an agent name onto the context-local stack."""
    current = _agent_stack.get()
    # Copy-on-write: never mutate the stored list, so other async
    # contexts sharing the parent snapshot are not affected.
    new_stack = list(current) if current is not None else []
    new_stack.append(agent_name)
    _agent_stack.set(new_stack)
182-
def _pop_agent() -> "Optional[str]":
    """Remove and return the agent name on top of the stack, or None if empty."""
    current = _agent_stack.get()
    if not current:
        return None
    # Work on a copy to preserve contextvar isolation across async contexts.
    new_stack = list(current)
    top = new_stack.pop()
    _agent_stack.set(new_stack)
    return top
194-
def _get_current_agent() -> "Optional[str]":
    """Return the current agent name (top of stack) without removing it."""
    current = _agent_stack.get()
    return current[-1] if current else None
201-
202-
203164def _get_system_instructions (messages : "List[List[BaseMessage]]" ) -> "List[str]" :
204165 system_instructions = []
205166
@@ -465,9 +426,11 @@ def on_chat_model_start(
465426 if ai_system :
466427 span .set_data (SPANDATA .GEN_AI_SYSTEM , ai_system )
467428
468- agent_name = _get_current_agent ()
469- if agent_name :
470- span .set_data (SPANDATA .GEN_AI_AGENT_NAME , agent_name )
429+ agent_metadata = kwargs .get ("metadata" )
430+ if isinstance (agent_metadata , dict ) and "lc_agent_name" in agent_metadata :
431+ span .set_data (
432+ SPANDATA .GEN_AI_AGENT_NAME , agent_metadata ["lc_agent_name" ]
433+ )
471434
472435 for key , attribute in DATA_FIELDS .items ():
473436 if key in all_params and all_params [key ] is not None :
@@ -665,9 +628,11 @@ def on_tool_start(
665628 if tool_description is not None :
666629 span .set_data (SPANDATA .GEN_AI_TOOL_DESCRIPTION , tool_description )
667630
668- agent_name = _get_current_agent ()
669- if agent_name :
670- span .set_data (SPANDATA .GEN_AI_AGENT_NAME , agent_name )
631+ agent_metadata = kwargs .get ("metadata" )
632+ if isinstance (agent_metadata , dict ) and "lc_agent_name" in agent_metadata :
633+ span .set_data (
634+ SPANDATA .GEN_AI_AGENT_NAME , agent_metadata ["lc_agent_name" ]
635+ )
671636
672637 if should_send_default_pii () and self .include_prompts :
673638 set_data_normalized (
@@ -987,58 +952,53 @@ def new_invoke(self: "Any", *args: "Any", **kwargs: "Any") -> "Any":
987952 if integration is None :
988953 return f (self , * args , ** kwargs )
989954
990- agent_name , tools = _get_request_data (self , args , kwargs )
955+ run_name , tools = _get_request_data (self , args , kwargs )
991956 start_span_function = get_start_span_function ()
992957
993958 with start_span_function (
994959 op = OP .GEN_AI_INVOKE_AGENT ,
995- name = f"invoke_agent { agent_name } " if agent_name else "invoke_agent" ,
960+ name = f"invoke_agent { run_name } " if run_name else "invoke_agent" ,
996961 origin = LangchainIntegration .origin ,
997962 ) as span :
998- _push_agent (agent_name )
999- try :
1000- if agent_name :
1001- span .set_data (SPANDATA .GEN_AI_AGENT_NAME , agent_name )
963+ if run_name :
964+ span .set_data (SPANDATA .GEN_AI_AGENT_NAME , run_name )
1002965
1003- span .set_data (SPANDATA .GEN_AI_OPERATION_NAME , "invoke_agent" )
1004- span .set_data (SPANDATA .GEN_AI_RESPONSE_STREAMING , False )
966+ span .set_data (SPANDATA .GEN_AI_OPERATION_NAME , "invoke_agent" )
967+ span .set_data (SPANDATA .GEN_AI_RESPONSE_STREAMING , False )
1005968
1006- _set_tools_on_span (span , tools )
969+ _set_tools_on_span (span , tools )
1007970
1008- # Run the agent
1009- result = f (self , * args , ** kwargs )
971+ # Run the agent
972+ result = f (self , * args , ** kwargs )
1010973
1011- input = result .get ("input" )
1012- if (
1013- input is not None
1014- and should_send_default_pii ()
1015- and integration .include_prompts
1016- ):
1017- normalized_messages = normalize_message_roles ([input ])
1018- scope = sentry_sdk .get_current_scope ()
1019- messages_data = truncate_and_annotate_messages (
1020- normalized_messages , span , scope
974+ input = result .get ("input" )
975+ if (
976+ input is not None
977+ and should_send_default_pii ()
978+ and integration .include_prompts
979+ ):
980+ normalized_messages = normalize_message_roles ([input ])
981+ scope = sentry_sdk .get_current_scope ()
982+ messages_data = truncate_and_annotate_messages (
983+ normalized_messages , span , scope
984+ )
985+ if messages_data is not None :
986+ set_data_normalized (
987+ span ,
988+ SPANDATA .GEN_AI_REQUEST_MESSAGES ,
989+ messages_data ,
990+ unpack = False ,
1021991 )
1022- if messages_data is not None :
1023- set_data_normalized (
1024- span ,
1025- SPANDATA .GEN_AI_REQUEST_MESSAGES ,
1026- messages_data ,
1027- unpack = False ,
1028- )
1029992
1030- output = result .get ("output" )
1031- if (
1032- output is not None
1033- and should_send_default_pii ()
1034- and integration .include_prompts
1035- ):
1036- set_data_normalized (span , SPANDATA .GEN_AI_RESPONSE_TEXT , output )
993+ output = result .get ("output" )
994+ if (
995+ output is not None
996+ and should_send_default_pii ()
997+ and integration .include_prompts
998+ ):
999+ set_data_normalized (span , SPANDATA .GEN_AI_RESPONSE_TEXT , output )
10371000
1038- return result
1039- finally :
1040- # Ensure agent is popped even if an exception occurs
1041- _pop_agent ()
1001+ return result
10421002
10431003 return new_invoke
10441004
@@ -1050,20 +1010,18 @@ def new_stream(self: "Any", *args: "Any", **kwargs: "Any") -> "Any":
10501010 if integration is None :
10511011 return f (self , * args , ** kwargs )
10521012
1053- agent_name , tools = _get_request_data (self , args , kwargs )
1013+ run_name , tools = _get_request_data (self , args , kwargs )
10541014 start_span_function = get_start_span_function ()
10551015
10561016 span = start_span_function (
10571017 op = OP .GEN_AI_INVOKE_AGENT ,
1058- name = f"invoke_agent { agent_name } " if agent_name else "invoke_agent" ,
1018+ name = f"invoke_agent { run_name } " if run_name else "invoke_agent" ,
10591019 origin = LangchainIntegration .origin ,
10601020 )
10611021 span .__enter__ ()
10621022
1063- _push_agent (agent_name )
1064-
1065- if agent_name :
1066- span .set_data (SPANDATA .GEN_AI_AGENT_NAME , agent_name )
1023+ if run_name :
1024+ span .set_data (SPANDATA .GEN_AI_AGENT_NAME , run_name )
10671025
10681026 span .set_data (SPANDATA .GEN_AI_OPERATION_NAME , "invoke_agent" )
10691027 span .set_data (SPANDATA .GEN_AI_RESPONSE_STREAMING , True )
@@ -1117,7 +1075,6 @@ def new_iterator() -> "Iterator[Any]":
11171075 raise
11181076 finally :
11191077 # Ensure cleanup happens even if iterator is abandoned or fails
1120- _pop_agent ()
11211078 span .__exit__ (* exc_info )
11221079
11231080 async def new_iterator_async () -> "AsyncIterator[Any]" :
@@ -1143,7 +1100,6 @@ async def new_iterator_async() -> "AsyncIterator[Any]":
11431100 raise
11441101 finally :
11451102 # Ensure cleanup happens even if iterator is abandoned or fails
1146- _pop_agent ()
11471103 span .__exit__ (* exc_info )
11481104
11491105 if str (type (result )) == "<class 'async_generator'>" :
0 commit comments