From 828fbcacaecafff1fc0cc085ea2ff2cd4b1f426d Mon Sep 17 00:00:00 2001
From: doronkopit5
Date: Tue, 15 Oct 2024 14:04:54 +0300
Subject: [PATCH] Add attribute to span for LLM request type in dispatcher
 wrapper

Set the LLM_REQUEST_TYPE attribute on the span for tracking chat requests.
This change ensures proper monitoring and categorization. (#XYZ-123)
---
 .../instrumentation/llamaindex/dispatcher_wrapper.py | 1 +
 .../tests/test_agents.py                             | 2 ++
 2 files changed, 3 insertions(+)

diff --git a/packages/opentelemetry-instrumentation-llamaindex/opentelemetry/instrumentation/llamaindex/dispatcher_wrapper.py b/packages/opentelemetry-instrumentation-llamaindex/opentelemetry/instrumentation/llamaindex/dispatcher_wrapper.py
index 4981c726a..cb3cf1891 100644
--- a/packages/opentelemetry-instrumentation-llamaindex/opentelemetry/instrumentation/llamaindex/dispatcher_wrapper.py
+++ b/packages/opentelemetry-instrumentation-llamaindex/opentelemetry/instrumentation/llamaindex/dispatcher_wrapper.py
@@ -47,6 +47,7 @@ def instrument_with_dispatcher(tracer: Tracer):
 @dont_throw
 def _set_llm_chat_request(event, span) -> None:
     model_dict = event.model_dict
+    span.set_attribute(SpanAttributes.LLM_REQUEST_TYPE, LLMRequestTypeValues.CHAT.value)
     span.set_attribute(SpanAttributes.LLM_REQUEST_MODEL, model_dict.get("model"))
     span.set_attribute(
         SpanAttributes.LLM_REQUEST_TEMPERATURE, model_dict.get("temperature")
diff --git a/packages/opentelemetry-instrumentation-llamaindex/tests/test_agents.py b/packages/opentelemetry-instrumentation-llamaindex/tests/test_agents.py
index aee2e17be..c14632793 100644
--- a/packages/opentelemetry-instrumentation-llamaindex/tests/test_agents.py
+++ b/packages/opentelemetry-instrumentation-llamaindex/tests/test_agents.py
@@ -72,6 +72,7 @@ def multiply(a: int, b: int) -> int:
     assert llm_span_1.parent is not None
     assert llm_span_2.parent is not None
 
+    assert llm_span_1.attributes[SpanAttributes.LLM_REQUEST_TYPE] == "chat"
     assert (
         llm_span_1.attributes[SpanAttributes.LLM_REQUEST_MODEL] == "gpt-3.5-turbo-0613"
     )
@@ -93,6 +94,7 @@ def multiply(a: int, b: int) -> int:
     assert llm_span_1.attributes[SpanAttributes.LLM_USAGE_PROMPT_TOKENS] == 479
     assert llm_span_1.attributes[SpanAttributes.LLM_USAGE_TOTAL_TOKENS] == 522
 
+    assert llm_span_2.attributes[SpanAttributes.LLM_REQUEST_TYPE] == "chat"
     assert (
         llm_span_2.attributes[SpanAttributes.LLM_REQUEST_MODEL] == "gpt-3.5-turbo-0613"
     )