Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view

Large diffs are not rendered by default.

Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ import ai.koog.agents.features.opentelemetry.attribute.KoogAttributes
import ai.koog.agents.features.opentelemetry.extension.addCommonErrorAttributes
import ai.koog.agents.features.opentelemetry.extension.systemMessages
import ai.koog.agents.features.opentelemetry.extension.toStatusData
import ai.koog.agents.features.opentelemetry.integration.SpanAdapter
import ai.koog.prompt.llm.LLModel
import ai.koog.prompt.message.Message
import io.opentelemetry.kotlin.factory.ContextFactory
Expand All @@ -30,6 +31,15 @@ import io.opentelemetry.kotlin.tracing.Tracer
*
* Custom attributes:
* - koog.event.id
*
* @param tracer The tracer to use
* @param contextFactory The context factory to use
* @param parentSpan The parent span to use
* @param id The id of the span
* @param agentId The id of the agent
* @param model The model to use
* @param messages The messages to use
* @param spanAdapter The span adapter to use
*/
internal fun startCreateAgentSpan(
tracer: Tracer,
Expand All @@ -38,7 +48,8 @@ internal fun startCreateAgentSpan(
id: String,
agentId: String,
model: LLModel,
messages: List<Message>
messages: List<Message>,
spanAdapter: SpanAdapter? = null,
): GenAIAgentSpan {
val builder = GenAIAgentSpanBuilder(
spanType = SpanType.CREATE_AGENT,
Expand Down Expand Up @@ -68,7 +79,9 @@ internal fun startCreateAgentSpan(

builder.addAttribute(KoogAttributes.Koog.Event.Id(id))

return builder.buildAndStart(tracer, contextFactory)
val span = builder.buildAndStart(tracer, contextFactory)
spanAdapter?.onBeforeSpanStarted(span)
return span
}

/**
Expand All @@ -79,11 +92,17 @@ internal fun startCreateAgentSpan(
*
* Span attribute:
* - error.type (conditional)
*
* @param span The span to end
* @param error The error to set as the span status
* @param verbose Whether to log verbose information
* @param spanAdapter The span adapter to use
*/
internal fun endCreateAgentSpan(
span: GenAIAgentSpan,
error: Throwable? = null,
verbose: Boolean = false
verbose: Boolean = false,
spanAdapter: SpanAdapter? = null,
) {
check(span.type == SpanType.CREATE_AGENT) {
"${span.logString} Expected to end span type of type: <${SpanType.CREATE_AGENT}>, but received span of type: <${span.type}>"
Expand All @@ -92,5 +111,6 @@ internal fun endCreateAgentSpan(
// error.type
span.addCommonErrorAttributes(error)

spanAdapter?.onBeforeSpanFinished(span)
span.end(error.toStatusData(), verbose)
}
Original file line number Diff line number Diff line change
@@ -1,17 +1,27 @@
package ai.koog.agents.features.opentelemetry.span

import ai.koog.agents.features.opentelemetry.attribute.CommonAttributes
import ai.koog.agents.features.opentelemetry.attribute.GenAIAttributes
import ai.koog.agents.features.opentelemetry.attribute.KoogAttributes
import ai.koog.agents.features.opentelemetry.attribute.McpAttributes
import ai.koog.agents.features.opentelemetry.extension.addCommonErrorAttributes
import ai.koog.agents.features.opentelemetry.extension.toStatusData
import ai.koog.agents.features.opentelemetry.integration.SpanAdapter
import ai.koog.agents.mcp.metadata.McpMetadataKeys
import io.github.oshai.kotlinlogging.KotlinLogging
import io.opentelemetry.kotlin.factory.ContextFactory
import io.opentelemetry.kotlin.tracing.SpanKind
import io.opentelemetry.kotlin.tracing.Tracer
import kotlinx.serialization.json.JsonElement
import kotlinx.serialization.json.JsonObject

private val logger = KotlinLogging.logger { }

/**
* Build and start a new Execute Tool Span with necessary attributes.
* Build and start a new Execute Tool Span with necessary attributes. When [mcpToolMetadata] is
* provided (i.e., the tool is sourced via MCP), the span is enriched with MCP-specific attributes
* before [SpanAdapter.onBeforeSpanStarted] is invoked, so adapters observe the fully prepared
* attribute set.
*
* Add the necessary attributes for the Execute Tool Span, according to the OpenTelemetry Semantic Convention:
* https://opentelemetry.io/docs/specs/semconv/gen-ai/gen-ai-spans/#execute-tool-span
Expand All @@ -26,6 +36,17 @@ import kotlinx.serialization.json.JsonObject
*
* Custom attributes:
* - koog.event.id
*
* @param tracer The tracer instance to use for creating the span.
* @param contextFactory The context factory to use for creating the span context.
* @param parentSpan The parent span for the new span.
* @param id The unique identifier for the span.
* @param toolName The name of the tool being called.
* @param toolArgs The arguments for the tool call.
* @param toolDescription The description of the tool.
* @param toolCallId The identifier for the tool call.
* @param mcpToolMetadata Optional metadata for the tool sourced via MCP.
* @param spanAdapter Optional span adapter for customizing the span behavior.
*/
internal fun startExecuteToolSpan(
tracer: Tracer,
Expand All @@ -36,6 +57,8 @@ internal fun startExecuteToolSpan(
toolArgs: JsonObject,
toolDescription: String?,
toolCallId: String?,
mcpToolMetadata: Map<String, String>? = null,
spanAdapter: SpanAdapter? = null,
): GenAIAgentSpan {
val builder = GenAIAgentSpanBuilder(
spanType = SpanType.EXECUTE_TOOL,
Expand Down Expand Up @@ -70,24 +93,58 @@ internal fun startExecuteToolSpan(

builder.addAttribute(KoogAttributes.Koog.Event.Id(id))

return builder.buildAndStart(tracer, contextFactory)
val span = builder.buildAndStart(tracer, contextFactory)

if (mcpToolMetadata != null) {
val mcpVersion = mcpToolMetadata[McpMetadataKeys.McpProtocolVersion]
val mcpTransportType = mcpToolMetadata[McpMetadataKeys.McpTransportType]
if (mcpVersion != null && mcpTransportType != null) {
span.enrichExecuteToolSpanWithMcpAttributes(
toolName = toolName,
sessionId = mcpToolMetadata[McpMetadataKeys.McpSessionId],
methodName = "tools/call",
serverPort = mcpToolMetadata[McpMetadataKeys.ServerPort]?.toIntOrNull(),
serverAddress = mcpToolMetadata[McpMetadataKeys.ServerUrl],
mcpProtocolVersion = mcpVersion,
mcpTransportType = mcpTransportType,
)
} else {
logger.error {
"MCP protocol version ($mcpVersion) and transport type ($mcpTransportType) are required " +
"for mcp tool call spans: tool=$toolName, " +
"serverUrl=${mcpToolMetadata[McpMetadataKeys.ServerUrl]}"
}
}
}

spanAdapter?.onBeforeSpanStarted(span)
return span
}

/**
* End Execute Tool Span and set final attributes.
* End Execute Tool Span and set final attributes. The provided [spanAdapter] is invoked via
* [SpanAdapter.onBeforeSpanFinished] after all attributes are set and immediately before the
* underlying span is ended.
*
* Add the necessary attributes for the Execute Tool Span, according to the OpenTelemetry Semantic Convention:
* https://opentelemetry.io/docs/specs/semconv/gen-ai/gen-ai-spans/#execute-tool-span
*
* Span attribute:
* - error.type (conditional)
* - gen_ai.tool.call.result (recommended)
*
* @param span The span to end.
* @param toolResult The result of the tool call.
* @param error The error that occurred during the tool call, if any.
* @param verbose Whether to log verbose information.
* @param spanAdapter Optional span adapter for customizing the span behavior.
*/
internal fun endExecuteToolSpan(
span: GenAIAgentSpan,
toolResult: JsonElement?,
error: Throwable? = null,
verbose: Boolean = false
verbose: Boolean = false,
spanAdapter: SpanAdapter? = null,
) {
check(span.type == SpanType.EXECUTE_TOOL) {
"${span.logString} Expected to end span type of type: <${SpanType.EXECUTE_TOOL}>, but received span of type: <${span.type}>"
Expand All @@ -101,5 +158,61 @@ internal fun endExecuteToolSpan(
span.addAttribute(GenAIAttributes.Tool.Call.Result(result))
}

spanAdapter?.onBeforeSpanFinished(span)
span.end(error.toStatusData(), verbose)
}

/**
 * Enriches this Execute Tool span with MCP-specific attributes. Callers are responsible for
 * ensuring the receiver wraps an MCP tool invocation — the span type is not validated here.
 * The requirement level noted next to each attribute follows the OpenTelemetry GenAI/MCP
 * semantic conventions.
 *
 * @param toolName The name of the MCP tool being called;
 * @param methodName MCP method invoked;
 * @param serverAddress MCP server address, when known;
 * @param serverPort MCP server port, when known;
 * @param sessionId MCP session identifier, when the call is part of a session;
 * @param mcpProtocolVersion MCP protocol version in use;
 * @param mcpTransportType Transport used to reach the MCP server ("stdio", "http");
 * @return The receiver span, for chaining.
 */
private fun GenAIAgentSpan.enrichExecuteToolSpanWithMcpAttributes(
    toolName: String,
    methodName: String,
    serverAddress: String? = null,
    serverPort: Int? = null,
    sessionId: String? = null,
    mcpProtocolVersion: String,
    mcpTransportType: String,
): GenAIAgentSpan = apply {
    // mcp.method.name (REQUIRED)
    addAttribute(McpAttributes.Mcp.Method.Name(methodName))

    // gen_ai.operation.name (RECOMMENDED for tool calls)
    addAttribute(GenAIAttributes.Operation.Name(GenAIAttributes.Operation.OperationNameType.EXECUTE_TOOL))

    // gen_ai.tool.name (CONDITIONALLY REQUIRED)
    addAttribute(GenAIAttributes.Tool.Name(toolName))

    // mcp.session.id (RECOMMENDED) — only present for session-scoped calls
    if (sessionId != null) {
        addAttribute(McpAttributes.Mcp.Session.Id(sessionId))
    }

    // mcp.protocol.version (RECOMMENDED)
    addAttribute(McpAttributes.Mcp.Protocol.Version(mcpProtocolVersion))

    // network.transport (RECOMMENDED)
    addAttribute(McpAttributes.Network.Transport(mcpTransportType))

    // server.address (RECOMMENDED) — skipped when the server location is unknown
    if (serverAddress != null) {
        addAttribute(CommonAttributes.Server.Address(serverAddress))
    }

    // server.port (RECOMMENDED) — skipped when the server location is unknown
    if (serverPort != null) {
        addAttribute(CommonAttributes.Server.Port(serverPort))
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ import ai.koog.agents.features.opentelemetry.extension.sumInputTokens
import ai.koog.agents.features.opentelemetry.extension.sumOutputTokens
import ai.koog.agents.features.opentelemetry.extension.systemMessages
import ai.koog.agents.features.opentelemetry.extension.toStatusData
import ai.koog.agents.features.opentelemetry.integration.SpanAdapter
import ai.koog.prompt.llm.LLMProvider
import ai.koog.prompt.llm.LLModel
import ai.koog.prompt.message.Message
Expand Down Expand Up @@ -47,6 +48,18 @@ import kotlinx.serialization.json.JsonObject
*
* Custom attributes:
* - koog.event.id
*
* @param tracer The tracer instance to use for creating the span.
* @param contextFactory The context factory to use for creating the span context.
* @param parentSpan The parent span for the new span.
* @param id The unique identifier for the span.
* @param provider The LLM provider for the inference.
* @param runId The unique identifier for the run.
* @param model The model used for inference.
* @param messages The list of messages used in the inference.
* @param llmParams The parameters for the LLM request.
* @param tools The list of tools available for the inference.
* @param spanAdapter Optional span adapter for customizing the span behavior.
*/
internal fun startInferenceSpan(
tracer: Tracer,
Expand All @@ -58,7 +71,8 @@ internal fun startInferenceSpan(
model: LLModel,
messages: List<Message>,
llmParams: LLMParams,
tools: List<ToolDescriptor>
tools: List<ToolDescriptor>,
spanAdapter: SpanAdapter? = null,
): GenAIAgentSpan {
val builder = GenAIAgentSpanBuilder(
spanType = SpanType.INFERENCE,
Expand Down Expand Up @@ -128,11 +142,15 @@ internal fun startInferenceSpan(

builder.addAttribute(KoogAttributes.Koog.Event.Id(id))

return builder.buildAndStart(tracer, contextFactory)
val span = builder.buildAndStart(tracer, contextFactory)
spanAdapter?.onBeforeSpanStarted(span)
return span
}

/**
* End Inference Span and set final attributes.
* End Inference Span and set final attributes. The provided [spanAdapter] is invoked via
* [SpanAdapter.onBeforeSpanFinished] after all attributes are set and immediately before the
* underlying span is ended.
*
* Add the necessary attributes for the Inference Span according to the OpenTelemetry Semantic Convention:
* https://opentelemetry.io/docs/specs/semconv/gen-ai/gen-ai-spans/#inference
Expand All @@ -146,13 +164,21 @@ internal fun startInferenceSpan(
* - gen_ai.usage.input_tokens (recommended)
* - gen_ai.usage.output_tokens (recommended)
* - gen_ai.output.messages (recommended)
*
* @param span The span to end.
* @param messages The list of messages used in the inference.
* @param model The model used for inference.
* @param error The error that occurred during inference if any.
* @param verbose Whether to log verbose information.
* @param spanAdapter Optional span adapter for customizing the span behavior.
*/
internal fun endInferenceSpan(
span: GenAIAgentSpan,
messages: List<Message>,
model: LLModel,
error: Throwable? = null,
verbose: Boolean = false
verbose: Boolean = false,
spanAdapter: SpanAdapter? = null,
) {
check(span.type == SpanType.INFERENCE) {
"${span.logString} Expected to end span type of type: <${SpanType.INFERENCE}>, but received span of type: <${span.type}>"
Expand Down Expand Up @@ -184,5 +210,6 @@ internal fun endInferenceSpan(
span.addAttribute(GenAIAttributes.Output.Messages(messages))
}

spanAdapter?.onBeforeSpanFinished(span)
span.end(error.toStatusData(), verbose)
}
Loading
Loading