Variable: axSpanAttributes

const axSpanAttributes: object

Type declaration

DB_ID: string = 'db.id'
DB_NAMESPACE: string = 'db.namespace'
DB_OPERATION_NAME: string = 'db.operation.name'
DB_QUERY_EMBEDDINGS: string = 'db.query.embeddings'
DB_QUERY_EMBEDDINGS_VECTOR: string = 'db.query.embeddings.vector'
DB_QUERY_RESULT: string = 'db.query.result'
DB_QUERY_RESULT_DISTANCE: string = 'db.query.result.distance'
DB_QUERY_RESULT_DOCUMENT: string = 'db.query.result.document'
DB_QUERY_RESULT_ID: string = 'db.query.result.id'
DB_QUERY_RESULT_METADATA: string = 'db.query.result.metadata'
DB_QUERY_RESULT_SCORE: string = 'db.query.result.score'
DB_QUERY_RESULT_VECTOR: string = 'db.query.result.vector'
DB_QUERY_TEXT: string = 'db.query.text'
DB_SYSTEM: string = 'db.system'
DB_TABLE: string = 'db.table'
DB_VECTOR: string = 'db.vector'
DB_VECTOR_QUERY_TOP_K: string = 'db.vector.query.top_k'
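The DB_* keys above follow OpenTelemetry-style database attribute naming and are intended to be set on spans that wrap vector-store operations. Below is a minimal sketch of how they might be applied with the @opentelemetry/api tracer; the '@ax-llm/ax' import path, the 'weaviate' system value, and the runQuery helper are assumptions for illustration, not part of this reference.

```ts
import { trace } from '@opentelemetry/api';
import { axSpanAttributes } from '@ax-llm/ax';

const tracer = trace.getTracer('vector-db-example');

// Hypothetical query function standing in for a real vector-store client.
async function runQuery(
  text: string,
  topK: number
): Promise<{ id: string; score: number }[]> {
  return [{ id: 'doc-1', score: 0.92 }];
}

async function tracedVectorQuery(text: string, topK: number) {
  return tracer.startActiveSpan('vector_db.query', async (span) => {
    try {
      // Describe the query being issued.
      span.setAttribute(axSpanAttributes.DB_SYSTEM, 'weaviate');
      span.setAttribute(axSpanAttributes.DB_OPERATION_NAME, 'query');
      span.setAttribute(axSpanAttributes.DB_QUERY_TEXT, text);
      span.setAttribute(axSpanAttributes.DB_VECTOR_QUERY_TOP_K, topK);

      // Record a serialized form of the result set on the same span.
      const results = await runQuery(text, topK);
      span.setAttribute(axSpanAttributes.DB_QUERY_RESULT, JSON.stringify(results));
      return results;
    } finally {
      span.end();
    }
  });
}
```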

LLM_REQUEST_FREQUENCY_PENALTY: string = 'gen_ai.request.frequency_penalty'
LLM_REQUEST_LLM_IS_STREAMING: string = 'gen_ai.request.llm_is_streaming'
LLM_REQUEST_MAX_TOKENS: string = 'gen_ai.request.max_tokens'
LLM_REQUEST_MODEL: string = 'gen_ai.request.model'
LLM_REQUEST_PRESENCE_PENALTY: string = 'gen_ai.request.presence_penalty'
LLM_REQUEST_STOP_SEQUENCES: string = 'gen_ai.request.stop_sequences'
LLM_REQUEST_TEMPERATURE: string = 'gen_ai.request.temperature'
LLM_REQUEST_TOP_K: string = 'gen_ai.request.top_k'
LLM_REQUEST_TOP_P: string = 'gen_ai.request.top_p'
LLM_SYSTEM: string = 'gen_ai.system'
LLM_USAGE_COMPLETION_TOKENS: string = 'gen_ai.usage.completion_tokens'
LLM_USAGE_PROMPT_TOKENS: string = 'gen_ai.usage.prompt_tokens'
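
The LLM_* keys mirror the gen_ai.* attribute naming for model requests and token usage. The sketch below shows one way to record request parameters and usage counts on an active span; the request and usage object shapes, the 'openai' system value, and the '@ax-llm/ax' import path are illustrative assumptions.

```ts
import type { Span } from '@opentelemetry/api';
import { axSpanAttributes } from '@ax-llm/ax';

// Illustrative shapes; a real integration would use its LLM client's own types.
interface LlmRequestInfo {
  model: string;
  temperature: number;
  maxTokens: number;
  stream: boolean;
}

interface LlmUsageInfo {
  promptTokens: number;
  completionTokens: number;
}

// Copy request parameters and token usage onto an already-started span.
function recordLlmAttributes(span: Span, req: LlmRequestInfo, usage: LlmUsageInfo): void {
  span.setAttribute(axSpanAttributes.LLM_SYSTEM, 'openai');
  span.setAttribute(axSpanAttributes.LLM_REQUEST_MODEL, req.model);
  span.setAttribute(axSpanAttributes.LLM_REQUEST_TEMPERATURE, req.temperature);
  span.setAttribute(axSpanAttributes.LLM_REQUEST_MAX_TOKENS, req.maxTokens);
  span.setAttribute(axSpanAttributes.LLM_REQUEST_LLM_IS_STREAMING, req.stream);
  span.setAttribute(axSpanAttributes.LLM_USAGE_PROMPT_TOKENS, usage.promptTokens);
  span.setAttribute(axSpanAttributes.LLM_USAGE_COMPLETION_TOKENS, usage.completionTokens);
}
```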

Defined in

src/ax/trace/index.ts:1