Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
OPENAI_API_KEY=
8 changes: 8 additions & 0 deletions util/opentelemetry-util-genai-openlit-translator/.gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
.env

__pycache__/
.vscode/
*.pyc
.DS_Store

# Ignore example output files
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
# Changelog

All notable changes to this repository are documented in this file.

## Version 0.1.5 - 2025-11-07

- Initial 0.1.5 release of splunk-otel-util-genai-translator-openlit
112 changes: 112 additions & 0 deletions util/opentelemetry-util-genai-openlit-translator/README.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1,112 @@
OpenTelemetry GenAI OpenLit Translator
=========================================

This package automatically translates OpenLit-SDK-instrumented spans into OpenTelemetry GenAI semantic conventions.
It intercepts spans carrying OpenLit-specific ``gen_ai.*`` attributes and creates corresponding spans with semantic-convention-compliant ``gen_ai.*`` attributes,
enabling seamless integration between OpenLit instrumentation and GenAI observability tools.

Mapping Table
-------------

.. list-table::
:header-rows: 1
:widths: 50 50

* - Old Key (OpenLit)
- New Key (OTel SemConv)
* - ``gen_ai.completion.0.content``
- ``gen_ai.output.messages``
* - ``gen_ai.prompt.0.content``
- ``gen_ai.input.messages``
* - ``gen_ai.prompt``
- ``gen_ai.input.messages``
* - ``gen_ai.completion``
- ``gen_ai.output.messages``
* - ``gen_ai.content.prompt``
- ``gen_ai.input.messages``
* - ``gen_ai.content.completion``
- ``gen_ai.output.messages``
* - ``gen_ai.request.embedding_dimension``
- ``gen_ai.embeddings.dimension.count``
* - ``gen_ai.token.usage.input``
- ``gen_ai.usage.input_tokens``
* - ``gen_ai.token.usage.output``
- ``gen_ai.usage.output_tokens``
* - ``gen_ai.llm.provider``
- ``gen_ai.provider.name``
* - ``gen_ai.llm.model``
- ``gen_ai.request.model``
* - ``gen_ai.llm.temperature``
- ``gen_ai.request.temperature``
* - ``gen_ai.llm.max_tokens``
- ``gen_ai.request.max_tokens``
* - ``gen_ai.llm.top_p``
- ``gen_ai.request.top_p``
* - ``gen_ai.operation.type``
- ``gen_ai.operation.name``
* - ``gen_ai.output_messages``
- ``gen_ai.output.messages``
* - ``gen_ai.session.id``
- ``gen_ai.conversation.id``
* - ``gen_ai.openai.thread.id``
- ``gen_ai.conversation.id``
* - ``gen_ai.tool.args``
- ``gen_ai.tool.call.arguments``
* - ``gen_ai.tool.result``
- ``gen_ai.tool.call.result``
* - ``gen_ai.vectordb.name``
- ``db.system.name``
* - ``gen_ai.vectordb.search.query``
- ``db.query.text``
* - ``gen_ai.vectordb.search.results_count``
- ``db.response.returned_rows``


Installation
------------
.. code-block:: bash

pip install opentelemetry-util-genai-openlit-translator

Quick Start (Automatic Registration)
-------------------------------------
The easiest way to use the translator is to simply install it — the package registers itself automatically (via a ``.pth`` file shipped in the wheel), so no manual setup or explicit import is required.

.. code-block:: python

from openai import OpenAI
import openlit
from dotenv import load_dotenv
import os
import traceback

load_dotenv()

try:
openlit.init(otlp_endpoint="http://0.0.0.0:4318")

client = OpenAI(
api_key=os.getenv("OPENAI_API_KEY")
)

chat_completion = client.chat.completions.create(
messages=[
{
"role": "user",
"content": "What is LLM Observability?",
}
],
model="gpt-3.5-turbo",
)
print("response:", chat_completion.choices[0].message.content)
except Exception as e:
print(f"An error occurred: {e}")
traceback.print_exc()


Tests
-----
.. code-block:: bash

pytest util/opentelemetry-util-genai-openlit-translator/tests

Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
#!/usr/bin/env python3
"""Example: exercise the OpenLit -> GenAI translator with a simple OpenAI call.

Initializes the OpenLit SDK (exporting OTLP to a local collector), sends one
chat-completion request, and prints the model's reply. The translator package,
once installed, activates automatically, so it is never imported here.

Requires an ``OPENAI_API_KEY`` (loaded from a ``.env`` file via python-dotenv)
and an OTLP-capable collector listening on http://0.0.0.0:4318.
"""

import os
import traceback

import openlit
from dotenv import load_dotenv
from openai import OpenAI


def main() -> None:
    """Run the example; print the response or a traceback on failure."""
    # Pull OPENAI_API_KEY (and any other settings) from a local .env file.
    load_dotenv()

    try:
        # OpenLit instruments the OpenAI client and exports spans via OTLP.
        openlit.init(otlp_endpoint="http://0.0.0.0:4318")

        client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))

        chat_completion = client.chat.completions.create(
            messages=[
                {
                    "role": "user",
                    "content": "What is LLM Observability?",
                }
            ],
            model="gpt-3.5-turbo",
        )
        print("response:", chat_completion.choices[0].message.content)
    except Exception as e:
        # Best-effort example: report any failure (network, auth, etc.)
        # rather than crashing with an unexplained traceback.
        print(f"An error occurred: {e}")
        traceback.print_exc()


if __name__ == "__main__":
    main()
60 changes: 60 additions & 0 deletions util/opentelemetry-util-genai-openlit-translator/pyproject.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[project]
name = "splunk-otel-util-genai-translator-openlit"
dynamic = ["version"]
description = "openlit -> GenAI translator emitter for OpenTelemetry GenAI"
readme = "README.rst"
license = "Apache-2.0"
requires-python = ">=3.9"
authors = [
{ name = "OpenTelemetry Authors", email = "[email protected]" },
]
classifiers = [
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Programming Language :: Python :: 3.13",
]
dependencies = [
"opentelemetry-instrumentation ~= 0.52b1",
"opentelemetry-semantic-conventions ~= 0.52b1",
"opentelemetry-api>=1.31.0",
"opentelemetry-sdk>=1.31.0",
"splunk-otel-util-genai>=0.1.4",
]

[project.entry-points.opentelemetry_configurator]

[project.optional-dependencies]
test = ["pytest>=7.0.0"]

[project.urls]
Homepage = "https://github.com/open-telemetry/opentelemetry-python-contrib"
Repository = "https://github.com/open-telemetry/opentelemetry-python-contrib"

[tool.hatch.version]
path = "src/opentelemetry/util/genai/version.py"

[tool.hatch.build.targets.sdist]
include = [
"/src",
"/tests",
]

[tool.hatch.build.targets.wheel]
packages = ["src/opentelemetry"]
include = [
"src/opentelemetry_util_genai_openlit_translator.pth",
]

[tool.hatch.build.targets.wheel.force-include]
"src/opentelemetry_util_genai_openlit_translator.pth" = "opentelemetry_util_genai_openlit_translator.pth"
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
# Minimal dependencies to run examples (like examples/openlit_example.py)
# Usage: pip install -r requirements-examples.txt

-e ../opentelemetry-util-genai
-e .

# OpenTelemetry SDK pieces used in the example
opentelemetry-sdk>=1.31.1
openlit
python-dotenv
openai
# (ConsoleSpanExporter is in the SDK extras; no additional exporter deps needed.)
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
# Minimal dependencies to run translator tests locally
# Install into a fresh virtualenv via: pip install -r requirements-tests.txt
# We install the dev GenAI utilities (emitter-enabled) first so the translator
# can extend opentelemetry.util.genai.*
-e ../opentelemetry-util-genai

# Install this package in editable mode
-e .

# Test runner
pytest>=7.0.0

# Optional (uncomment if you want coverage):
# pytest-cov>=4.0.0
Loading
Loading