from traceloop.sdk import Traceloop
from traceloop.sdk.decorators import workflow, task
from traceloop.sdk.instruments import Instruments
from openai import OpenAI

# Initialize OpenAI client
openai_client = OpenAI()

# Initialize OpenLLMetry (reads config from environment variables)
Traceloop.init(disable_batch=True, instruments={Instruments.OPENAI})
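# Note: disable_batch=True exports each span immediately instead of batching,
# which is convenient for short scripts like this one. (Assumption: OpenLLMetry
# typically reads settings such as TRACELOOP_BASE_URL and TRACELOOP_API_KEY from
# the environment; see the Scorecard docs for the exact values for your project.)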
@workflow(name="simple_chat")
def simple_workflow():
    completion = openai_client.chat.completions.create(
        model="gpt-4o-mini",
        messages=[{"role": "user", "content": "Tell me a joke"}]
    )
    return completion.choices[0].message.content

# Run the workflow - all LLM calls will be automatically traced
simple_workflow()
print("Check Scorecard for traces!")