# Source: agentlens/examples/openai_agent.py
"""
AgentLens OpenAI Integration Example — Wrap the OpenAI client for automatic tracing.
Demonstrates:
- Wrapping openai.OpenAI() so all LLM calls are traced as spans
- Combining automatic LLM tracing with manual decision logging
- Using trace tags and metadata
Usage:
pip install vectry-agentlens openai
export OPENAI_API_KEY="sk-..."
python openai_agent.py
"""
# Third-party dependencies (pip install vectry-agentlens openai).
import openai

import agentlens
from agentlens.integrations.openai import wrap_openai

# 1. Initialize the AgentLens SDK before any tracing begins.
agentlens.init(
    api_key="your-api-key-here",  # replace with your real AgentLens key
    endpoint="http://localhost:4200",
)

# 2. Create the OpenAI client, then wrap it so every completion call
#    made through it is recorded automatically as a span.
client = openai.OpenAI()
wrap_openai(client)
# 3. Use the wrapped client inside a trace.
#
# Fix: the body of this `with` block was unindented (a flat paste of the
# original suite), which raises IndentationError before anything runs.
# The suite is restored here with standard 4-space indentation; every
# call, argument, and string is unchanged.
with agentlens.trace("email-drafting-agent", tags=["openai", "email", "demo"]):
    # Decision: which model to use for this task.
    agentlens.log_decision(
        type="TOOL_SELECTION",
        chosen={
            "name": "gpt-4o",
            "confidence": 0.9,
            "params": {"temperature": 0.7, "max_tokens": 512},
        },
        alternatives=[
            {
                "name": "gpt-4o-mini",
                "confidence": 0.7,
                "reason_rejected": "Task needs higher quality reasoning for tone",
            },
        ],
        reasoning="Email drafting requires nuanced tone — use the larger model.",
    )

    # This call is automatically captured as an LLM_CALL span by wrap_openai.
    classification = client.chat.completions.create(
        model="gpt-4o",
        messages=[
            {"role": "system", "content": "Classify the intent of this email request."},
            {
                "role": "user",
                "content": "Write a professional follow-up email to a client "
                "who hasn't responded to our proposal in 2 weeks.",
            },
        ],
        temperature=0.3,  # low temperature — classification should be stable
        max_tokens=100,
    )
    intent = classification.choices[0].message.content
    print(f"Classified intent: {intent}")

    # Decision: choose email style based on the classification above.
    agentlens.log_decision(
        type="ROUTING",
        chosen={
            "name": "polite_follow_up",
            "confidence": 0.88,
            "params": {"tone": "professional-warm", "urgency": "medium"},
        },
        alternatives=[
            {
                "name": "formal_reminder",
                "confidence": 0.65,
                "reason_rejected": "Too stiff for a 2-week follow-up",
            },
            {
                "name": "casual_check_in",
                "confidence": 0.4,
                "reason_rejected": "Client relationship is still formal",
            },
        ],
        reasoning="Professional-warm tone balances urgency with courtesy.",
    )

    # Second LLM call — also auto-captured as a span.
    draft = client.chat.completions.create(
        model="gpt-4o",
        messages=[
            {
                "role": "system",
                "content": "You draft professional emails. Tone: warm but professional.",
            },
            {
                "role": "user",
                "content": f"Draft a polite follow-up email. Context: {intent}",
            },
        ],
        temperature=0.7,  # higher temperature for more natural prose
        max_tokens=512,
    )
    email_body = draft.choices[0].message.content
    print(f"\nDrafted email:\n{email_body}")
# 4. Shut down AgentLens so any buffered trace data is flushed before exit.
agentlens.shutdown()

_done_msg = "\nDone! Check AgentLens dashboard for the 'email-drafting-agent' trace."
print(_done_msg)