When you create your LlamaIndex application, configure the global handler to send traces to Braintrust:
llamaindex_braintrust.py
Copy
Ask AI
"""Send LlamaIndex OTel traces to Braintrust by overriding the handler endpoint."""
import os

import llama_index.core
from dotenv import load_dotenv
from llama_index.core.llms import ChatMessage
from llama_index.llms.openai import OpenAI

load_dotenv()

# Configure LlamaIndex to send OTel traces to Braintrust.
# Note: "arize_phoenix" is LlamaIndex's OTel handler name.
# We redirect it to Braintrust by overriding the endpoint.
api_url = os.environ.get("BRAINTRUST_API_URL", "https://api.braintrust.dev")
llama_index.core.set_global_handler(
    "arize_phoenix",
    endpoint=f"{api_url}/otel/v1/traces",
)

# Your LlamaIndex application code
messages = [
    ChatMessage(role="system", content="Speak like a pirate. ARRR!"),
    ChatMessage(role="user", content="What do llamas sound like?"),
]
result = OpenAI().chat(messages)
print(result)