This guide shows how to manually add tracing middleware to your LLM clients. For zero-code instrumentation, see Automatic Instrumentation in the main README.
Set up OpenTelemetry and initialize Braintrust:
import (
"context"
"log"
"go.opentelemetry.io/otel"
"go.opentelemetry.io/otel/sdk/trace"
"github.com/braintrustdata/braintrust-sdk-go"
)
// Initialize OpenTelemetry and register Braintrust so spans produced by the
// tracing middleware in later examples are exported under "my-project".
func main() {
// Tracer provider that will own all spans produced by the LLM middleware.
tp := trace.NewTracerProvider()
// Flush buffered spans on exit. NOTE(review): Shutdown's error is
// deliberately discarded in this example.
defer tp.Shutdown(context.Background())
// Make tp the process-wide default provider.
otel.SetTracerProvider(tp)
// Register Braintrust with the provider; the returned client handle is
// unused here, so only the error is kept.
_, err := braintrust.New(tp, braintrust.WithProject("my-project"))
if err != nil {
log.Fatal(err)
}
// Now add tracing middleware to your LLM clients below
}import (
"context"
"github.com/openai/openai-go"
"github.com/openai/openai-go/option"
traceopenai "github.com/braintrustdata/braintrust-sdk-go/trace/contrib/openai"
)
// Trace the official openai-go client by attaching the Braintrust middleware.
func main() {
ctx := context.Background()
// The middleware intercepts each request the client makes so it can be
// recorded by the Braintrust tracing layer.
client := openai.NewClient(
option.WithMiddleware(traceopenai.NewMiddleware()),
)
// Sample chat completion; errors are ignored for brevity in this example.
_, _ = client.Chat.Completions.New(ctx, openai.ChatCompletionNewParams{
Messages: []openai.ChatCompletionMessageParamUnion{
openai.UserMessage("Hello!"),
},
Model: openai.ChatModelGPT4oMini,
})
}import (
"context"
"github.com/anthropics/anthropic-sdk-go"
"github.com/anthropics/anthropic-sdk-go/option"
traceanthropic "github.com/braintrustdata/braintrust-sdk-go/trace/contrib/anthropic"
)
// Trace the Anthropic SDK client by attaching the Braintrust middleware.
func main() {
ctx := context.Background()
// The middleware wraps the client's request path for tracing.
client := anthropic.NewClient(
option.WithMiddleware(traceanthropic.NewMiddleware()),
)
// Sample message request; errors are ignored for brevity in this example.
_, _ = client.Messages.New(ctx, anthropic.MessageNewParams{
Model: anthropic.ModelClaudeHaiku4_5,
Messages: []anthropic.MessageParam{
anthropic.NewUserMessage(anthropic.NewTextBlock("Hello!")),
},
MaxTokens: 1024,
})
}import (
"context"
"os"
"google.golang.org/genai"
tracegenai "github.com/braintrustdata/braintrust-sdk-go/trace/contrib/genai"
)
// Trace Google GenAI requests by giving the client a Braintrust-instrumented
// HTTP client, then issue a one-shot generation request.
func main() {
ctx := context.Background()

// Build the client configuration up front; the traced HTTP client routes
// every model call through the Braintrust tracing layer.
cfg := &genai.ClientConfig{
HTTPClient: tracegenai.Client(),
APIKey: os.Getenv("GOOGLE_API_KEY"),
Backend: genai.BackendGeminiAPI,
}
// NOTE(review): the constructor error is discarded for brevity.
client, _ := genai.NewClient(ctx, cfg)

// Sample request; errors are ignored for brevity in this example.
_, _ = client.Models.GenerateContent(ctx,
"gemini-1.5-flash",
genai.Text("Hello!"),
nil,
)
}import (
"context"
"os"
tracegenkit "github.com/braintrustdata/braintrust-sdk-go/trace/contrib/genkit"
"github.com/firebase/genkit/go/ai"
"github.com/firebase/genkit/go/genkit"
"github.com/firebase/genkit/go/plugins/googlegenai"
)
// Trace Genkit generations by passing the Braintrust middleware on each call.
func main() {
ctx := context.Background()
// Initialize Genkit with the Google AI plugin and a default model.
g := genkit.Init(ctx,
genkit.WithPlugins(&googlegenai.GoogleAI{
APIKey: os.Getenv("GOOGLE_API_KEY"),
}),
genkit.WithDefaultModel("googleai/gemini-2.5-flash"),
)
// WithMiddleware attaches the Braintrust tracing layer to this Generate
// call. Errors are ignored for brevity in this example.
_, _ = genkit.Generate(ctx, g,
ai.WithPrompt("Hello!"),
ai.WithMiddleware(tracegenkit.NewMiddleware()),
)
}Use trace/contrib/genkit as the top-level tracing layer for Genkit requests. Avoid combining it with lower-level provider integrations such as trace/contrib/openai, trace/contrib/anthropic, or trace/contrib/genai on the same request path, or you may emit nested LLM spans.
With Orchestrion, ai.WithMiddleware(tracegenkit.NewMiddleware()) is injected automatically into all genkit.Generate, genkit.GenerateText, and genkit.GenerateStream calls.
import (
"context"
"os"
"github.com/sashabaranov/go-openai"
traceopenai "github.com/braintrustdata/braintrust-sdk-go/trace/contrib/github.com/sashabaranov/go-openai"
)
// Trace the sashabaranov/go-openai client by swapping in a Braintrust-traced
// HTTP client, then issue a sample chat completion.
func main() {
ctx := context.Background()

cfg := openai.DefaultConfig(os.Getenv("OPENAI_API_KEY"))
// Route every request through the tracing HTTP client.
cfg.HTTPClient = traceopenai.Client()
c := openai.NewClientWithConfig(cfg)

req := openai.ChatCompletionRequest{
Model: openai.GPT4oMini,
Messages: []openai.ChatCompletionMessage{
{Role: openai.ChatMessageRoleUser, Content: "Hello!"},
},
}
// Errors are ignored for brevity in this example.
_, _ = c.CreateChatCompletion(ctx, req)
}import (
"context"
"github.com/tmc/langchaingo/llms"
"github.com/tmc/langchaingo/llms/openai"
tracelangchaingo "github.com/braintrustdata/braintrust-sdk-go/trace/contrib/langchaingo"
)
// Trace langchaingo LLM calls by registering a Braintrust callback handler.
func main() {
ctx := context.Background()
// Callback handler that receives langchaingo LLM events for tracing.
handler := tracelangchaingo.NewHandler()
// NOTE(review): openai.New's error is discarded for brevity.
llm, _ := openai.New(openai.WithCallback(handler))
// Sample request; errors are ignored for brevity in this example.
_, _ = llm.GenerateContent(ctx, []llms.MessageContent{
llms.TextParts(llms.ChatMessageTypeHuman, "Hello!"),
})
}For richer traces, use NewHandlerWithOptions with TracerProvider, Model, and Provider options.
See examples/langchaingo for complete examples.