LangWatch supports tracing Google Gemini models through Google Cloud Vertex AI’s OpenAI-compatible endpoint.

Installation

go get github.com/langwatch/langwatch/sdk-go github.com/openai/openai-go

Usage

By default, the LangWatch API key is read from the LANGWATCH_API_KEY environment variable.
Configure the OpenAI client with your Gemini endpoint and API key:
package main

import (
	"context"
	"log"
	"os"

	otelopenai "github.com/langwatch/langwatch/sdk-go/instrumentation/openai"
	"github.com/openai/openai-go"
	oaioption "github.com/openai/openai-go/option"
)

func main() {
	ctx := context.Background()

	// Point the OpenAI client at the Vertex AI OpenAI-compatible endpoint
	// and attach the LangWatch middleware so each request is traced.
	client := openai.NewClient(
		oaioption.WithAPIKey(os.Getenv("GEMINI_API_KEY")),
		oaioption.WithBaseURL(os.Getenv("GEMINI_BASE_URL")),
		oaioption.WithMiddleware(otelopenai.Middleware("<project_name>",
			// Record request and response bodies on the trace.
			otelopenai.WithCaptureInput(),
			otelopenai.WithCaptureOutput(),
		)),
	)

	// Send a chat completion request; the middleware traces it automatically.
	response, err := client.Chat.Completions.New(ctx, openai.ChatCompletionNewParams{
		Model: "gemini-2.5-flash",
		Messages: []openai.ChatCompletionMessageParamUnion{
			openai.SystemMessage("You are a helpful assistant."),
			openai.UserMessage("Hello, Gemini!"),
		},
	})
	if err != nil {
		log.Fatalf("Chat completion failed: %v", err)
	}

	log.Printf("Chat completion: %s", response.Choices[0].Message.Content)
}
Set GEMINI_BASE_URL to your Vertex AI OpenAI-compatible endpoint URL, and pass a Google Cloud access token as the API key.
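A minimal sketch of the environment setup, assuming the gcloud CLI is installed and authenticated. The base URL below follows the Vertex AI OpenAI-compatibility path format with a placeholder project ID and region; verify the exact path against Google's Vertex AI documentation:

# Placeholder project ID and region; verify the path against the Vertex AI docs.
export LANGWATCH_API_KEY="your-langwatch-api-key"
export GEMINI_BASE_URL="https://us-central1-aiplatform.googleapis.com/v1beta1/projects/your-project-id/locations/us-central1/endpoints/openapi"
# Access tokens from gcloud are short-lived (roughly an hour), so long-running
# services should refresh this value rather than setting it once.
export GEMINI_API_KEY="$(gcloud auth print-access-token)"

Note that the Vertex AI endpoint may require a publisher prefix on the model name (for example, google/gemini-2.5-flash), whereas the Gemini API's own OpenAI-compatible endpoint accepts the bare model name used above.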