package main

import (
	"context"
	"log"
	"os"

	otelopenai "github.com/langwatch/langwatch/sdk-go/instrumentation/openai"

	"github.com/openai/openai-go"
	oaioption "github.com/openai/openai-go/option"
)
func main() {
	ctx := context.Background()

	client := openai.NewClient(
		// Use the Groq API endpoint
		oaioption.WithBaseURL("https://api.groq.com/openai/v1"),
		// Use your Groq API key
		oaioption.WithAPIKey(os.Getenv("GROQ_API_KEY")),
		// Add the middleware, identifying the system as "groq"
		oaioption.WithMiddleware(otelopenai.Middleware("my-groq-app",
			otelopenai.WithGenAISystem("groq"),
			otelopenai.WithCaptureInput(),
			otelopenai.WithCaptureOutput(),
		)),
	)
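
	// Note: the middleware above records an OpenTelemetry span for each request.
	// This example assumes an OpenTelemetry tracer provider that exports spans to
	// LangWatch has already been configured elsewhere (typically at application
	// startup); without one, the captured spans are not exported anywhere.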

	// Make a call to a model on Groq
	response, err := client.Chat.Completions.New(ctx, openai.ChatCompletionNewParams{
		Model: "llama3-70b-8192",
		Messages: []openai.ChatCompletionMessageParamUnion{
			openai.UserMessage("Hello, Groq! Can you explain the concept of inference speed?"),
		},
	})
	if err != nil {
		log.Fatalf("Groq API call failed: %v", err)
	}

	log.Printf("Response from Groq: %s", response.Choices[0].Message.Content)
}