Sapiens supports multiple Language Model providers through a unified interface. All providers implement the same basic pattern: they return an OpenAI-compatible client that the Agent can use.
OpenAI provides access to GPT models including GPT-4 and GPT-3.5.
llm := NewOpenai(os.Getenv("OPENAI_API_KEY"))
Configuration:
gpt-4.1-2025-04-14
OPENAI_API_KEY
Example:
package main

import (
	"context"
	"os"
)

func main() {
	// Initialize the OpenAI provider with the API key from the environment.
	llm := NewOpenai(os.Getenv("OPENAI_API_KEY"))

	// Create an agent backed by the provider's client and its default model.
	agent := NewAgent(
		context.Background(),
		llm.Client(),
		llm.GetDefaultModel(),
		"You are a helpful assistant",
	)

	// Use the agent...
	_ = agent // placeholder so the example compiles; replace with real usage
}
Google’s Gemini models accessed through their OpenAI-compatible API.
llm := NewGemini(os.Getenv("GEMINI_API_KEY"))
Configuration:
gemini-2.0-flash
https://generativelanguage.googleapis.com/v1beta/openai/
GEMINI_API_KEY
Example:
package main

import (
	"context"
	"os"
)

func main() {
	// Initialize the Gemini provider with the API key from the environment.
	llm := NewGemini(os.Getenv("GEMINI_API_KEY"))

	// Create an agent backed by the provider's client and its default model.
	agent := NewAgent(
		context.Background(),
		llm.Client(),
		llm.GetDefaultModel(),
		"You are a helpful assistant",
	)

	// Use the agent...
	_ = agent // placeholder so the example compiles; replace with real usage
}
Anthropic’s Claude models accessed through their API.
llm := NewAnthropic(os.Getenv("ANTHROPIC_API_KEY"))
Configuration:
claude-sonnet-3.5
https://api.anthropic.com/v1/
ANTHROPIC_API_KEY
Example:
package main

import (
	"context"
	"os"
)

func main() {
	// Initialize the Anthropic provider with the API key from the environment.
	llm := NewAnthropic(os.Getenv("ANTHROPIC_API_KEY"))

	// Create an agent backed by the provider's client and its default model.
	agent := NewAgent(
		context.Background(),
		llm.Client(),
		llm.GetDefaultModel(),
		"You are a helpful assistant",
	)

	// Use the agent...
	_ = agent // placeholder so the example compiles; replace with real usage
}
Local models served through Ollama’s OpenAI-compatible API.
llm := NewOllama(baseUrl, authToken, modelName)
Configuration:
http://localhost:11434/v1/
Example:
package main

import (
	"context"
)

func main() {
	// Initialize the Ollama provider against a locally running server.
	llm := NewOllama(
		"http://localhost:11434/v1/", // Base URL
		"",                           // Auth token (optional for local)
		"llama2",                     // Model name
	)

	// Create an agent backed by the provider's client and its default model.
	agent := NewAgent(
		context.Background(),
		llm.Client(),
		llm.GetDefaultModel(),
		"You are a helpful assistant",
	)

	// Use the agent...
	_ = agent // placeholder so the example compiles; replace with real usage
}
All providers implement the same basic interface:
// LLMProvider is the common interface implemented by every model backend.
type LLMProvider interface {
// Client returns an OpenAI-compatible client configured for this provider.
Client() *openai.Client
// GetDefaultModel returns the provider's default model name.
GetDefaultModel() string
}
Client() returns an OpenAI-compatible client configured for the specific provider.
GetDefaultModel() returns the default model name for the provider.
// OpenaiInterface holds the configuration for the OpenAI provider.
type OpenaiInterface struct {
BaseUrl string
DefaultModel string
OrgId string
AuthToken string
}
func NewOpenai(authToken string) *OpenaiInterface
func (g *OpenaiInterface) Client() *openai.Client
func (g *OpenaiInterface) GetDefaultModel() string
// GeminiInterface holds the configuration for the Google Gemini provider.
type GeminiInterface struct {
BaseUrl string
DefaultModel string
OrgId string
AuthToken string
}
func NewGemini(authToken string) *GeminiInterface
func (g *GeminiInterface) Client() *openai.Client
func (g *GeminiInterface) GetDefaultModel() string
// AnthropicInterface holds the configuration for the Anthropic provider.
type AnthropicInterface struct {
BaseUrl string
DefaultModel string
OrgId string
AuthToken string
}
func NewAnthropic(authToken string) *AnthropicInterface
func (g *AnthropicInterface) Client() *openai.Client
func (g *AnthropicInterface) GetDefaultModel() string
// OllamaInterface holds the configuration for a local Ollama provider.
type OllamaInterface struct {
BaseUrl string
DefaultModel string
OrgId string
AuthToken string
}
func NewOllama(baseUrl, authToken, defaultModel string) *OllamaInterface
func (g *OllamaInterface) Client() *openai.Client
func (g *OllamaInterface) GetDefaultModel() string
You can easily switch between providers by changing the LLM initialization:
package main

import (
	"context"
	"os"
)

func main() {
	var llm LLMProvider

	// Choose provider based on the LLM_PROVIDER environment variable.
	provider := os.Getenv("LLM_PROVIDER")
	switch provider {
	case "openai":
		llm = NewOpenai(os.Getenv("OPENAI_API_KEY"))
	case "gemini":
		llm = NewGemini(os.Getenv("GEMINI_API_KEY"))
	case "anthropic":
		llm = NewAnthropic(os.Getenv("ANTHROPIC_API_KEY"))
	case "ollama":
		llm = NewOllama("http://localhost:11434/v1/", "", "llama2")
	default:
		// Fall back to Gemini when no (or an unknown) provider is set.
		llm = NewGemini(os.Getenv("GEMINI_API_KEY"))
	}

	// Create agent with chosen provider
	agent := NewAgent(
		context.Background(),
		llm.Client(),
		llm.GetDefaultModel(),
		"You are a helpful assistant",
	)

	// Use the agent normally...
	_ = agent // placeholder so the example compiles; replace with real usage
}
Set the appropriate environment variables for your chosen providers:
# OpenAI
export OPENAI_API_KEY="sk-your-openai-key"
# Google Gemini
export GEMINI_API_KEY="your-gemini-api-key"
# Anthropic
export ANTHROPIC_API_KEY="your-anthropic-key"
# Optional: Choose default provider
export LLM_PROVIDER="gemini"
All providers use the same error handling patterns through the OpenAI client:
resp, err := agent.Ask(messages)
if err != nil {
// Handle provider-specific errors
switch {
case strings.Contains(err.Error(), "authentication"):
log.Printf("Authentication error - check your API key")
case strings.Contains(err.Error(), "rate limit"):
log.Printf("Rate limit exceeded - try again later")
case strings.Contains(err.Error(), "model"):
log.Printf("Model error - check model name and availability")
default:
log.Printf("Provider error: %v", err)
}
return
}
While each provider has a default model, you can specify a different model when creating the agent:
// Use default model
agent := NewAgent(ctx, llm.Client(), llm.GetDefaultModel(), systemPrompt)
// Use specific model
agent := NewAgent(ctx, llm.Client(), "gpt-4", systemPrompt)
agent := NewAgent(ctx, llm.Client(), "gemini-1.5-pro", systemPrompt)
agent := NewAgent(ctx, llm.Client(), "claude-3-opus", systemPrompt)