# Configuration
All configuration is done via package-level variables and environment variables.
```go
import ai "gopkg.in/dragon-born/go-llm.v1"

func init() {
	// Default model for Ask(), Prompt(), etc.
	ai.DefaultModel = ai.ModelClaudeOpus // Change from GPT-5

	// Default provider (OpenRouter, OpenAI, Anthropic, Ollama, Azure)
	ai.SetDefaultProvider(ai.ProviderAnthropic) // Change from OpenRouter

	// Directory for prompt files
	ai.PromptsDir = "my-prompts" // Change from "prompts"

	// Debug mode - print all requests/responses
	ai.Debug = true

	// Pretty mode - colored terminal output
	ai.Pretty = true // default

	// Cache mode - cache identical requests
	ai.Cache = true
}
```

This package supports process-wide rate limiting via `ai.RateLimiter`. When set, outgoing requests will block until allowed.
```go
import (
	"time"

	ai "gopkg.in/dragon-born/go-llm.v1"
)

func init() {
	// 60 requests per minute (global)
	ai.RateLimiter = ai.NewLimiter(60, time.Minute)

	// Or ~2 requests per second (allows small bursts)
	// ai.RateLimiter = ai.NewLimiterPerSecond(2.0)
}
```

Disable it by setting `ai.RateLimiter = nil`.
`ai.NewConcurrencyLimiter()` is a simple helper for capping how many of your own goroutines run at once:
```go
cl := ai.NewConcurrencyLimiter(5)
for _, job := range jobs {
	cl.Acquire()
	go func(job Job) {
		defer cl.Release()
		_, _ = ai.GPT5().Ask(job.Prompt)
	}(job)
}
```

Set the API key for your provider(s):
```bash
# OpenRouter (default provider)
export OPENROUTER_API_KEY="sk-or-..."

# Direct OpenAI
export OPENAI_API_KEY="sk-..."

# Direct Anthropic
export ANTHROPIC_API_KEY="sk-ant-..."

# Azure OpenAI (optional)
export AZURE_OPENAI_API_KEY="..."

# Ollama requires no API key (runs locally)
```

The package auto-detects which key to use based on the provider you're calling.
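For example, with the keys above exported, each call resolves the key for its own provider. A minimal sketch using builders shown elsewhere on this page; the per-provider key mapping follows from the auto-detection rule, but any lookup detail beyond that is an assumption:

```go
ai.Anthropic().Claude().Ask("Hello")      // Anthropic builder -> ANTHROPIC_API_KEY
ai.GPT5().Ask("Hello")                    // default provider (OpenRouter) -> OPENROUTER_API_KEY
ai.Ollama().Use("llama3:8b").Ask("Hello") // local Ollama -> no key needed
```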
The `DefaultModel` is used by:

- `ai.Ask()` - quick one-liners
- `ai.AskWith()` - one-liners with system prompt
- `ai.Prompt()` - loading prompts
- `ai.PromptWith()` - loading prompts with vars
- `ai.Default()` - get a builder with default model
- Error fallbacks
```go
// Change default to Claude
ai.DefaultModel = ai.ModelClaudeOpus

// Now these all use Claude:
ai.Ask("Hello")
ai.Prompt("analyst").Ask("Update?")

// Explicit model still works
ai.GPT5().Ask("Hello") // Uses GPT-5 regardless
```

```go
// Default
ai.PromptsDir = "prompts"

// Change to custom directory
ai.PromptsDir = "system-prompts"
ai.PromptsDir = "/absolute/path/to/prompts"

// Now LoadPrompt and Prompt() look here:
ai.Prompt("analyst") // Loads system-prompts/analyst.md
```

| Variable/Function | Default | Description |
|---|---|---|
| `ai.DefaultModel` | `ModelGPT5` | Default model for shortcuts |
| `ai.DefaultProvider` | `ProviderOpenRouter` | Default provider |
| `ai.SetDefaultProvider()` | - | Change default provider |
| `ai.PromptsDir` | `"prompts"` | Prompt files directory |
| `ai.Debug` | `false` | Print requests/responses |
| `ai.Pretty` | `true` | Colored terminal output |
| `ai.Cache` | `false` | Cache identical requests |
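Because these are ordinary package variables, you can drive them from your own environment switches. A minimal sketch; `AI_DEBUG` and `AI_CACHE` are hypothetical names invented for this example, not variables the package reads itself:

```go
import (
	"os"

	ai "gopkg.in/dragon-born/go-llm.v1"
)

func init() {
	// AI_DEBUG / AI_CACHE are this sketch's own env names (hypothetical);
	// the package only sees the resulting bool values.
	ai.Debug = os.Getenv("AI_DEBUG") != ""
	ai.Cache = os.Getenv("AI_CACHE") != ""
}
```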
```go
// All ai.Claude(), ai.GPT5() etc. will now use Anthropic directly
ai.SetDefaultProvider(ai.ProviderAnthropic)

// Available providers:
// ai.ProviderOpenRouter (default)
// ai.ProviderOpenAI
// ai.ProviderAnthropic
// ai.ProviderOllama
// ai.ProviderAzure
```

```go
import (
	"time"

	ai "gopkg.in/dragon-born/go-llm.v1"
)

// Custom Anthropic client with timeout
anthropic := ai.NewClient(ai.ProviderAnthropic,
	ai.WithAPIKey("sk-ant-..."),
	ai.WithTimeout(60 * time.Second),
)

// Custom Ollama client on a different host
ollama := ai.NewClient(ai.ProviderOllama,
	ai.WithBaseURL("http://gpu-server:11434"),
)

// Use them
anthropic.Claude().Ask("Hello")
ollama.Use("llama3:8b").Ask("Hello")
```

| Option | Description |
|---|---|
| `WithAPIKey(key)` | Set API key |
| `WithBaseURL(url)` | Set custom base URL |
| `WithTimeout(d)` | Set request timeout |
| `WithHeaders(map)` | Set custom headers |
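`WithHeaders` is the one option not demonstrated above. A minimal sketch; the `map[string]string` signature and the client-level `GPT5()` builder are assumptions based on the patterns shown on this page, and the header names are only examples:

```go
// Sketch under assumptions: WithHeaders taking map[string]string, and a
// client-level GPT5() builder mirroring the package-level ai.GPT5().
client := ai.NewClient(ai.ProviderOpenRouter,
	ai.WithAPIKey("sk-or-..."),
	ai.WithHeaders(map[string]string{
		// Example headers only; nothing here is required by the package.
		"HTTP-Referer": "https://example.com",
		"X-Title":      "my-app",
	}),
)
client.GPT5().Ask("Hello")
```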
```go
package main

import (
	"os"

	ai "gopkg.in/dragon-born/go-llm.v1"
)

func init() {
	// Use Claude as default everywhere
	ai.DefaultModel = ai.ModelClaudeOpus

	// Use Anthropic directly (instead of OpenRouter)
	ai.SetDefaultProvider(ai.ProviderAnthropic)

	// For local development, use Ollama
	if os.Getenv("LOCAL_DEV") != "" {
		ai.SetDefaultProvider(ai.ProviderOllama)
		ai.DefaultModel = ai.Model("llama3:8b")
	}

	// Custom directories
	ai.PromptsDir = "config/prompts"

	// Development settings
	ai.Debug = false
	ai.Cache = true // Save API calls during dev
}

func main() {
	// Now everything uses your configuration
	ai.Ask("Hello!")
	ai.Prompt("analyst").Ask("Update?")

	// Or be explicit about provider
	ai.Anthropic().Claude().Ask("Hello")
	ai.Ollama().Use("llama3:8b").Ask("Hello")
}
```

- Multi-Provider Support - Full provider documentation