Documentation Index Fetch the complete documentation index at: https://docs.getbifrost.ai/llms.txt
Use this file to discover all available pages before exploring further.
Your browser does not support the video tag.
30-Second Setup
Get Bifrost running in your Go application with minimal setup. This guide shows you how to integrate multiple AI providers through a single, unified interface.
1. Install Package
go mod init my-bifrost-app
go get github.com/maximhq/bifrost/core
2. Set Environment Variable
export OPENAI_API_KEY="your-openai-api-key"
3. Create main.go
package main
import (
	"context"
	"fmt"
	"os"

	bifrost "github.com/maximhq/bifrost/core"
	"github.com/maximhq/bifrost/core/schemas"
)
// MyAccount implements Bifrost's Account interface, supplying the
// configured providers, their API keys, and per-provider configuration.
type MyAccount struct {}
// The Account interface requires the three methods implemented below.
// GetConfiguredProviders returns the providers this account is configured
// for. Bifrost uses this list during initialization; here only OpenAI.
func (a *MyAccount) GetConfiguredProviders() ([]schemas.ModelProvider, error) {
	return []schemas.ModelProvider{schemas.OpenAI}, nil
}
// GetKeysForProvider returns the API keys Bifrost may use for the given
// provider. Only OpenAI is supported; the key value is read from the
// OPENAI_API_KEY environment variable.
func (a *MyAccount) GetKeysForProvider(ctx *context.Context, provider schemas.ModelProvider) ([]schemas.Key, error) {
	if provider == schemas.OpenAI {
		return []schemas.Key{{
			Value:  os.Getenv("OPENAI_API_KEY"),
			Models: schemas.WhiteList{"*"}, // "*" allows this key to be used with any model
			Weight: 1.0,
		}}, nil
	}
	return nil, fmt.Errorf("provider %s not supported", provider)
}
// GetConfigForProvider returns the network/concurrency configuration for
// the given provider. The defaults are returned for OpenAI; customize the
// returned ProviderConfig for advanced use cases.
func (a *MyAccount) GetConfigForProvider(provider schemas.ModelProvider) (*schemas.ProviderConfig, error) {
	if provider == schemas.OpenAI {
		return &schemas.ProviderConfig{
			NetworkConfig:            schemas.DefaultNetworkConfig,
			ConcurrencyAndBufferSize: schemas.DefaultConcurrencyAndBufferSize,
		}, nil
	}
	return nil, fmt.Errorf("provider %s not supported", provider)
}
// main initializes a Bifrost client backed by MyAccount, sends a single
// chat completion request to OpenAI, and prints the response text.
func main() {
	client, initErr := bifrost.Init(context.Background(), schemas.BifrostConfig{
		Account: &MyAccount{},
	})
	if initErr != nil {
		panic(initErr)
	}
	// Ensure the client's worker pools are drained on exit.
	defer client.Shutdown()

	messages := []schemas.ChatMessage{
		{
			Role: schemas.ChatMessageRoleUser,
			Content: &schemas.ChatMessageContent{
				ContentStr: schemas.Ptr("Hello, Bifrost!"),
			},
		},
	}

	response, err := client.ChatCompletionRequest(schemas.NewBifrostContext(context.Background(), schemas.NoDeadline), &schemas.BifrostChatRequest{
		Provider: schemas.OpenAI,
		Model:    "gpt-4o-mini",
		Input:    messages,
	})
	if err != nil {
		panic(err)
	}
	fmt.Println("Response:", *response.Choices[0].Message.Content.ContentStr)
}
4. Run Your App
go run main.go
# Output: Response: Hello! I'm Bifrost, your AI model gateway...
🎉 That’s it! You’re now running Bifrost in your Go application.
What Just Happened?
Account Interface: MyAccount provides API keys and the list of providers to Bifrost for initialization and key lookups.
Provider Resolution : schemas.OpenAI tells Bifrost to use OpenAI as the provider.
Model Selection : "gpt-4o-mini" specifies which model to use.
Unified API : Same interface works for any provider/model combination (OpenAI, Anthropic, Vertex etc.)
Next Steps
Now that you have Bifrost running, explore these focused guides:
Essential Topics
Advanced Topics
Happy coding with Bifrost! 🚀