package main
import (
"context"
"fmt"
"github.com/maximhq/bifrost"
"github.com/maximhq/bifrost/core/schemas"
)
// chatWithFallbacks issues a chat completion against OpenAI with an automatic
// fallback chain (OpenAI → Anthropic → Bedrock) and prints the text of the
// first choice. Bifrost tries each fallback in order only after the previous
// provider fails, so err is non-nil only when every provider has failed.
func chatWithFallbacks(client *bifrost.Bifrost) {
	ctx := context.Background()

	// Chat request with multiple fallbacks
	response, err := client.ChatCompletionRequest(ctx, &schemas.BifrostChatRequest{
		Provider: schemas.OpenAI,
		Model:    "gpt-4o-mini",
		Input: []schemas.ChatMessage{
			{
				Role: schemas.ChatMessageRoleUser,
				Content: schemas.ChatMessageContent{
					ContentStr: bifrost.Ptr("Explain quantum computing in simple terms"),
				},
			},
		},
		// Fallback chain: OpenAI → Anthropic → Bedrock
		Fallbacks: []schemas.Fallback{
			{
				Provider: schemas.Anthropic,
				Model:    "claude-3-5-sonnet-20241022",
			},
			{
				Provider: schemas.Bedrock,
				Model:    "anthropic.claude-3-sonnet-20240229-v1:0",
			},
		},
		Params: &schemas.ChatParameters{
			MaxCompletionTokens: bifrost.Ptr(1000),
			Temperature:         bifrost.Ptr(0.7),
		},
	})
	if err != nil {
		fmt.Printf("All providers failed: %v\n", err)
		return
	}

	// Guard before dereferencing: Choices may be empty, and ContentStr is a
	// *string that is nil when the response carries no plain-text content
	// (e.g. tool calls or structured content blocks). Without these checks
	// the Printf below panics on such responses.
	if len(response.Choices) == 0 ||
		response.Choices[0].BifrostNonStreamResponseChoice == nil ||
		response.Choices[0].BifrostNonStreamResponseChoice.Message.Content.ContentStr == nil {
		fmt.Println("Response contained no text content")
		return
	}

	// Success! Response came from whichever provider worked
	fmt.Printf("Response from %s: %s\n",
		response.ExtraFields.Provider,
		*response.Choices[0].BifrostNonStreamResponseChoice.Message.Content.ContentStr)
}