1 change: 1 addition & 0 deletions .github/workflows/pr-tests.yml
@@ -115,6 +115,7 @@ jobs:
COHERE_API_KEY: ${{ secrets.COHERE_API_KEY }}
VERTEX_CREDENTIALS: ${{ secrets.VERTEX_CREDENTIALS }}
VERTEX_PROJECT_ID: ${{ secrets.VERTEX_PROJECT_ID }}
HUGGING_FACE_API_KEY: ${{ secrets.HUGGING_FACE_API_KEY }}
run: |
echo "Running tests for PR #${{ github.event.pull_request.number || 'manual run' }}"
./.github/workflows/scripts/run-tests.sh
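The new secret is consumed on the Go side via `os.Getenv("HUGGING_FACE_API_KEY")` (see `core/internal/testutil/account.go` further down). A minimal sketch of how a test could guard on the key being present when running without repository secrets; the helper name and skip behavior are assumptions, not part of this PR:

```go
package testutil

import (
	"os"
	"testing"
)

// skipIfNoHuggingFaceKey skips HuggingFace-backed tests when the CI
// secret is not configured, e.g. on forks without repository secrets.
func skipIfNoHuggingFaceKey(t *testing.T) {
	t.Helper()
	if os.Getenv("HUGGING_FACE_API_KEY") == "" {
		t.Skip("HUGGING_FACE_API_KEY not set; skipping HuggingFace tests")
	}
}
```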
4 changes: 4 additions & 0 deletions .github/workflows/release-pipeline.yml
@@ -115,6 +115,7 @@ jobs:
VERTEX_CREDENTIALS: ${{ secrets.VERTEX_CREDENTIALS }}
VERTEX_PROJECT_ID: ${{ secrets.VERTEX_PROJECT_ID }}
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
HUGGING_FACE_API_KEY: ${{ secrets.HUGGING_FACE_API_KEY }}
AWS_S3_BUCKET: ${{ secrets.AWS_S3_BUCKET }}
AWS_BEDROCK_ROLE_ARN: ${{ secrets.AWS_BEDROCK_ROLE_ARN }}
run: ./.github/workflows/scripts/release-core.sh "${{ needs.detect-changes.outputs.core-version }}"
@@ -189,6 +190,7 @@ jobs:
VERTEX_CREDENTIALS: ${{ secrets.VERTEX_CREDENTIALS }}
VERTEX_PROJECT_ID: ${{ secrets.VERTEX_PROJECT_ID }}
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
HUGGING_FACE_API_KEY: ${{ secrets.HUGGING_FACE_API_KEY }}
run: ./.github/workflows/scripts/release-framework.sh "${{ needs.detect-changes.outputs.framework-version }}"

plugins-release:
@@ -265,6 +267,7 @@ jobs:
VERTEX_CREDENTIALS: ${{ secrets.VERTEX_CREDENTIALS }}
VERTEX_PROJECT_ID: ${{ secrets.VERTEX_PROJECT_ID }}
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
HUGGING_FACE_API_KEY: ${{ secrets.HUGGING_FACE_API_KEY }}
run: ./.github/workflows/scripts/release-all-plugins.sh '${{ needs.detect-changes.outputs.changed-plugins }}'

bifrost-http-release:
@@ -353,6 +356,7 @@ jobs:
VERTEX_CREDENTIALS: ${{ secrets.VERTEX_CREDENTIALS }}
VERTEX_PROJECT_ID: ${{ secrets.VERTEX_PROJECT_ID }}
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
HUGGING_FACE_API_KEY: ${{ secrets.HUGGING_FACE_API_KEY }}
run: ./.github/workflows/scripts/release-bifrost-http.sh "${{ needs.detect-changes.outputs.transport-version }}"

# Docker build amd64
3 changes: 3 additions & 0 deletions core/bifrost.go
@@ -23,6 +23,7 @@ import (
"github.com/maximhq/bifrost/core/providers/elevenlabs"
"github.com/maximhq/bifrost/core/providers/gemini"
"github.com/maximhq/bifrost/core/providers/groq"
"github.com/maximhq/bifrost/core/providers/huggingface"
"github.com/maximhq/bifrost/core/providers/mistral"
"github.com/maximhq/bifrost/core/providers/nebius"
"github.com/maximhq/bifrost/core/providers/ollama"
@@ -1885,6 +1886,8 @@ func (bifrost *Bifrost) createBaseProvider(providerKey schemas.ModelProvider, co
return elevenlabs.NewElevenlabsProvider(config, bifrost.logger), nil
case schemas.Nebius:
return nebius.NewNebiusProvider(config, bifrost.logger)
case schemas.HuggingFace:
return huggingface.NewHuggingFaceProvider(config, bifrost.logger), nil
default:
return nil, fmt.Errorf("unsupported provider: %s", targetProviderKey)
}
1 change: 1 addition & 0 deletions core/changelog.md
@@ -1,2 +1,3 @@
- feat(providers): added HuggingFace provider using the Inference Providers API, with support for chat (including streaming), responses (including streaming), speech-to-text transcription, and text-to-speech synthesis
- fix(mcp): ensure properties field is always set for tools - [@CryptoFewka](https://github.com/CryptoFewka)
- fix(perplexity): correct search_domain_filter json tag - [@hnoguchigr](https://github.com/hnoguchigr)
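For orientation, here is a minimal sketch of how the new provider is constructed, using only identifiers that appear in this diff (`schemas.HuggingFace`, `schemas.Key`, `schemas.ProviderConfig`, `huggingface.NewHuggingFaceProvider`, and the `HUGGING_FACE_API_KEY` secret). The `schemas` import path, the exact config parameter type, and the nil logger are assumptions; in practice Bifrost's `createBaseProvider` performs this wiring when an account lists `schemas.HuggingFace`.

```go
package main

import (
	"os"
	"time"

	"github.com/maximhq/bifrost/core/providers/huggingface"
	"github.com/maximhq/bifrost/core/schemas" // import path assumed
)

func main() {
	// Key supplied the same way the test account in this PR does.
	key := schemas.Key{
		Value:  os.Getenv("HUGGING_FACE_API_KEY"),
		Models: []string{}, // empty slice: no per-key model restriction in the test account
		Weight: 1.0,
	}
	_ = key

	// Network settings mirror the testutil config added in this PR.
	cfg := &schemas.ProviderConfig{
		NetworkConfig: schemas.NetworkConfig{
			DefaultRequestTimeoutInSeconds: 300,
			MaxRetries:                     10,
			RetryBackoffInitial:            2 * time.Second,
			RetryBackoffMax:                30 * time.Second,
		},
	}

	// createBaseProvider calls this constructor for schemas.HuggingFace;
	// the real call passes bifrost.logger, nil is used here only for brevity.
	provider := huggingface.NewHuggingFaceProvider(cfg, nil)
	_ = provider
}
```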
56 changes: 56 additions & 0 deletions core/internal/testutil/account.go
@@ -111,6 +111,7 @@ func (account *ComprehensiveTestAccount) GetConfiguredProviders() ([]schemas.Mod
schemas.Cerebras,
schemas.Gemini,
schemas.OpenRouter,
schemas.HuggingFace,
schemas.Nebius,
ProviderOpenAICustom,
}, nil
@@ -323,6 +324,14 @@ func (account *ComprehensiveTestAccount) GetKeysForProvider(ctx *context.Context
UseForBatchAPI: bifrost.Ptr(true),
},
}, nil
case schemas.HuggingFace:
return []schemas.Key{
{
Value: os.Getenv("HUGGING_FACE_API_KEY"),
Models: []string{},
Weight: 1.0,
},
}, nil
case schemas.Nebius:
return []schemas.Key{
{
@@ -577,6 +586,19 @@ func (account *ComprehensiveTestAccount) GetConfigForProvider(providerKey schema
BufferSize: 10,
},
}, nil
case schemas.HuggingFace:
return &schemas.ProviderConfig{
NetworkConfig: schemas.NetworkConfig{
DefaultRequestTimeoutInSeconds: 300,
MaxRetries: 10, // HuggingFace inference latency can vary widely, so retry generously
RetryBackoffInitial: 2 * time.Second,
RetryBackoffMax: 30 * time.Second,
},
ConcurrencyAndBufferSize: schemas.ConcurrencyAndBufferSize{
Concurrency: Concurrency,
BufferSize: 10,
},
}, nil
case schemas.Nebius:
return &schemas.ProviderConfig{
NetworkConfig: schemas.NetworkConfig{
@@ -995,4 +1017,38 @@ var AllProviderConfigs = []ComprehensiveTestConfig{
{Provider: schemas.OpenAI, Model: "gpt-4o-mini"},
},
},
{
Provider: schemas.HuggingFace,
ChatModel: "groq/openai/gpt-oss-120b",
VisionModel: "fireworks-ai/Qwen/Qwen2.5-VL-32B-Instruct",
EmbeddingModel: "sambanova/intfloat/e5-mistral-7b-instruct",
TranscriptionModel: "fal-ai/openai/whisper-large-v3",
SpeechSynthesisModel: "fal-ai/hexgrad/Kokoro-82M",
Scenarios: TestScenarios{
TextCompletion: false,
TextCompletionStream: false,
SimpleChat: true,
CompletionStream: true,
MultiTurnConversation: true,
ToolCalls: true,
ToolCallsStreaming: true,
MultipleToolCalls: false,
End2EndToolCalling: true,
AutomaticFunctionCall: true,
ImageURL: true,
ImageBase64: true,
MultipleImages: true,
CompleteEnd2End: true,
Embedding: true,
Transcription: true,
TranscriptionStream: false,
SpeechSynthesis: true,
SpeechSynthesisStream: false,
Reasoning: false,
ListModels: true,
},
Fallbacks: []schemas.Fallback{
{Provider: schemas.OpenAI, Model: "gpt-4o-mini"},
},
},
}
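The model IDs in the new test entry above follow the Hugging Face Inference Providers routing convention: the first path segment names the backing inference provider (groq, fireworks-ai, sambanova, fal-ai) and the rest is the hub model ID. A small illustrative helper, not part of the PR; the function name and splitting logic are assumptions:

```go
package main

import (
	"fmt"
	"strings"
)

// splitHFModel splits a provider-routed model ID such as
// "groq/openai/gpt-oss-120b" into the inference provider ("groq")
// and the hub model ID ("openai/gpt-oss-120b").
func splitHFModel(id string) (provider, model string) {
	parts := strings.SplitN(id, "/", 2)
	if len(parts) != 2 {
		return "", id // no provider prefix present
	}
	return parts[0], parts[1]
}

func main() {
	fmt.Println(splitHFModel("fal-ai/openai/whisper-large-v3"))
	// Output: fal-ai openai/whisper-large-v3
}
```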
2 changes: 1 addition & 1 deletion core/internal/testutil/responses_stream.go
@@ -690,7 +690,7 @@ func RunResponsesStreamTest(t *testing.T, client *bifrost.Bifrost, ctx context.C
}

// Safety check to prevent infinite loops
if responseCount > 100 {
if responseCount > 300 {
goto lifecycleComplete
}

5 binary files changed (contents not shown).
2 changes: 1 addition & 1 deletion core/internal/testutil/scenarios/media/lion_base64.txt

Large diffs are not rendered by default.
