Merge pull request #802 from CREDO23/sur-161-feat-add-github-models-as-llm-provider

[Improvements] Add GitHub Models as LLM provider
This commit is contained in:
Rohan Verma 2026-02-09 15:07:40 -08:00 committed by GitHub
commit dcd1497630
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
8 changed files with 108 additions and 0 deletions

View file

@ -0,0 +1,23 @@
"""Add GITHUB_MODELS to LiteLLMProvider enum
Revision ID: 96
Revises: 95
"""
from collections.abc import Sequence
from alembic import op
revision: str = "96"
down_revision: str | None = "95"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
op.execute("COMMIT")
op.execute("ALTER TYPE litellmprovider ADD VALUE IF NOT EXISTS 'GITHUB_MODELS'")
def downgrade() -> None:
pass

View file

@ -45,6 +45,7 @@ PROVIDER_MAP = {
"ALIBABA_QWEN": "openai",
"MOONSHOT": "openai",
"ZHIPU": "openai",
"GITHUB_MODELS": "github",
"REPLICATE": "replicate",
"PERPLEXITY": "perplexity",
"ANYSCALE": "anyscale",

View file

@ -211,6 +211,7 @@ class LiteLLMProvider(str, Enum):
DATABRICKS = "DATABRICKS"
COMETAPI = "COMETAPI"
HUGGINGFACE = "HUGGINGFACE"
GITHUB_MODELS = "GITHUB_MODELS"
CUSTOM = "CUSTOM"

View file

@ -56,6 +56,7 @@ PROVIDER_MAP = {
"ALIBABA_QWEN": "openai",
"MOONSHOT": "openai",
"ZHIPU": "openai",
"GITHUB_MODELS": "github",
"HUGGINGFACE": "huggingface",
"CUSTOM": "custom",
}

View file

@ -119,6 +119,7 @@ async def validate_llm_config(
"ALIBABA_QWEN": "openai",
"MOONSHOT": "openai",
"ZHIPU": "openai", # GLM needs special handling
"GITHUB_MODELS": "github",
}
provider_prefix = provider_map.get(provider, provider.lower())
model_string = f"{provider_prefix}/{model_name}"
@ -335,6 +336,7 @@ async def get_search_space_llm_instance(
"ALIBABA_QWEN": "openai",
"MOONSHOT": "openai",
"ZHIPU": "openai",
"GITHUB_MODELS": "github",
}
provider_prefix = provider_map.get(
llm_config.provider.value, llm_config.provider.value.lower()

View file

@ -1477,6 +1477,78 @@ export const LLM_MODELS: LLMModel[] = [
provider: "DATABRICKS",
contextWindow: "128K",
},
// GitHub Models
{
value: "openai/gpt-5",
label: "GitHub GPT-5",
provider: "GITHUB_MODELS",
},
{
value: "openai/gpt-4.1",
label: "GitHub GPT-4.1",
provider: "GITHUB_MODELS",
contextWindow: "1048K",
},
{
value: "openai/gpt-4o",
label: "GitHub GPT-4o",
provider: "GITHUB_MODELS",
contextWindow: "128K",
},
{
value: "deepseek/DeepSeek-V3-0324",
label: "GitHub DeepSeek V3",
provider: "GITHUB_MODELS",
contextWindow: "64K",
},
{
value: "xai/grok-3",
label: "GitHub Grok 3",
provider: "GITHUB_MODELS",
contextWindow: "131K",
},
{
value: "openai/gpt-5-mini",
label: "GitHub GPT-5 Mini",
provider: "GITHUB_MODELS",
},
{
value: "openai/gpt-4.1-mini",
label: "GitHub GPT-4.1 Mini",
provider: "GITHUB_MODELS",
contextWindow: "1048K",
},
{
value: "meta/Llama-4-Scout-17B-16E-Instruct",
label: "GitHub Llama 4 Scout",
provider: "GITHUB_MODELS",
contextWindow: "512K",
},
{
value: "openai/gpt-4.1-nano",
label: "GitHub GPT-4.1 Nano",
provider: "GITHUB_MODELS",
contextWindow: "1048K",
},
{
value: "openai/gpt-4o-mini",
label: "GitHub GPT-4o Mini",
provider: "GITHUB_MODELS",
contextWindow: "128K",
},
{
value: "openai/o4-mini",
label: "GitHub O4 Mini",
provider: "GITHUB_MODELS",
contextWindow: "200K",
},
{
value: "deepseek/DeepSeek-R1",
label: "GitHub DeepSeek R1",
provider: "GITHUB_MODELS",
contextWindow: "64K",
},
];
// Helper function to get models by provider

View file

@ -174,6 +174,13 @@ export const LLM_PROVIDERS: LLMProvider[] = [
example: "databricks/databricks-meta-llama-3-3-70b-instruct",
description: "Databricks Model Serving",
},
{
value: "GITHUB_MODELS",
label: "GitHub Models",
example: "openai/gpt-5, meta/llama-3.1-405b-instruct",
description: "AI models from GitHub Marketplace",
apiBase: "https://models.github.ai/inference",
},
{
value: "CUSTOM",
label: "Custom Provider",

View file

@ -33,6 +33,7 @@ export const liteLLMProviderEnum = z.enum([
"DATABRICKS",
"COMETAPI",
"HUGGINGFACE",
"GITHUB_MODELS",
"CUSTOM",
]);