More fixes for LiteLLM and OpenRouter support

This commit is contained in:
Ramnique Singh 2025-04-28 22:17:52 +05:30
parent 14eee3e0c3
commit 118c724ad7
8 changed files with 33 additions and 23 deletions

View file

@ -5,15 +5,21 @@ import dotenv
# Resolve LLM provider configuration from the environment.
# PROVIDER_* variables take precedence; OPENAI_API_KEY is the fallback key.
dotenv.load_dotenv()

PROVIDER_BASE_URL = os.getenv('PROVIDER_BASE_URL', '')
PROVIDER_API_KEY = os.getenv('PROVIDER_API_KEY')
PROVIDER_DEFAULT_MODEL = os.getenv('PROVIDER_DEFAULT_MODEL')

# Async client for a custom provider; stays None when no base URL is set
# (caller presumably falls back to the default OpenAI client — TODO confirm).
client = None

if not PROVIDER_API_KEY:
    # Fall back to the standard OpenAI variable before giving up.
    PROVIDER_API_KEY = os.getenv('OPENAI_API_KEY')
    if not PROVIDER_API_KEY:
        # `raise` is a statement, not a call: no parentheses around the whole thing.
        raise ValueError("No LLM Provider API key found")

if not PROVIDER_DEFAULT_MODEL:
    PROVIDER_DEFAULT_MODEL = 'gpt-4.1'

if PROVIDER_BASE_URL:
    # Log only the base URL — never echo the API key (a secret) to stdout.
    print(f"Using provider {PROVIDER_BASE_URL}")
    client = AsyncOpenAI(base_url=PROVIDER_BASE_URL, api_key=PROVIDER_API_KEY)
else:
    print("No provider base URL configured, using OpenAI directly")