params handling for googleapis

This commit is contained in:
Alpha Nerd 2025-09-10 15:25:25 +02:00
parent 2813ecb044
commit ddd3eb9e84
2 changed files with 26 additions and 12 deletions

View file

@@ -5,6 +5,7 @@ endpoints:
- http://192.168.0.52:11434
#- https://openrouter.ai/api/v1
#- https://api.openai.com/v1
#- https://generativelanguage.googleapis.com/v1beta/openai
# Maximum concurrent connections *per endpoint/model pair* (equal to OLLAMA_NUM_PARALLEL)
max_concurrent_connections: 2
@@ -18,3 +19,4 @@ api_keys:
"http://192.168.0.52:11434": "ollama"
#"https://openrouter.ai/api/v1": "${OPENROUTER_KEY}"
#"https://api.openai.com/v1": "${OPENAI_KEY}"
#"https://generativelanguage.googleapis.com/v1beta/openai": "${GEMINI_KEY}"

View file

@@ -1051,8 +1051,6 @@ async def openai_chat_completions_proxy(request: Request):
params = {
"messages": messages,
"model": model,
"stop": stop,
"stream": stream,
}
if tools is not None:
@@ -1075,6 +1073,11 @@ async def openai_chat_completions_proxy(request: Request):
params["presence_penalty"] = presence_penalty
if frequency_penalty is not None:
params["frequency_penalty"] = frequency_penalty
if stop is not None:
params["stop"] = stop
if stream is not None:
params["stream"] = stream
if not model:
raise HTTPException(
@@ -1157,20 +1160,29 @@ async def openai_completions_proxy(request: Request):
params = {
"prompt": prompt,
"model": model,
"frequency_penalty": frequency_penalty,
"presence_penalty": presence_penalty,
"seed": seed,
"stop": stop,
"stream": stream,
"temperature": temperature,
"top_p": top_p,
"max_tokens": max_tokens,
"suffix": suffix
}
if stream_options is not None:
params["stream_options"] = stream_options
if frequency_penalty is not None:
params["frequency_penalty"] = frequency_penalty
if presence_penalty is not None:
params["presence_penalty"] = presence_penalty
if seed is not None:
params["seed"] = seed
if stop is not None:
params["stop"] = stop
if stream is not None:
params["stream"] = stream
if temperature is not None:
params["temperature"] = temperature
if top_p is not None:
params["top_p"] = top_p
if max_tokens is not None:
params["max_tokens"] = max_tokens
if suffix is not None:
params["suffix"] = suffix
if not model:
raise HTTPException(
status_code=400, detail="Missing required field 'model'"