feat: add logprobs to /v1/chat/completions
This commit is contained in:
parent
eda48562da
commit
4d80dc5e7c
1 changed file with 4 additions and 0 deletions
|
|
@@ -2524,6 +2524,8 @@ async def openai_chat_completions_proxy(request: Request):
     max_tokens = payload.get("max_tokens")
     max_completion_tokens = payload.get("max_completion_tokens")
     tools = payload.get("tools")
+    logprobs = payload.get("logprobs")
+    top_logprobs = payload.get("top_logprobs")

     if ":latest" in model:
         model = model.split(":latest")
@@ -2547,6 +2549,8 @@ async def openai_chat_completions_proxy(request: Request):
         "frequency_penalty": frequency_penalty,
         "stop": stop,
         "stream": stream,
+        "logprobs": logprobs,
+        "top_logprobs": top_logprobs,
     }

     params.update({k: v for k, v in optional_params.items() if v is not None})
Loading…
Add table
Add a link
Reference in a new issue