fix: route Perplexity OpenAI endpoints without /v1 (#854)

* fix: route Perplexity OpenAI paths without /v1

* add tests for Perplexity provider handling in LLM module

* refactor: use constant for Perplexity provider prefix in LLM module

* refactor: move const to top of file
This commit is contained in:
Musa 2026-03-31 20:40:42 -04:00 committed by GitHub
parent d8f4fd76e3
commit 3dbda9741e
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
3 changed files with 189 additions and 32 deletions

View file

@@ -122,6 +122,7 @@ impl StreamContext {
.unwrap_or(&"".to_string()),
self.streaming_response,
self.llm_provider().base_url_path_prefix.as_deref(),
self.llm_provider().name.starts_with("perplexity/"),
);
if target_endpoint != request_path {
self.set_http_request_header(":path", Some(&target_endpoint));