Add initial integration style tests (#20)

Signed-off-by: José Ulises Niño Rivera <junr03@users.noreply.github.com>
This commit is contained in:
José Ulises Niño Rivera 2024-07-25 14:41:36 -07:00 committed by GitHub
parent a0abd9c42d
commit a51a467cad
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
5 changed files with 1401 additions and 29 deletions

View file

@@ -46,7 +46,9 @@ jobs:
token: ${{ secrets.ADIL_GITHUB_TOKEN }}
- name: Setup | Rust
run: rustup toolchain install stable --profile minimal
- name: Setup | Install wasm toolchain
run: rustup target add wasm32-wasi
- name: Build wasm module
run: cd envoyfilter && cargo build --release --target=wasm32-wasi
- name: Run Tests
# --lib is to only test the library, since when integration tests are made,
# they will be in a separate tests directory
run: cd envoyfilter && cargo test --lib
run: cd envoyfilter && cargo test

1322
envoyfilter/Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@@ -15,3 +15,6 @@ serde_yaml = "0.9.34"
serde_json = "1.0"
md5 = "0.7.0"
open-message-format = { path = "../open-message-format/clients/omf-rust" }
[dev-dependencies]
proxy-wasm-test-framework = { git = "https://github.com/katanemo/test-framework.git", branch = "main" }

View file

@@ -12,6 +12,11 @@ $ rustup target add wasm32-wasi
$ cargo build --target wasm32-wasi --release
```
## Testing
```sh
$ cargo test
```
## Using in Envoy
This example can be run with [`docker compose`](https://docs.docker.com/compose/install/)

View file

@@ -0,0 +1,92 @@
use proxy_wasm_test_framework::tester;
use proxy_wasm_test_framework::types::{Action, BufferType, MapType, MetricType, ReturnType};
use std::path::Path;
/// Returns the path to the compiled filter wasm module as a `String`.
///
/// Panics with an actionable build hint when the artifact is missing, so
/// the integration tests fail with a clear message instead of an obscure
/// module-load error inside the mock host.
fn wasm_module() -> String {
    // Single source of truth for the artifact location; must match the
    // cargo target-dir layout for the wasm32-wasi release profile.
    const MODULE_PATH: &str = "target/wasm32-wasi/release/intelligent_prompt_gateway.wasm";
    assert!(
        Path::new(MODULE_PATH).exists(),
        "Run `cargo build --release --target=wasm32-wasi` first"
    );
    // The path is a valid-UTF-8 literal, so this equals the original
    // `to_str().unwrap().to_string()` round-trip.
    MODULE_PATH.to_string()
}
/// Integration-style test: drives the wasm filter through the
/// proxy-wasm-test-framework mock host and verifies, in order, the
/// hostcalls the filter makes while handling a chat-completions request
/// destined for OpenAI. Any hostcall not explicitly expected below fails
/// the test (`allow_unexpected: false`).
#[test]
fn request_to_open_ai_chat_completions() {
    let args = tester::MockSettings {
        // Requires the wasm artifact to be prebuilt; see `wasm_module`.
        wasm_path: wasm_module(),
        quiet: false,
        // Strict mode: unexpected hostcalls are test failures.
        allow_unexpected: false,
    };
    let mut module = tester::mock(args).unwrap();

    // Module bootstrap (`_start`): no host interaction is expected.
    module
        .call_start()
        .execute_and_expect(ReturnType::None)
        .unwrap();

    // Setup Filter
    // Creating the root context should register the filter's gauge metric.
    let root_context = 1;
    module
        .call_proxy_on_context_create(root_context, 0)
        .expect_metric_creation(MetricType::Gauge, "active_http_calls")
        .execute_and_expect(ReturnType::None)
        .unwrap();

    // Setup HTTP Stream
    // Per-request context, parented to the root context above.
    let http_context = 2;
    module
        .call_proxy_on_context_create(http_context, root_context)
        .execute_and_expect(ReturnType::None)
        .unwrap();

    // Request Headers
    // The filter is expected to read `:host` and `:path`, then rewrite the
    // path to the OpenAI chat-completions endpoint and continue the stream.
    module
        .call_proxy_on_request_headers(http_context, 0, false)
        .expect_get_header_map_value(Some(MapType::HttpRequestHeaders), Some(":host"))
        .returning(Some("api.openai.com"))
        // NOTE(review): content-length is blanked — presumably because the
        // filter mutates the body below; confirm against the filter source.
        .expect_add_header_map_value(
            Some(MapType::HttpRequestHeaders),
            Some("content-length"),
            Some(""),
        )
        .expect_get_header_map_value(Some(MapType::HttpRequestHeaders), Some(":path"))
        .returning(Some("/llmrouting"))
        .expect_add_header_map_value(
            Some(MapType::HttpRequestHeaders),
            Some(":path"),
            Some("/v1/chat/completions"),
        )
        .execute_and_expect(ReturnType::Action(Action::Continue))
        .unwrap();

    // Request Body
    // A chat-completions payload with no "model" field. The `\` line
    // continuations strip the newline and following whitespace, so the
    // literal's value is one compact JSON string.
    let chat_completions_request_body = "\
    {\
        \"messages\": [\
        {\
            \"role\": \"system\",\
            \"content\": \"You are a poetic assistant, skilled in explaining complex programming concepts with creative flair.\"\
        },\
        {\
            \"role\": \"user\",\
            \"content\": \"Compose a poem that explains the concept of recursion in programming.\"\
        }\
        ]\
    }";
    // The filter reads the buffered body and writes a replacement back.
    module
        .call_proxy_on_request_body(
            http_context,
            // Framework API takes the body size as i32.
            chat_completions_request_body.len() as i32,
            true,
        )
        .expect_get_buffer_bytes(Some(BufferType::HttpRequestBody))
        .returning(Some(chat_completions_request_body))
        // TODO: assert that the model field was added.
        .expect_set_buffer_bytes(Some(BufferType::HttpRequestBody), None)
        .execute_and_expect(ReturnType::Action(Action::Continue))
        .unwrap();
}