disable bedrock tests (#732)

This commit is contained in:
Adil Hafeez 2026-02-10 00:34:00 -08:00 committed by GitHub
parent 46de89590b
commit 5394ef5770
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
2 changed files with 10 additions and 1 deletion

View file

@ -480,6 +480,7 @@ def test_anthropic_thinking_mode_streaming():
assert "thinking" in final_block_types
@pytest.mark.skip("unreliable - bedrock tests are flaky in CI")
def test_openai_client_with_coding_model_alias_and_tools():
"""Test OpenAI client using 'coding-model' alias (maps to Bedrock) with coding question and tools"""
logger.info("Testing OpenAI client with 'coding-model' alias -> Bedrock with tools")
@ -529,6 +530,7 @@ def test_openai_client_with_coding_model_alias_and_tools():
)
@pytest.mark.skip("unreliable - bedrock tests are flaky in CI")
def test_anthropic_client_with_coding_model_alias_and_tools():
"""Test Anthropic client using 'coding-model' alias (maps to Bedrock) with coding question and tools"""
logger.info(
@ -576,7 +578,7 @@ def test_anthropic_client_with_coding_model_alias_and_tools():
assert text_content or len(tool_use_blocks) > 0
@pytest.mark.skip("flay test - to be fixed")
@pytest.mark.skip("unreliable - bedrock tests are flaky in CI")
def test_anthropic_client_with_coding_model_alias_and_tools_streaming():
"""Test Anthropic client using 'coding-model' alias (maps to Bedrock) with coding question and tools - streaming"""
logger.info(
@ -671,6 +673,7 @@ def test_anthropic_client_with_coding_model_alias_and_tools_streaming():
), f"Final message should have content. Got: {final_message.content if final_message else 'None'}"
@pytest.mark.skip("unreliable - bedrock tests are flaky in CI")
def test_anthropic_client_streaming_with_bedrock():
"""Test Anthropic client using 'coding-model' alias (maps to Bedrock) with streaming"""
logger.info(
@ -711,6 +714,7 @@ def test_anthropic_client_streaming_with_bedrock():
assert final_message.content and len(final_message.content) > 0
@pytest.mark.skip("unreliable - bedrock tests are flaky in CI")
def test_openai_client_streaming_with_bedrock():
"""Test OpenAI client using 'coding-model' alias (maps to Bedrock) with streaming"""
logger.info(
@ -749,6 +753,7 @@ def test_openai_client_streaming_with_bedrock():
assert len(full_content) > 0, "Expected text response from streaming"
@pytest.mark.skip("unreliable - bedrock tests are flaky in CI")
def test_openai_client_streaming_with_bedrock_and_tools():
"""Test OpenAI client using 'coding-model' alias (maps to Bedrock) with streaming and tools"""
logger.info(

View file

@ -327,6 +327,7 @@ def test_openai_responses_api_streaming_with_tools_upstream_chat_completions():
), "Expected streamed text or tool call argument deltas from Responses tools stream"
@pytest.mark.skip("unreliable - bedrock tests are flaky in CI")
def test_openai_responses_api_non_streaming_upstream_bedrock():
"""Send a v1/responses request using the coding-model alias to verify Bedrock translation/routing"""
base_url = LLM_GATEWAY_ENDPOINT.replace("/v1/chat/completions", "")
@ -347,6 +348,7 @@ def test_openai_responses_api_non_streaming_upstream_bedrock():
assert resp.id is not None
@pytest.mark.skip("unreliable - bedrock tests are flaky in CI")
def test_openai_responses_api_with_streaming_upstream_bedrock():
"""Build a v1/responses API streaming request routed to Bedrock via coding-model alias"""
base_url = LLM_GATEWAY_ENDPOINT.replace("/v1/chat/completions", "")
@ -392,6 +394,7 @@ def test_openai_responses_api_with_streaming_upstream_bedrock():
assert len(full_content) > 0, "Should have received content"
@pytest.mark.skip("unreliable - bedrock tests are flaky in CI")
def test_openai_responses_api_non_streaming_with_tools_upstream_bedrock():
"""Responses API with tools routed to Bedrock via coding-model alias"""
base_url = LLM_GATEWAY_ENDPOINT.replace("/v1/chat/completions", "")
@ -424,6 +427,7 @@ def test_openai_responses_api_non_streaming_with_tools_upstream_bedrock():
print(f"{'='*80}\n")
@pytest.mark.skip("unreliable - bedrock tests are flaky in CI")
def test_openai_responses_api_streaming_with_tools_upstream_bedrock():
"""Responses API with a function/tool definition streaming to Bedrock via coding-model alias"""
base_url = LLM_GATEWAY_ENDPOINT.replace("/v1/chat/completions", "")