diff --git a/test/components/fetchers/test_link_content_fetcher.py b/test/components/fetchers/test_link_content_fetcher.py
index ed69296d3..40767609f 100644
--- a/test/components/fetchers/test_link_content_fetcher.py
+++ b/test/components/fetchers/test_link_content_fetcher.py
@@ -123,7 +123,7 @@ class TestLinkContentFetcher:
     def test_run_bad_status_code(self):
         """Test behavior when a request results in an error status code"""
         empty_byte_stream = b""
-        fetcher = LinkContentFetcher(raise_on_failure=False)
+        fetcher = LinkContentFetcher(raise_on_failure=False, retry_attempts=0)
         mock_response = Mock(status_code=403)
         mock_response.raise_for_status.side_effect = httpx.HTTPStatusError(
             "403 Client Error", request=Mock(), response=mock_response
@@ -304,12 +304,12 @@ class TestLinkContentFetcherAsync:
             mock_get.return_value = mock_response

             # With raise_on_failure=False
-            fetcher = LinkContentFetcher(raise_on_failure=False)
+            fetcher = LinkContentFetcher(raise_on_failure=False, retry_attempts=0)
             streams = (await fetcher.run_async(urls=["https://www.example.com"]))["streams"]
             assert len(streams) == 1  # Returns an empty stream

             # With raise_on_failure=True
-            fetcher = LinkContentFetcher(raise_on_failure=True)
+            fetcher = LinkContentFetcher(raise_on_failure=True, retry_attempts=0)
             with pytest.raises(httpx.HTTPStatusError):
                 await fetcher.run_async(urls=["https://www.example.com"])

diff --git a/test/components/generators/test_openai.py b/test/components/generators/test_openai.py
index 84704e72e..f715f712b 100644
--- a/test/components/generators/test_openai.py
+++ b/test/components/generators/test_openai.py
@@ -308,18 +308,9 @@ class TestOpenAIGenerator:
     )
     @pytest.mark.integration
     def test_run_with_system_prompt(self):
-        generator = OpenAIGenerator(
-            model="gpt-4o-mini",
-            system_prompt="You answer in Portuguese, regardless of the language on which a question is asked",
-        )
-        result = generator.run("Can you explain the Pitagoras therom?")
-        assert "teorema" in result["replies"][0].lower()
-
-        result = generator.run(
-            "Can you explain the Pitagoras therom? Repeat the name of the theorem in German.",
-            system_prompt="You answer in German, regardless of the language on which a question is asked.",
-        )
-        assert "pythag" in result["replies"][0].lower()
+        generator = OpenAIGenerator(model="gpt-4o-mini", system_prompt="Answer in Italian using only one word.")
+        result = generator.run("What's the capital of Italy?")
+        assert "roma" in result["replies"][0].lower()

     @pytest.mark.skipif(
         not os.environ.get("OPENAI_API_KEY", None),
diff --git a/test/core/pipeline/test_async_pipeline.py b/test/core/pipeline/test_async_pipeline.py
index 474449f78..fdb05bfd5 100644
--- a/test/core/pipeline/test_async_pipeline.py
+++ b/test/core/pipeline/test_async_pipeline.py
@@ -7,7 +7,7 @@ def test_async_pipeline_reentrance(waiting_component, spying_tracer):
     pp = AsyncPipeline()
     pp.add_component("wait", waiting_component())

-    run_data = [{"wait_for": 1}, {"wait_for": 2}]
+    run_data = [{"wait_for": 0.001}, {"wait_for": 0.002}]

     async def run_all():
         # Create concurrent tasks for each pipeline run
diff --git a/test/core/pipeline/test_pipeline.py b/test/core/pipeline/test_pipeline.py
index f4c18c130..ae9a22e3b 100644
--- a/test/core/pipeline/test_pipeline.py
+++ b/test/core/pipeline/test_pipeline.py
@@ -23,7 +23,7 @@ class TestPipeline:
         pp = Pipeline()
         pp.add_component("wait", waiting_component())

-        run_data = [{"wait_for": 1}, {"wait_for": 2}]
+        run_data = [{"wait_for": 0.001}, {"wait_for": 0.002}]

         # Use ThreadPoolExecutor to run pipeline calls in parallel
         with ThreadPoolExecutor(max_workers=len(run_data)) as executor: