chore: move GPT35Generator tests into the main test suite (#5844)

* move tests

* fix no-tests-found error from pytest

* add missing `self`

---------

Co-authored-by: Massimiliano Pippi <mpippi@gmail.com>
ZanSara 2023-09-21 11:42:32 +02:00 committed by GitHub
parent 5820120f9b
commit 23fdef929e
6 changed files with 54 additions and 60 deletions
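
For context, a minimal sketch of what the "add missing `self`" fix addresses (illustrative names, not the actual diff): when a module-level pytest function is moved into a test class, each method needs `self` as its first parameter, because pytest instantiates the class and calls the method on the instance.

import pytest

class TestGPT35Generator:
    @pytest.mark.integration
    def test_gpt35_generator_run(self):
        # Without `self`, invoking this method on the instance raises
        # "TypeError: ... takes 0 positional arguments but 1 was given".
        assert True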


@@ -30,7 +30,6 @@ jobs:
       folder:
         - "document_search"
         - "pipelines"
-        - "preview"
     runs-on: ubuntu-latest
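
Presumably the "no-tests-found error" in the commit message refers to pytest's behavior on an emptied folder: with the e2e tests moved out of "preview", pytest would collect nothing there and exit non-zero, failing CI, so the folder is dropped from the matrix above. A small sketch of the relevant pytest behavior:

import pytest

# pytest reserves exit code 5 for "no tests collected"; CI treats any
# non-zero exit as a failure, which is why an emptied test folder has to
# be removed from the workflow matrix rather than left in place.
print(int(pytest.ExitCode.NO_TESTS_COLLECTED))  # prints 5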


@@ -1,59 +0,0 @@
-import os
-
-import pytest
-import openai
-
-from haystack.preview.components.generators.openai.gpt35 import GPT35Generator
-from haystack.preview.components.generators.openai.gpt4 import GPT4Generator
-
-
-@pytest.mark.skipif(
-    not os.environ.get("OPENAI_API_KEY", None),
-    reason="Export an env var called OPENAI_API_KEY containing the OpenAI API key to run this test.",
-)
-@pytest.mark.parametrize("generator_class,model_name", [(GPT35Generator, "gpt-3.5"), (GPT4Generator, "gpt-4")])
-def test_gpt35_generator_run(generator_class, model_name):
-    component = generator_class(api_key=os.environ.get("OPENAI_API_KEY"), n=1)
-    results = component.run(prompt="What's the capital of France?")
-    assert len(results["replies"]) == 1
-    assert "Paris" in results["replies"][0]
-    assert len(results["metadata"]) == 1
-    assert model_name in results["metadata"][0]["model"]
-    assert results["metadata"][0]["finish_reason"] == "stop"
-
-
-@pytest.mark.skipif(
-    not os.environ.get("OPENAI_API_KEY", None),
-    reason="Export an env var called OPENAI_API_KEY containing the OpenAI API key to run this test.",
-)
-@pytest.mark.parametrize("generator_class", [GPT35Generator, GPT4Generator])
-def test_gpt35_generator_run_wrong_model_name(generator_class):
-    component = generator_class(model_name="something-obviously-wrong", api_key=os.environ.get("OPENAI_API_KEY"), n=1)
-    with pytest.raises(openai.InvalidRequestError, match="The model `something-obviously-wrong` does not exist"):
-        component.run(prompt="What's the capital of France?")
-
-
-@pytest.mark.skipif(
-    not os.environ.get("OPENAI_API_KEY", None),
-    reason="Export an env var called OPENAI_API_KEY containing the OpenAI API key to run this test.",
-)
-@pytest.mark.parametrize("generator_class,model_name", [(GPT35Generator, "gpt-3.5"), (GPT4Generator, "gpt-4")])
-def test_gpt35_generator_run_streaming(generator_class, model_name):
-    class Callback:
-        def __init__(self):
-            self.responses = ""
-
-        def __call__(self, chunk):
-            self.responses += chunk.choices[0].delta.content if chunk.choices[0].delta else ""
-            return chunk
-
-    callback = Callback()
-    component = generator_class(os.environ.get("OPENAI_API_KEY"), streaming_callback=callback, n=1)
-    results = component.run(prompt="What's the capital of France?")
-    assert len(results["replies"]) == 1
-    assert "Paris" in results["replies"][0]
-    assert len(results["metadata"]) == 1
-    assert model_name in results["metadata"][0]["model"]
-    assert results["metadata"][0]["finish_reason"] == "stop"
-    assert callback.responses == results["replies"][0]


@@ -1,3 +1,4 @@
+import os
 from unittest.mock import patch, Mock
 from copy import deepcopy
@@ -263,3 +264,56 @@ class TestGPT35Generator:
                 "2 out of the 4 completions have been truncated before reaching a natural "
                 "stopping point. Increase the max_tokens parameter to allow for longer completions."
             )
+
+    @pytest.mark.skipif(
+        not os.environ.get("OPENAI_API_KEY", None),
+        reason="Export an env var called OPENAI_API_KEY containing the OpenAI API key to run this test.",
+    )
+    @pytest.mark.integration
+    def test_gpt35_generator_run(self):
+        component = GPT35Generator(api_key=os.environ.get("OPENAI_API_KEY"), n=1)
+        results = component.run(prompt="What's the capital of France?")
+        assert len(results["replies"]) == 1
+        assert "Paris" in results["replies"][0]
+        assert len(results["metadata"]) == 1
+        assert "gpt-3.5" in results["metadata"][0]["model"]
+        assert results["metadata"][0]["finish_reason"] == "stop"
+
+    @pytest.mark.skipif(
+        not os.environ.get("OPENAI_API_KEY", None),
+        reason="Export an env var called OPENAI_API_KEY containing the OpenAI API key to run this test.",
+    )
+    @pytest.mark.integration
+    def test_gpt35_generator_run_wrong_model_name(self):
+        component = GPT35Generator(
+            model_name="something-obviously-wrong", api_key=os.environ.get("OPENAI_API_KEY"), n=1
+        )
+        with pytest.raises(openai.InvalidRequestError, match="The model `something-obviously-wrong` does not exist"):
+            component.run(prompt="What's the capital of France?")
+
+    @pytest.mark.skipif(
+        not os.environ.get("OPENAI_API_KEY", None),
+        reason="Export an env var called OPENAI_API_KEY containing the OpenAI API key to run this test.",
+    )
+    @pytest.mark.integration
+    def test_gpt35_generator_run_streaming(self):
+        class Callback:
+            def __init__(self):
+                self.responses = ""
+
+            def __call__(self, chunk):
+                self.responses += chunk.choices[0].delta.content if chunk.choices[0].delta else ""
+                return chunk
+
+        callback = Callback()
+        component = GPT35Generator(os.environ.get("OPENAI_API_KEY"), streaming_callback=callback, n=1)
+        results = component.run(prompt="What's the capital of France?")
+        assert len(results["replies"]) == 1
+        assert "Paris" in results["replies"][0]
+        assert len(results["metadata"]) == 1
+        assert "gpt-3.5" in results["metadata"][0]["model"]
+        assert results["metadata"][0]["finish_reason"] == "stop"
+        assert callback.responses == results["replies"][0]
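
To run the relocated integration tests locally, something like the following sketch should work; the test file path is an assumption based on the import paths above, not taken from this diff:

import os
import sys

import pytest

# Fail early with a clear message if the API key is missing; otherwise
# run only the tests marked with @pytest.mark.integration.
if not os.environ.get("OPENAI_API_KEY"):
    sys.exit("Export OPENAI_API_KEY to run these integration tests.")

sys.exit(pytest.main(["-m", "integration", "test/preview/components/generators/openai/test_gpt35_generator.py"]))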