diff --git a/haystack/nodes/answer_generator/openai.py b/haystack/nodes/answer_generator/openai.py
index f18571e53..003812a91 100644
--- a/haystack/nodes/answer_generator/openai.py
+++ b/haystack/nodes/answer_generator/openai.py
@@ -20,7 +20,7 @@ from haystack.nodes.prompt import PromptTemplate
 
 logger = logging.getLogger(__name__)
 
-machine = platform.machine()
+machine = platform.machine().lower()
 system = platform.system()
 
 USE_TIKTOKEN = False
diff --git a/haystack/nodes/retriever/_openai_encoder.py b/haystack/nodes/retriever/_openai_encoder.py
index b8093be75..09134898a 100644
--- a/haystack/nodes/retriever/_openai_encoder.py
+++ b/haystack/nodes/retriever/_openai_encoder.py
@@ -25,7 +25,7 @@ if TYPE_CHECKING:
 
 logger = logging.getLogger(__name__)
 
-machine = platform.machine()
+machine = platform.machine().lower()
 system = platform.system()
 
 USE_TIKTOKEN = False
diff --git a/pyproject.toml b/pyproject.toml
index 3517c161c..8bc662c3b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -88,7 +88,7 @@ dependencies = [
     "elasticsearch>=7.7,<8",
 
     # OpenAI tokenizer
-    "tiktoken>=0.1.2; python_version >= '3.8' and (platform_machine == 'amd64' or platform_machine == 'x86_64' or (platform_machine == 'arm64' and platform_system == 'Darwin'))",
+    "tiktoken>=0.1.2; python_version >= '3.8' and (platform_machine == 'AMD64' or platform_machine == 'amd64' or platform_machine == 'x86_64' or (platform_machine == 'arm64' and platform_system == 'Darwin'))",
 
     # context matching
     "rapidfuzz>=2.0.15,<2.8.0",  # FIXME https://github.com/deepset-ai/haystack/pull/3199