Mirror of https://github.com/deepset-ai/haystack.git, synced 2026-01-08 04:56:45 +00:00
fix: fix mypy issue in OpenAIChatGenerator that appears since mypy 1.16 (#9456)
* fix mypy issue with 1.16
* add release note
This commit is contained in:
parent aa3d046775
commit 5c2e2445d3
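Background on the change: the old return statements reused the loop variable `chunk`, which is initialized to `None` before the loop, so after the loop mypy sees it as an optional `ChatCompletionChunk`; this is presumably what mypy >=1.16 flags when the value is passed to `_convert_streaming_chunks_to_chat_message`. The fix tracks the last chunk in an explicitly `Optional`-typed variable and raises if the stream produced nothing, which narrows the type before the return. A minimal, self-contained sketch of that pattern (the `Chunk` class and `join_chunks` helper are hypothetical stand-ins for `ChatCompletionChunk` and `_convert_streaming_chunks_to_chat_message`, not the real Haystack API):

    from typing import List, Optional

    # Hypothetical stand-in for ChatCompletionChunk, for illustration only.
    class Chunk:
        def __init__(self, text: str) -> None:
            self.text = text

    # Hypothetical stand-in for _convert_streaming_chunks_to_chat_message:
    # it requires a non-optional Chunk, just like the real helper requires
    # a ChatCompletionChunk.
    def join_chunks(last: Chunk, texts: List[str]) -> str:
        return " ".join(texts) + f" [last: {last.text}]"

    def handle_stream(stream: List[Chunk]) -> str:
        texts: List[str] = []
        last_chunk: Optional[Chunk] = None  # explicitly optional, as in the fix

        for chunk in stream:
            texts.append(chunk.text)
            last_chunk = chunk  # remember the most recent chunk

        if not last_chunk:
            # Raising here both guards against an empty stream and lets mypy
            # narrow last_chunk to Chunk for the line below.
            raise ValueError("No chunks received from the stream")

        return join_chunks(last_chunk, texts)

    print(handle_stream([Chunk("Hello"), Chunk("world")]))  # Hello world [last: world]

The `raise` on an empty stream is what allows mypy to narrow `last_chunk` from `Optional[Chunk]` to `Chunk` on the final line; the same narrowing applies to `last_chunk: Optional[ChatCompletionChunk]` in the diff below.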
@@ -421,13 +421,19 @@ class OpenAIChatGenerator:
         chunks: List[StreamingChunk] = []
         chunk = None
         chunk_delta: StreamingChunk
+        last_chunk: Optional[ChatCompletionChunk] = None

         for chunk in chat_completion:  # pylint: disable=not-an-iterable
             assert len(chunk.choices) <= 1, "Streaming responses should have at most one choice."
             chunk_delta = self._convert_chat_completion_chunk_to_streaming_chunk(chunk)
             chunks.append(chunk_delta)
             callback(chunk_delta)
-        return [self._convert_streaming_chunks_to_chat_message(chunk, chunks)]
+            last_chunk = chunk
+
+        if not last_chunk:
+            raise ValueError("No chunks received from the stream")
+
+        return [self._convert_streaming_chunks_to_chat_message(last_chunk, chunks)]

     async def _handle_async_stream_response(
         self, chat_completion: AsyncStream, callback: AsyncStreamingCallbackT
@@ -435,13 +441,19 @@ class OpenAIChatGenerator:
         chunks: List[StreamingChunk] = []
         chunk = None
         chunk_delta: StreamingChunk
+        last_chunk: Optional[ChatCompletionChunk] = None

         async for chunk in chat_completion:  # pylint: disable=not-an-iterable
             assert len(chunk.choices) <= 1, "Streaming responses should have at most one choice."
             chunk_delta = self._convert_chat_completion_chunk_to_streaming_chunk(chunk)
             chunks.append(chunk_delta)
             await callback(chunk_delta)
-        return [self._convert_streaming_chunks_to_chat_message(chunk, chunks)]
+            last_chunk = chunk
+
+        if not last_chunk:
+            raise ValueError("No chunks received from the stream")
+
+        return [self._convert_streaming_chunks_to_chat_message(last_chunk, chunks)]

     def _check_finish_reason(self, meta: Dict[str, Any]) -> None:
         if meta["finish_reason"] == "length":
releasenotes/notes/fix-mypy-1-16-8ed5d200c9e2018f.yaml (new file, 4 additions)
@@ -0,0 +1,4 @@
+---
+fixes:
+  - |
+    Fixed a mypy issue in the OpenAIChatGenerator and its handling of stream responses. This issue only occurs with mypy >=1.16.0.