Mirror of https://github.com/microsoft/graphrag.git
Fix/json dumps ascii (#873)
* Set ensure_ascii=False in json.dumps calls to support non-ASCII chars
* Format
* Semver
parent 7376f149d2
commit 073f650ba9
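For context: Python's json.dumps escapes every non-ASCII character to a \uXXXX sequence by default, so cached files, logs, and snapshots containing such text become bloated and unreadable. Passing ensure_ascii=False emits the characters as literal UTF-8 text instead, which is the one-line change this commit applies across the pipeline. A minimal sketch of the difference (the sample data is illustrative):

import json

data = {"title": "Café réunion", "city": "東京"}

# Default behavior: every non-ASCII character becomes a \uXXXX escape.
print(json.dumps(data))
# {"title": "Caf\u00e9 r\u00e9union", "city": "\u6771\u4eac"}

# With ensure_ascii=False the characters pass through unescaped.
print(json.dumps(data, ensure_ascii=False))
# {"title": "Café réunion", "city": "東京"}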
@@ -0,0 +1,4 @@
+{
+    "type": "patch",
+    "description": "Fix file dumps using json for non ASCII chars"
+}
graphrag/index/cache/json_pipeline_cache.py (vendored, 4 changed lines)
@@ -44,7 +44,9 @@ class JsonPipelineCache(PipelineCache):
         if value is None:
             return
         data = {"result": value, **(debug_data or {})}
-        await self._storage.set(key, json.dumps(data), encoding=self._encoding)
+        await self._storage.set(
+            key, json.dumps(data, ensure_ascii=False), encoding=self._encoding
+        )

     async def has(self, key: str) -> bool:
         """Has method definition."""
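Because the dumped string can now contain non-ASCII characters, the write depends on the storage layer encoding the text correctly (the encoding=self._encoding argument above). A minimal round-trip sketch, with storage simplified to an in-memory dict of encoded bytes; set_file and the sample value are illustrative stand-ins, not graphrag APIs:

import json

def set_file(files: dict[str, bytes], key: str, value: str, encoding: str = "utf-8") -> None:
    # Stand-in for the storage set call: encode the JSON text explicitly.
    files[key] = value.encode(encoding)

files: dict[str, bytes] = {}
data = {"result": "résumé 履歴書"}
set_file(files, "cache-key", json.dumps(data, ensure_ascii=False))

# Decoding with the same encoding restores the original characters intact.
assert json.loads(files["cache-key"].decode("utf-8")) == data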
@@ -127,7 +127,9 @@ class SummarizeExtractor:
             name="summarize",
             variables={
                 self._entity_name_key: json.dumps(items),
-                self._input_descriptions_key: json.dumps(sorted(descriptions)),
+                self._input_descriptions_key: json.dumps(
+                    sorted(descriptions), ensure_ascii=False
+                ),
             },
             model_parameters={"max_tokens": self._max_summary_length},
         )
@@ -78,7 +78,7 @@ class BlobWorkflowCallbacks(NoopWorkflowCallbacks):
         blob_client = self._blob_service_client.get_blob_client(
             self._container_name, self._blob_name
         )
-        blob_client.append_block(json.dumps(log) + "\n")
+        blob_client.append_block(json.dumps(log, ensure_ascii=False) + "\n")

         # update the blob's block count
         self._num_blocks += 1
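Note that append_block receives a str here; the Azure Blob SDK encodes string payloads before upload (UTF-8 by default), so the now-unescaped characters should survive the append intact.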
@@ -34,13 +34,16 @@ class FileWorkflowCallbacks(NoopWorkflowCallbacks):
     ):
         """Handle when an error occurs."""
         self._out_stream.write(
-            json.dumps({
-                "type": "error",
-                "data": message,
-                "stack": stack,
-                "source": str(cause),
-                "details": details,
-            })
+            json.dumps(
+                {
+                    "type": "error",
+                    "data": message,
+                    "stack": stack,
+                    "source": str(cause),
+                    "details": details,
+                },
+                ensure_ascii=False,
+            )
             + "\n"
         )
         message = f"{message} details={details}"
@@ -49,14 +52,21 @@ class FileWorkflowCallbacks(NoopWorkflowCallbacks):
     def on_warning(self, message: str, details: dict | None = None):
         """Handle when a warning occurs."""
         self._out_stream.write(
-            json.dumps({"type": "warning", "data": message, "details": details}) + "\n"
+            json.dumps(
+                {"type": "warning", "data": message, "details": details},
+                ensure_ascii=False,
+            )
+            + "\n"
         )
         _print_warning(message)

     def on_log(self, message: str, details: dict | None = None):
         """Handle when a log message is produced."""
         self._out_stream.write(
-            json.dumps({"type": "log", "data": message, "details": details}) + "\n"
+            json.dumps(
+                {"type": "log", "data": message, "details": details}, ensure_ascii=False
+            )
+            + "\n"
         )

         message = f"{message} details={details}"
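All three FileWorkflowCallbacks writers follow the same JSONL pattern: one JSON object per line, now serialized without ASCII escaping so that log entries containing non-ASCII text stay readable in the file. A minimal sketch, with io.StringIO standing in for the callback's _out_stream and an illustrative record:

import io
import json

out_stream = io.StringIO()
record = {"type": "log", "data": "índice terminé", "details": None}
# One object per line, unescaped, mirroring on_log above.
out_stream.write(json.dumps(record, ensure_ascii=False) + "\n")

print(out_stream.getvalue(), end="")
# {"type": "log", "data": "índice terminé", "details": null}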
@@ -234,7 +234,9 @@ async def run_pipeline(
     )

     async def dump_stats() -> None:
-        await storage.set("stats.json", json.dumps(asdict(stats), indent=4))
+        await storage.set(
+            "stats.json", json.dumps(asdict(stats), indent=4, ensure_ascii=False)
+        )

     async def load_table_from_storage(name: str) -> pd.DataFrame:
         if not await storage.has(name):
@@ -224,7 +224,7 @@ class TextListSplitter(TextSplitter):
         """Append the current chunk to the result."""
         if new_chunk and len(new_chunk) > 0:
             if self._type == TextListSplitterType.JSON:
-                chunk_list.append(json.dumps(new_chunk))
+                chunk_list.append(json.dumps(new_chunk, ensure_ascii=False))
             else:
                 chunk_list.append(self._output_delimiter.join(new_chunk))

@@ -82,7 +82,7 @@ async def _run_extractor(
             rank_explanation=report.get("rating_explanation", ""),
             summary=report.get("summary", ""),
             findings=report.get("findings", []),
-            full_content_json=json.dumps(report, indent=4),
+            full_content_json=json.dumps(report, indent=4, ensure_ascii=False),
         )
     except Exception as e:
         log.exception("Error processing community: %s", community)
@@ -49,9 +49,11 @@ async def snapshot_rows(
         if fmt.format == "json":
             await storage.set(
                 f"{row_name}.{extension}",
-                json.dumps(row[column])
-                if column is not None
-                else json.dumps(row.to_dict()),
+                (
+                    json.dumps(row[column], ensure_ascii=False)
+                    if column is not None
+                    else json.dumps(row.to_dict(), ensure_ascii=False)
+                ),
             )
         elif fmt.format == "text":
             if column is None:
@@ -65,9 +67,11 @@
 def _parse_formats(formats: list[str | dict[str, Any]]) -> list[FormatSpecifier]:
     """Parse the formats into a list of FormatSpecifiers."""
     return [
-        FormatSpecifier(**fmt)
-        if isinstance(fmt, dict)
-        else FormatSpecifier(format=fmt, extension=_get_format_extension(fmt))
+        (
+            FormatSpecifier(**fmt)
+            if isinstance(fmt, dict)
+            else FormatSpecifier(format=fmt, extension=_get_format_extension(fmt))
+        )
         for fmt in formats
     ]

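The parenthesized conditional expressions in these last two hunks are formatter output (the "Format" bullet in the commit message); only the snapshot_rows hunk changes behavior, by adding ensure_ascii=False to both branches of the conditional.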