fix: logs containing JSON getting lost (#8456)

* fix: logs getting lost

* add test

* add reno
This commit is contained in:
tstadel 2024-10-15 14:11:14 +02:00 committed by GitHub
parent b40f0c8b5d
commit 8613bb7653
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
3 changed files with 30 additions and 1 deletion

View File

@ -190,7 +190,10 @@ def patch_make_records_to_use_kwarg_string_interpolation(original_make_records:
@functools.wraps(original_make_records)
def _wrapper(name, level, fn, lno, msg, args, exc_info, func=None, extra=None, sinfo=None) -> Any:
safe_extra = extra or {}
interpolated_msg = msg.format(**safe_extra)
try:
interpolated_msg = msg.format(**safe_extra)
except (KeyError, ValueError):
interpolated_msg = msg
return original_make_records(name, level, fn, lno, interpolated_msg, (), exc_info, func, extra, sinfo)
return _wrapper

View File

@ -0,0 +1,4 @@
---
fixes:
- |
Fixes logs containing JSON data getting lost due to string interpolation.

View File

@ -490,6 +490,28 @@ class TestCompositeLogger:
"module": "test.test_logging",
}
def test_log_json_content(self, capfd: LogCaptureFixture) -> None:
    """A log message containing literal JSON braces must be emitted verbatim."""
    haystack_logging.configure_logging(use_json=True)
    logger = haystack_logging.getLogger(__name__)
    logger.setLevel(logging.DEBUG)

    logger.log(logging.DEBUG, 'Hello, structured: {"key": "value"}', key="logging", key1="value1", key2="value2")

    captured = capfd.readouterr().err
    record = json.loads(captured)

    expected = {
        "event": 'Hello, structured: {"key": "value"}',
        "key": "logging",
        "key1": "value1",
        "key2": "value2",
        "level": "debug",
        "timestamp": ANY,
        "lineno": ANY,
        "module": "test.test_logging",
    }
    assert record == expected
def test_log_with_string_cast(self, capfd: LogCaptureFixture) -> None:
haystack_logging.configure_logging(use_json=True)