feat: Add ClpKeyValuePairStreamHandler to support logging dictionary type log events into CLP key-value pair IR format. #46

Open · wants to merge 19 commits into base: main
Changes from 10 commits
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -14,7 +14,7 @@ readme = "README.md"
requires-python = ">=3.7"
dependencies = [
"backports.zoneinfo >= 0.2.1; python_version < '3.9'",
"clp-ffi-py >= 0.0.11",
"clp-ffi-py == 0.1.0b1",
"typing-extensions >= 3.7.4",
"tzlocal == 5.1; python_version < '3.8'",
"tzlocal >= 5.2; python_version >= '3.8'",
94 changes: 94 additions & 0 deletions src/clp_logging/auto_generated_kv_pairs_utils.py
@@ -0,0 +1,94 @@
import logging
from typing import Any, Dict, Optional

ZONED_TIMESTAMP_KEY: str = "zoned_timestamp"
ZONED_TIMESTAMP_UTC_EPOCH_MS_KEY: str = "utc_epoch_ms"
Member:
  • Aren't all epoch timestamps UTC?
  • How about unix_timestamp_ms?
    • Technically, ms is ambiguous since it could mean Ms or ms due to capitalization, so I would rather use millisecs for the least ambiguity; but I guess that would be a larger change across our codebase.

Member Author:
Changed the structure to:

{
  "timestamp": {
    "unix_ts_ms": 0,
    "utc_offset_sec": 0
  }
}

Dropped ZONE_ prefix as suggested in the previous discussion.
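
For illustration, a minimal sketch (not part of this PR; the helper name is hypothetical) of how the two fields in the proposed structure could be computed from the local clock:

from datetime import datetime

def current_timestamp_kv_pairs() -> dict:
    # Local-aware "now"; utcoffset() is the local offset from UTC as a timedelta.
    now = datetime.now().astimezone()
    offset = now.utcoffset()
    return {
        "timestamp": {
            "unix_ts_ms": int(now.timestamp() * 1000),
            "utc_offset_sec": int(offset.total_seconds()) if offset is not None else 0,
        }
    }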

ZONED_TIMESTAMP_TZ_KEY: str = "timezone"

LEVEL_KEY: str = "level"
LEVEL_NO_KEY: str = "no"
LEVEL_NAME_KEY: str = "name"

SOURCE_CONTEXT_KEY: str = "source_context"
SOURCE_CONTEXT_PATH_KEY: str = "path"
SOURCE_CONTEXT_LINE_KEY: str = "line"

LOGLIB_GENERATED_MSG_KEY: str = "loglib_generated_msg"


class AutoGeneratedKeyValuePairsBuffer:
"""
A reusable buffer for creating auto-generated key-value pairs for log
events.

This buffer maintains a predefined dictionary structure for common metadata
fields, allowing efficient reuse without creating new dictionaries for each
log event.
"""

def __init__(self) -> None:
self._buf: Dict[str, Any] = {
ZONED_TIMESTAMP_KEY: {
ZONED_TIMESTAMP_UTC_EPOCH_MS_KEY: None,
ZONED_TIMESTAMP_TZ_KEY: None,
},
LEVEL_KEY: {
LEVEL_NO_KEY: None,
LEVEL_NAME_KEY: None,
},
SOURCE_CONTEXT_KEY: {
SOURCE_CONTEXT_PATH_KEY: None,
SOURCE_CONTEXT_LINE_KEY: None,
},
}

def generate(
self, timestamp: int, timezone: Optional[str], record: logging.LogRecord
) -> Dict[str, Any]:
"""
Generates auto-generated key-value pairs by populating the underlying
buffer with the given log event metadata.

:param timestamp: The Unix epoch timestamp in milliseconds of the log
event.
:param timezone: The timezone of the log event, or None if not
applicable.
:param record: The LogRecord containing metadata for the log event.
:return: The populated underlying buffer as the auto-generated key-value
pairs.
"""

self._buf[ZONED_TIMESTAMP_KEY][ZONED_TIMESTAMP_UTC_EPOCH_MS_KEY] = timestamp
self._buf[ZONED_TIMESTAMP_KEY][ZONED_TIMESTAMP_TZ_KEY] = timezone

# NOTE: We don't serialize all the metadata given by `record`. Currently, we only add the
# following metadata into auto-generated kv pairs:
# - log level
# - source context

self._buf[LEVEL_KEY][LEVEL_NO_KEY] = record.levelno
self._buf[LEVEL_KEY][LEVEL_NAME_KEY] = record.levelname

self._buf[SOURCE_CONTEXT_KEY][SOURCE_CONTEXT_PATH_KEY] = record.pathname
self._buf[SOURCE_CONTEXT_KEY][SOURCE_CONTEXT_LINE_KEY] = record.lineno

return self._buf


def create_loglib_generated_log_event_as_auto_generated_kv_pairs(
timestamp: int, timezone: Optional[str], msg: str
) -> Dict[str, Any]:
"""
:param timestamp: The Unix epoch timestamp in milliseconds of the log event.
:param timezone: The timezone of the log event, or None if not applicable.
:param msg: The log message generated by the logging library.
:return: The auto-generated key-value pairs that represent a log event generated by the logging
library itself.
"""
return {
ZONED_TIMESTAMP_KEY: {
ZONED_TIMESTAMP_UTC_EPOCH_MS_KEY: timestamp,
ZONED_TIMESTAMP_TZ_KEY: timezone,
},
LOGLIB_GENERATED_MSG_KEY: msg,
}
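
For reference, a minimal usage sketch of the buffer above (the record values are illustrative and not part of the diff):

import logging
import time
from math import floor

from clp_logging.auto_generated_kv_pairs_utils import AutoGeneratedKeyValuePairsBuffer

buf = AutoGeneratedKeyValuePairsBuffer()
record = logging.LogRecord(
    name="example", level=logging.INFO, pathname=__file__, lineno=1,
    msg={"key": "value"}, args=None, exc_info=None,
)
# The same underlying dict is reused and repopulated on every call.
auto_kv_pairs = buf.generate(floor(time.time() * 1000), "America/Toronto", record)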
181 changes: 177 additions & 4 deletions src/clp_logging/handlers.py
@@ -3,19 +3,25 @@
import socket
import sys
import time
import warnings
from abc import ABCMeta, abstractmethod
from math import floor
from pathlib import Path
from queue import Empty, Queue
from signal import SIGINT, signal, SIGTERM
from threading import Thread, Timer
from types import FrameType
from typing import Callable, ClassVar, Dict, IO, Optional, Tuple, Union
from typing import Any, Callable, ClassVar, Dict, IO, Optional, Tuple, Union

import tzlocal
from clp_ffi_py.ir import FourByteEncoder
from clp_ffi_py.ir import FourByteEncoder, Serializer
from clp_ffi_py.utils import serialize_dict_to_msgpack
from zstandard import FLUSH_FRAME, ZstdCompressionWriter, ZstdCompressor

from clp_logging.auto_generated_kv_pairs_utils import (
AutoGeneratedKeyValuePairsBuffer,
create_loglib_generated_log_event_as_auto_generated_kv_pairs,
)
from clp_logging.protocol import (
BYTE_ORDER,
EOF_CHAR,
@@ -31,6 +37,8 @@

DEFAULT_LOG_FORMAT: str = " %(levelname)s %(name)s %(message)s"
WARN_PREFIX: str = " [WARN][clp_logging]"
AUTO_GENERATED_KV_PAIRS_KEY: str = "auto_generated_kv_pairs"
USER_GENERATED_KV_PAIRS_KEY: str = "user_generated_kv_pairs"


def _init_timeinfo(fmt: Optional[str], tz: Optional[str]) -> Tuple[str, str]:
@@ -216,11 +224,11 @@ def __init__(
self.timeout_fn: Callable[[], None] = timeout_fn
self.next_hard_timeout_ts: int = ULONG_MAX
self.min_soft_timeout_delta: int = ULONG_MAX
self.ostream: Optional[Union[ZstdCompressionWriter, IO[bytes]]] = None
self.ostream: Optional[Union[ZstdCompressionWriter, IO[bytes], Serializer]] = None
self.hard_timeout_thread: Optional[Timer] = None
self.soft_timeout_thread: Optional[Timer] = None

def set_ostream(self, ostream: Union[ZstdCompressionWriter, IO[bytes]]) -> None:
def set_ostream(self, ostream: Union[ZstdCompressionWriter, IO[bytes], Serializer]) -> None:
self.ostream = ostream

def timeout(self) -> None:
@@ -792,3 +800,168 @@ def __init__(
super().__init__(
open(fpath, mode), enable_compression, timestamp_format, timezone, loglevel_timeout
)


class ClpKeyValuePairStreamHandler(logging.Handler):
"""
A custom logging handler that processes log events containing key-value
pairs and serializes them into the CLP key-value pair IR format.

Differences from `logging.StreamHandler`:
- Expects log events (`logging.LogRecord`) to include key-value pairs represented as a Python
dictionary.
Member (on lines +810 to +811):
Is this a difference since StreamHandler accepts a LogRecord and LogRecord can have the msg be an arbitrary object?

Member Author:
Yeah, I realized this would cause some confusion. We should probably say this is what differs from the current CLP logging handlers.

- Serializes the key-value pairs into the CLP key-value pair IR format before writing to the
stream.

Rules for key-value pair representation:
- Key:
- Must be of type `str`.
- Value:
- Must be one of the following types:
- Primitive types: `int`, `float`, `str`, `bool`, or `None`.
- Arrays:
- May contain primitive values, dictionaries, or nested arrays.
- Can be empty.
- Dictionaries:
- Must adhere to the same key-value rules.
- Can be empty.

Member (on lines +815 to +827):
NTS: This needs refactoring.

:param stream: A writable byte output stream to which the handler will write the serialized IR
byte sequences.
:param enable_compression: Whether to compress the serialized IR byte sequences using zstd.
:param timezone: The timezone of the log events, or None if not applicable.
:param loglevel_timeout: Customized timeout configuration.
"""

def __init__(
self,
stream: IO[bytes],
enable_compression: bool = True,
timezone: Optional[str] = None,
loglevel_timeout: Optional[CLPLogLevelTimeout] = None,
) -> None:
super().__init__()

self._enable_compression: bool = enable_compression
self._tz: Optional[str] = timezone
self._loglevel_timeout: Optional[CLPLogLevelTimeout] = loglevel_timeout
self._serializer: Optional[Serializer] = None
self._formatter: Optional[logging.Formatter] = None
self._ostream: IO[bytes] = stream

self._auto_generated_kv_pairs_buf: AutoGeneratedKeyValuePairsBuffer = (
AutoGeneratedKeyValuePairsBuffer()
)

self._init_new_serializer(stream)

# override
def setFormatter(self, fmt: Optional[logging.Formatter]) -> None:
if fmt is None:
return
warnings.warn(
f"Formatter is currently not supported in the current {self.__class__.__name__}",
category=RuntimeWarning,
)
self._formatter = fmt

# override
def emit(self, record: logging.LogRecord) -> None:
"""
Overrides `logging.Handler.emit` to ensure `logging.Handler.handleError`
is always called and avoid requiring a `logging.LogRecord` to call
internal writing functions.
Member (on lines +865 to +866):
I don't understand the "avoid requiring...`" part.

Member Author:
This is directly copied from the existing docstrings here:

def emit(self, record: logging.LogRecord) -> None:

Do you want me to rewrite it for this new handler?

Member:
Uh, not necessarily. I'm just confused about what the docstring is supposed to mean. Should we ask David?

Member Author (LinZhihao-723, Feb 8, 2025):
I think what the original comment means is that it avoids having the default emit implementation call the output stream-level write directly: https://github.com/python/cpython/blob/c1f352bf0813803bb795b796c16040a5cd4115f2/Lib/logging/__init__.py#L1138
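
For context, the stock logging.StreamHandler.emit (paraphrased from CPython, not quoted verbatim) formats the record and writes the resulting string straight to the stream, which is what this handler sidesteps by delegating to self._write:

def emit(self, record):
    try:
        msg = self.format(record)
        # Default behaviour: write the formatted string directly to the stream.
        self.stream.write(msg + self.terminator)
        self.flush()
    except RecursionError:
        raise
    except Exception:
        self.handleError(record)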


:param record: The log event to serialize.
"""
try:
self._write(record)
except Exception:
self.handleError(record)

# Added to `logging.StreamHandler` in python 3.7
# override
def setStream(self, stream: IO[bytes]) -> Optional[IO[bytes]]:
if stream is self._ostream:
return None
old_stream: IO[bytes] = self._ostream
self._ostream = stream
# TODO: The following call will close the old stream. However, `logging.StreamHandler`'s
# implementation will only flush the stream but leave it opened. To support this behaviour,
# we need `clp_ffi_py.ir.Serializer` to allow closing the serializer without closing the
# underlying output stream.
self._init_new_serializer(stream)
return old_stream

# override
def close(self) -> None:
if self._serializer is None:
return
if self._loglevel_timeout:
self._loglevel_timeout.timeout()
# NOTE: Closing the serializer will ensure that any buffered results are flushed and the
# underlying output stream is properly closed.
self._serializer.close()
self._serializer = None
super().close()

def _is_closed(self) -> bool:
return self._serializer is None

def _init_new_serializer(self, stream: IO[bytes]) -> None:
"""
Initializes a new serializer that writes to the given stream.

:param stream: The stream that the underlying serializer will write to.
"""
cctx: ZstdCompressor = ZstdCompressor()
self._serializer = Serializer(
cctx.stream_writer(stream) if self._enable_compression else stream
)

def _write(self, record: logging.LogRecord) -> None:
"""
Writes the log event into the underlying serializer.

:param record: The log event to serialize.
:raise IOError: If the handler has already been closed.
:raise TypeError: If `record.msg` is not a Python dictionary.
"""
if self._is_closed():
raise IOError("The handler has been closed.")

if not isinstance(record.msg, dict):
raise TypeError("The log msg must be a valid Python dictionary.")

curr_ts: int = floor(time.time() * 1000)

if self._loglevel_timeout is not None:
self._loglevel_timeout.update(record.levelno, curr_ts, self._log_level_timeout_callback)

self._serialize_kv_pair_log_event(
self._auto_generated_kv_pairs_buf.generate(curr_ts, self._tz, record), record.msg
)

def _serialize_kv_pair_log_event(
self, auto_generated_kv_pairs: Dict[str, Any], user_generated_kv_pairs: Dict[str, Any]
) -> None:
"""
:param auto_generated_kv_pairs: A dict of auto-generated kv pairs.
:param user_generated_kv_pairs: A dict of user-generated kv pairs.
"""
log_event: Dict[str, Any] = {
AUTO_GENERATED_KV_PAIRS_KEY: auto_generated_kv_pairs,
USER_GENERATED_KV_PAIRS_KEY: user_generated_kv_pairs,
}
assert self._serializer is not None
self._serializer.serialize_log_event_from_msgpack_map(serialize_dict_to_msgpack(log_event))

def _log_level_timeout_callback(self, msg: str) -> None:
"""
Callback for `CLPLogLevelTimeout` to log internal errors.

:param msg: The message sent from `CLPLogLevelTimeout`.
"""
curr_ts: int = floor(time.time() * 1000)
self._serialize_kv_pair_log_event(
create_loglib_generated_log_event_as_auto_generated_kv_pairs(curr_ts, self._tz, msg), {}
)
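
For reference, a minimal usage sketch of the handler introduced in this diff (the output path and logger name are illustrative):

import logging

from clp_logging.handlers import ClpKeyValuePairStreamHandler

logger = logging.getLogger("kv_example")
logger.setLevel(logging.INFO)
with open("example.clp.zst", "wb") as stream:
    handler = ClpKeyValuePairStreamHandler(stream, enable_compression=True)
    logger.addHandler(handler)
    # The log message must be a dict that follows the key-value rules above.
    logger.info({"request": {"method": "GET", "status": 200}, "latency_ms": 3.2})
    logger.removeHandler(handler)
    # Closing the handler flushes the serializer and closes the underlying stream.
    handler.close()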
9 changes: 8 additions & 1 deletion tests/__init__.py
@@ -1,7 +1,11 @@
import unittest
from typing import Iterable, Optional, Union

from tests.test_handlers import TestCLPBase, TestCLPSegmentStreamingBase
from tests.test_handlers import (
TestCLPBase,
TestClpKeyValuePairLoggingBase,
TestCLPSegmentStreamingBase,
)


def add_tests(suite: unittest.TestSuite, loader: unittest.TestLoader, test_class: type) -> None:
@@ -35,4 +39,7 @@ def load_tests(
for seg_test_class in TestCLPSegmentStreamingBase.__subclasses__():
add_tests(suite, loader, seg_test_class)

for kv_pair_handler_test_class in TestClpKeyValuePairLoggingBase.__subclasses__():
add_tests(suite, loader, kv_pair_handler_test_class)

return suite
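
A hypothetical subclass, only to illustrate how the loop above discovers tests (the class name is illustrative; concrete test classes live in tests/test_handlers.py):

from tests.test_handlers import TestClpKeyValuePairLoggingBase

class TestMyKvPairHandler(TestClpKeyValuePairLoggingBase):
    # Any subclass of TestClpKeyValuePairLoggingBase defined before load_tests()
    # runs is returned by __subclasses__() and added to the suite.
    pass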