diff --git a/.github/workflows/test-suite.yml b/.github/workflows/test-suite.yml
index f4b3b41..8004326 100644
--- a/.github/workflows/test-suite.yml
+++ b/.github/workflows/test-suite.yml
@@ -16,35 +16,18 @@ jobs:
steps:
- uses: actions/checkout@v4
- - uses: actions/setup-python@v5
- with:
- python-version: "3.12"
-
- - name: Install dependencies
- run: |
- python -m pip install --upgrade pip
- pip install tox
+ - uses: astral-sh/setup-uv@v3
- name: Lint with tox
- run: tox -e lint
+ run: uvx tox -e lint
test:
- name: "Python Test ${{matrix.python-version}} ${{ matrix.os }}"
+ name: "Python Test ${{ matrix.os }}"
needs: [lint]
runs-on: "${{ matrix.os }}"
strategy:
+ fail-fast: false # allow tests to run on all platforms
matrix:
- python-version:
- - "pypy-3.7"
- - "pypy-3.8"
- - "pypy-3.9"
- - "pypy-3.10"
- - "3.7"
- - "3.8"
- - "3.9"
- - "3.10"
- - "3.11"
- - "3.12"
os:
- ubuntu-latest
- windows-latest
@@ -52,15 +35,7 @@ jobs:
steps:
- uses: actions/checkout@v4
-
- - uses: actions/setup-python@v5
- with:
- python-version: ${{ matrix.python-version }}
-
- - name: Install dependencies
- run: |
- python -m pip install --upgrade pip
- pip install tox tox-gh-actions
+ - uses: astral-sh/setup-uv@v3
- name: Test with tox
- run: tox
+ run: uvx tox
diff --git a/.gitignore b/.gitignore
index 051f709..97ad525 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2,6 +2,7 @@
*.swp
build
dist
+dist_uploaded
*.egg-info
# Tests and validation
@@ -16,3 +17,6 @@ env
# IDE
.vscode
.idea
+
+# generated docs
+site
diff --git a/CHANGELOG.md b/CHANGELOG.md
deleted file mode 100644
index 4c886d3..0000000
--- a/CHANGELOG.md
+++ /dev/null
@@ -1,96 +0,0 @@
-# Changelog
-All notable changes to this project will be documented in this file.
-
-The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
-and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
-
-## [3.0.1](https://github.com/nhairs/python-json-logger/compare/v3.0.0...v3.0.1) - 2023-04-01
-
-### Fixes
-
-- Fix spelling of parameter `json_serialiser` -> `json_serializer` (#170) - @juliangilbey
-
-## [3.0.0](https://github.com/nhairs/python-json-logger/compare/v2.0.7...v3.0.0) - 2024-03-25
-
-Note: using new major version to seperate changes from this fork and the original (upstream). See #1 for details.
-
-### Changes
-- Update supported Python versions - @nhairs
- - Drop 3.6
- - The following versions are supported and tested:
- - CPython 3.7-3.12 (ubuntu, windows, mac)
- - PyPy 3.7-3.10 (ubuntu, wundows, mac)
- - `RESERVED_ATTRS` is now a list and version dependent
-- Fix `JsonFormatter.__init__` return type (`None`) - @nhairs
-- Moved to `pyproject.toml` - @nhairs
-- Update linting and testing - @nhairs
- - Split lint and test steps in GHA
- - Use validate-pyproject, black, pylint, mypy
-
-## [2.0.7](https://github.com/nhairs/python-json-logger/compare/v2.0.6...v2.0.7) - 2023-02-21
-### Changed
-- Fix inclusion of py.typed in pip packages - @sth
-- Added pytest support with test file rename. Migrated to assertEqual
-
-## [2.0.6](https://github.com/nhairs/python-json-logger/compare/v2.0.5...v2.0.6) - 2023-02-14
-### Changed
-- Parameter `rename_fields` in merge_record_extra is now optional - @afallou
-
-## [2.0.5](https://github.com/nhairs/python-json-logger/compare/v2.0.4...v2.0.5) - 2023-02-12
-### Added
-- Allow reserved attrs to be renamed - @henkhogan
-- Support added for Python 3.11
-- Now verifying builds in Pypy 3.9 as well
-- Type annotations are now in the package - @louis-jaris
-### Changed
-- Fix rename_fields for exc_info - @guilhermeferrari
-- Cleaned up test file for PEP8 - @lopagela
-- Cleaned up old Python 2 artifacts - @louis-jaris
-- Dropped Python 3.5 support - @idomozes
-- Moved type check via tox into 3.11 run only
-- Added test run in Python3.6 (will keep for a little while longer, but it's EOL so upgrade)
-
-## [2.0.4](https://github.com/nhairs/python-json-logger/compare/v2.0.3...v2.0.4) - 2022-07-11
-### Changed
-- Fix too strict regex for percentage style logging - @aberres
-
-## [2.0.3](https://github.com/nhairs/python-json-logger/compare/v2.0.2...v2.0.3) - 2022-07-08
-### Added
-- Add PEP 561 marker/basic mypy configuration. - @bringhurst
-- Workaround logging.LogRecord.msg type of string. - @bringhurst
-### Changed
-- Changed a link archive of the reference page in case it's down. - @ahonnecke
-- Removed unnecessary try-except around OrderedDict usage - @sozofaan
-- Update documentation link to json module + use https - @deronnax
-- Dropped 3.5 support. - @bringhurst
-
-## [2.0.2](https://github.com/nhairs/python-json-logger/compare/v2.0.1...v2.0.2) - 2021-07-27
-### Added
-- Officially supporting 3.9 - @felixonmars.
-- You can now add static fields to log objects - @cosimomeli.
-### Changed
-- Dropped 3.4 support.
-- Dropped Travis CI for Github Actions.
-- Wheel should build for python 3 instead of just 3.4 now.
-
-## [2.0.1](https://github.com/nhairs/python-json-logger/compare/v2.0.0...v2.0.1) - 2020-10-12
-### Added
-- Support Pypi long descripton - @ereli-cb
-### Changed
-- You can now rename output fields - @schlitzered
-
-## [2.0.0](https://github.com/nhairs/python-json-logger/compare/v0.1.11...v2.0.0) - 2020-09-26
-### Added
-- New Changelog
-- Added timezone support to timestamps - @lalten
-- Refactored log record to function - @georgysavva
-- Add python 3.8 support - @tommilligan
-### Removed
-- Support for Python 2.7
-- Debian directory
-
-## [0.1.11](https://github.com/nhairs/python-json-logger/compare/v0.1.10...v0.1.11) - 2019-03-29
-### Added
-- Support for Python 3.7
-### Changed
-- 'stack_info' flag in logging calls is now respected in JsonFormatter by [@ghShu](https://github.com/ghShu)
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
new file mode 120000
index 0000000..d0fcfe9
--- /dev/null
+++ b/CODE_OF_CONDUCT.md
@@ -0,0 +1 @@
+docs/contributing.md
\ No newline at end of file
diff --git a/NOTICE b/NOTICE
new file mode 100644
index 0000000..cfa67dd
--- /dev/null
+++ b/NOTICE
@@ -0,0 +1,5 @@
+This software includes the following licenced software:
+ - mkdocstrings-python
+ Copyright (c) 2021, Timothée Mazzucotelli
+ Licenced under ISC Licence
+ Source: https://github.com/mkdocstrings/python
diff --git a/README.md b/README.md
index c7369b9..628f575 100644
--- a/README.md
+++ b/README.md
@@ -1,193 +1,23 @@
+[](https://pypi.python.org/pypi/python-json-logger/)
+[](https://pypi.python.org/pypi/python-json-logger/)
+[](https://pypi.python.org/pypi/python-json-logger/)
+[](https://github.com/nhairs/python-json-logger)
+[](https://github.com/nhairs/python-json-logger)

-[](https://pypi.python.org/pypi/python-json-logger/)
-[](https://pypi.python.org/pypi/python-json-logger/)
-
+#
# Python JSON Logger
-This library is provided to allow standard python logging to output log data as json objects. With JSON we can make our logs more readable by machines and we can stop writing custom parsers for syslog type records.
-
-
-### 🚨 Important 🚨
-
-This repository is a maintained fork of [madzak/python-json-logger](https://github.com/madzak/python-json-logger) pending [a PEP 541 request](https://github.com/pypi/support/issues/3607) for the PyPI package. The future direction of the project is being discussed [here](https://github.com/nhairs/python-json-logger/issues/1).
-
-[**Changelog**](https://github.com/nhairs/python-json-logger/blob/main/CHANGELOG.md)
-
-## Installation
-
-Note: All versions of this fork use version `>=3.0.0` - to use pre-fork versions use `python-json-logger<3.0.0`.
-
-### Install via pip
-
-Until the PEP 541 request is complete you will need to install directly from github.
-
-#### Install from GitHub
-
-To install from releases:
-
-```shell
-# 3.0.0 wheel
-pip install 'python-json-logger@https://github.com/nhairs/python-json-logger/releases/download/v3.0.0/python_json_logger-3.0.0-py3-none-any.whl'
-```
-
-To install from head:
-
-```shell
-pip install 'python-json-logger@git+https://github.com/nhairs/python-json-logger.git'
-```
-
-To install a specific version from a tag:
-
-```shell
-# Last released version before forking
-pip install 'python-json-logger@git+https://github.com/nhairs/python-json-logger.git@v2.0.7'
-```
-
-#### Install from Source
-
-```shell
-git clone https://github.com/nhairs/python-json-logger.git
-cd python-json-logger
-pip install -e .
-```
-
-## Usage
-
-### Integrating with Python's logging framework
-
-Json outputs are provided by the JsonFormatter logging formatter. You can add the custom formatter like below:
-
-```python
- import logging
- from pythonjsonlogger import jsonlogger
-
- logger = logging.getLogger()
-
- logHandler = logging.StreamHandler()
- formatter = jsonlogger.JsonFormatter()
- logHandler.setFormatter(formatter)
- logger.addHandler(logHandler)
-```
-
-### Customizing fields
-
-The fmt parser can also be overidden if you want to have required fields that differ from the default of just `message`.
-
-These two invocations are equivalent:
-
-```python
-class CustomJsonFormatter(jsonlogger.JsonFormatter):
- def parse(self):
- return self._fmt.split(';')
-
-formatter = CustomJsonFormatter('one;two')
-
-# is equivalent to:
-
-formatter = jsonlogger.JsonFormatter('%(one)s %(two)s')
-```
-
-You can also add extra fields to your json output by specifying a dict in place of message, as well as by specifying an `extra={}` argument.
-
-Contents of these dictionaries will be added at the root level of the entry and may override basic fields.
-
-You can also use the `add_fields` method to add to or generally normalize the set of default set of fields, it is called for every log event. For example, to unify default fields with those provided by [structlog](http://www.structlog.org/) you could do something like this:
-
-```python
-class CustomJsonFormatter(jsonlogger.JsonFormatter):
- def add_fields(self, log_record, record, message_dict):
- super(CustomJsonFormatter, self).add_fields(log_record, record, message_dict)
- if not log_record.get('timestamp'):
- # this doesn't use record.created, so it is slightly off
- now = datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%fZ')
- log_record['timestamp'] = now
- if log_record.get('level'):
- log_record['level'] = log_record['level'].upper()
- else:
- log_record['level'] = record.levelname
-
-formatter = CustomJsonFormatter('%(timestamp)s %(level)s %(name)s %(message)s')
-```
-
-Items added to the log record will be included in *every* log message, no matter what the format requires.
-
-### Adding custom object serialization
-
-For custom handling of object serialization you can specify default json object translator or provide a custom encoder
-
-```python
-def json_translate(obj):
- if isinstance(obj, MyClass):
- return {"special": obj.special}
-
-formatter = jsonlogger.JsonFormatter(json_default=json_translate,
- json_encoder=json.JSONEncoder)
-logHandler.setFormatter(formatter)
-
-logger.info({"special": "value", "run": 12})
-logger.info("classic message", extra={"special": "value", "run": 12})
-```
-
-### Using a Config File
-
-To use the module with a config file using the [`fileConfig` function](https://docs.python.org/3/library/logging.config.html#logging.config.fileConfig), use the class `pythonjsonlogger.jsonlogger.JsonFormatter`. Here is a sample config file.
-
-```ini
-[loggers]
-keys = root,custom
-
-[logger_root]
-handlers =
-
-[logger_custom]
-level = INFO
-handlers = custom
-qualname = custom
-
-[handlers]
-keys = custom
-
-[handler_custom]
-class = StreamHandler
-level = INFO
-formatter = json
-args = (sys.stdout,)
-
-[formatters]
-keys = json
+Python JSON Logger enables you to produce JSON logs when using Python's `logging` package.
-[formatter_json]
-format = %(message)s
-class = pythonjsonlogger.jsonlogger.JsonFormatter
-```
+JSON logs are machine readable allowing for much easier parsing and ingestion into log aggregation tools.
-## Example Output
-Sample JSON with a full formatter (basically the log message from the unit test). Every log message will appear on 1 line like a typical logger.
+## Documentation
-```json
-{
- "threadName": "MainThread",
- "name": "root",
- "thread": 140735202359648,
- "created": 1336281068.506248,
- "process": 41937,
- "processName": "MainProcess",
- "relativeCreated": 9.100914001464844,
- "module": "tests",
- "funcName": "testFormatKeys",
- "levelno": 20,
- "msecs": 506.24799728393555,
- "pathname": "tests/tests.py",
- "lineno": 60,
- "asctime": ["12-05-05 22:11:08,506248"],
- "message": "testing logging format",
- "filename": "tests.py",
- "levelname": "INFO",
- "special": "value",
- "run": 12
-}
-```
+- [Documentation](https://nhairs.github.io/python-json-logger/latest/)
+- [Quickstart Guide](https://nhairs.github.io/python-json-logger/latest/quickstart/)
+- [Change Log](https://nhairs.github.io/python-json-logger/latest/changelog/)
+- [Contributing](https://nhairs.github.io/python-json-logger/latest/contributing/)
## License
diff --git a/SECURITY.md b/SECURITY.md
deleted file mode 100644
index d44d8f0..0000000
--- a/SECURITY.md
+++ /dev/null
@@ -1,10 +0,0 @@
-# Security Policy
-
-## Supported Versions
-
-**TLDR**: Security support is provided for Python versions `>=3.7`.
-
-
-## Reporting a Vulnerability
-
-Please report vulnerabilties using GitHub [here](https://github.com/nhairs/python-json-logger/security/advisories/new).
diff --git a/SECURITY.md b/SECURITY.md
new file mode 120000
index 0000000..6ac9ff1
--- /dev/null
+++ b/SECURITY.md
@@ -0,0 +1 @@
+docs/security.md
\ No newline at end of file
diff --git a/docs/changelog.md b/docs/changelog.md
new file mode 100644
index 0000000..03a9b71
--- /dev/null
+++ b/docs/changelog.md
@@ -0,0 +1,184 @@
+# Change Log
+All notable changes to this project will be documented in this file.
+
+The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
+and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+
+## [UNRELEASED]
+
+### Added
+- Support `DictConfigurator` prefixes for `rename_fields` and `static_fields`. [#45](https://github.com/nhairs/python-json-logger/pull/45)
+ - Allows using values like `ext://sys.stderr` in `fileConfig`/`dictConfig` value fields.
+
+Thanks @rubensa
+
+## [3.3.0](https://github.com/nhairs/python-json-logger/compare/v3.2.1...v3.3.0) - 2025-03-06
+
+### Added
+- `exc_info_as_array` and `stack_info_as_array` options are added to `pythonjsonlogger.core.BaseJsonFormatter` allowing both to be encoded as list of lines instead of a single multi-line string. [#35](https://github.com/nhairs/python-json-logger/issues/35)
+
+### Security
+- Remove `msgspec-python313-pre` from `dev` dependencies preventing potential RCE. Details: [GHSA-wmxh-pxcx-9w24](https://github.com/nhairs/python-json-logger/security/advisories/GHSA-wmxh-pxcx-9w24#advisory-comment-121307)
+
+Thanks @1hakusai1 and @omnigodz
+
+## [3.2.1](https://github.com/nhairs/python-json-logger/compare/v3.2.0...v3.2.1) - 2024-12-16
+
+### Fixed
+- Import error on `import pythonjsonlogger.jsonlogger` [#29](https://github.com/nhairs/python-json-logger/issues/29)
+
+
+## [3.2.0](https://github.com/nhairs/python-json-logger/compare/v3.1.0...v3.2.0) - 2024-12-11
+
+### Changed
+- `pythonjsonlogger.[ORJSON,MSGSPEC]_AVAILABLE` no longer imports the respective package when determining availability.
+- `pythonjsonlogger.[orjson,msgspec]` now throws a `pythonjsonlogger.exception.MissingPackageError` when required libraries are not available. These contain more information about what is missing whilst still being an `ImportError`.
+- `defaults` parameter is no longer ignored and now conforms to the standard library. Setting a defaults dictionary will add the specified keys if those keys do not exist in a record or weren't passed by the `extra` parameter when logging a message.
+- `typing_extensions` is only installed on Python version < 3.10.
+- Support Python 3.13
+ - `msgspec` has only been tested against pre-release versions.
+
+Thanks @cjwatson and @bharel
+
+## [3.1.0](https://github.com/nhairs/python-json-logger/compare/v3.0.1...v3.1.0) - 2024-05-28
+
+This splits common functionality out to allow supporting other JSON encoders. Although this is a large refactor, backwards compatibility has been maintained.
+
+### Added
+- `pythonjsonlogger.core` - more details below.
+- `pythonjsonlogger.defaults` module that provides many functions for handling unsupported types.
+- Orjson encoder support via `pythonjsonlogger.orjson.OrjsonFormatter` with the following additions:
+ - bytes are URL safe base64 encoded.
+ - Exceptions are "pretty printed" using the exception name and message e.g. `"ValueError: bad value passed"`
+ - Enum values use their value, Enum classes now return all values as a list.
+ - Tracebacks are supported
+  - Classes (aka types) are supported
+ - Will fallback on `__str__` if available, else `__repr__` if available, else will use `__could_not_encode__`
+- MsgSpec encoder support via `pythonjsonlogger.msgspec.MsgspecFormatter` with the following additions:
+ - Exceptions are "pretty printed" using the exception name and message e.g. `"ValueError: bad value passed"`
+ - Enum classes now return all values as a list.
+ - Tracebacks are supported
+  - Classes (aka types) are supported
+ - Will fallback on `__str__` if available, else `__repr__` if available, else will use `__could_not_encode__`
+  - Note: msgspec only supports enum values of type `int` or `str` [jcrist/msgspec#680](https://github.com/jcrist/msgspec/issues/680)
+
+### Changed
+- `pythonjsonlogger.jsonlogger` has been moved to `pythonjsonlogger.json` with core functionality moved to `pythonjsonlogger.core`.
+- `pythonjsonlogger.core.BaseJsonFormatter` properly supports all `logging.Formatter` arguments:
+ - `fmt` is unchanged.
+ - `datefmt` is unchanged.
+ - `style` can now support non-standard arguments by setting `validate` to `False`
+ - `validate` allows non-standard `style` arguments or prevents calling `validate` on standard `style` arguments.
+ - `default` is ignored.
+- `pythonjsonlogger.json.JsonFormatter` default encodings changed:
+ - bytes are URL safe base64 encoded.
+ - Exception formatting detected using `BaseException` instead of `Exception`. Now "pretty prints" the exception using the exception name and message e.g. `"ValueError: bad value passed"`
+ - Dataclasses are now supported
+ - Enum values now use their value, Enum classes now return all values as a list.
+ - Will fallback on `__str__` if available, else `__repr__` if available, else will use `__could_not_encode__`
+- Renaming fields now preserves order ([#7](https://github.com/nhairs/python-json-logger/issues/7)) and ignores missing fields ([#6](https://github.com/nhairs/python-json-logger/issues/6)).
+- Documentation
+ - Generated documentation using `mkdocs` is stored in `docs/`
+  - Documentation within `README.md` has been moved to `docs/index.md` and `docs/quickstart.md`.
+  - `CHANGELOG.md` has been moved to `docs/changelog.md`
+ - `SECURITY.md` has been moved and replaced with a symbolic link to `docs/security.md`.
+
+### Deprecated
+- `pythonjsonlogger.jsonlogger` is now `pythonjsonlogger.json`
+- `pythonjsonlogger.jsonlogger.RESERVED_ATTRS` is now `pythonjsonlogger.core.RESERVED_ATTRS`.
+- `pythonjsonlogger.jsonlogger.merge_record_extra` is now `pythonjsonlogger.core.merge_record_extra`.
+
+### Removed
+- Python 3.7 support dropped
+- `pythonjsonlogger.jsonlogger.JsonFormatter._str_to_fn` replaced with `pythonjsonlogger.core.str_to_object`.
+
+## [3.0.1](https://github.com/nhairs/python-json-logger/compare/v3.0.0...v3.0.1) - 2024-04-01
+
+### Fixes
+
+- Fix spelling of parameter `json_serialiser` -> `json_serializer` ([#8](https://github.com/nhairs/python-json-logger/issues/8)) - @juliangilbey
+
+## [3.0.0](https://github.com/nhairs/python-json-logger/compare/v2.0.7...v3.0.0) - 2024-03-25
+
+Note: using new major version to separate changes from this fork and the original (upstream). See [#1](https://github.com/nhairs/python-json-logger/issues/1) for details.
+
+### Changes
+- Update supported Python versions - @nhairs
+ - Drop 3.6
+ - The following versions are supported and tested:
+ - CPython 3.7-3.12 (ubuntu, windows, mac)
+    - PyPy 3.7-3.10 (ubuntu, windows, mac)
+ - `RESERVED_ATTRS` is now a list and version dependent
+- Fix `JsonFormatter.__init__` return type (`None`) - @nhairs
+- Moved to `pyproject.toml` - @nhairs
+- Update linting and testing - @nhairs
+ - Split lint and test steps in GHA
+ - Use validate-pyproject, black, pylint, mypy
+
+## [2.0.7](https://github.com/nhairs/python-json-logger/compare/v2.0.6...v2.0.7) - 2023-02-21
+### Changed
+- Fix inclusion of py.typed in pip packages - @sth
+- Added pytest support with test file rename. Migrated to assertEqual
+
+## [2.0.6](https://github.com/nhairs/python-json-logger/compare/v2.0.5...v2.0.6) - 2023-02-14
+### Changed
+- Parameter `rename_fields` in merge_record_extra is now optional - @afallou
+
+## [2.0.5](https://github.com/nhairs/python-json-logger/compare/v2.0.4...v2.0.5) - 2023-02-12
+### Added
+- Allow reserved attrs to be renamed - @henkhogan
+- Support added for Python 3.11
+- Now verifying builds in Pypy 3.9 as well
+- Type annotations are now in the package - @louis-jaris
+### Changed
+- Fix rename_fields for exc_info - @guilhermeferrari
+- Cleaned up test file for PEP8 - @lopagela
+- Cleaned up old Python 2 artifacts - @louis-jaris
+- Dropped Python 3.5 support - @idomozes
+- Moved type check via tox into 3.11 run only
+- Added test run in Python3.6 (will keep for a little while longer, but it's EOL so upgrade)
+
+## [2.0.4](https://github.com/nhairs/python-json-logger/compare/v2.0.3...v2.0.4) - 2022-07-11
+### Changed
+- Fix too strict regex for percentage style logging - @aberres
+
+## [2.0.3](https://github.com/nhairs/python-json-logger/compare/v2.0.2...v2.0.3) - 2022-07-08
+### Added
+- Add PEP 561 marker/basic mypy configuration. - @bringhurst
+- Workaround logging.LogRecord.msg type of string. - @bringhurst
+### Changed
+- Changed a link archive of the reference page in case it's down. - @ahonnecke
+- Removed unnecessary try-except around OrderedDict usage - @sozofaan
+- Update documentation link to json module + use https - @deronnax
+- Dropped 3.5 support. - @bringhurst
+
+## [2.0.2](https://github.com/nhairs/python-json-logger/compare/v2.0.1...v2.0.2) - 2021-07-27
+### Added
+- Officially supporting 3.9 - @felixonmars.
+- You can now add static fields to log objects - @cosimomeli.
+### Changed
+- Dropped 3.4 support.
+- Dropped Travis CI for Github Actions.
+- Wheel should build for python 3 instead of just 3.4 now.
+
+## [2.0.1](https://github.com/nhairs/python-json-logger/compare/v2.0.0...v2.0.1) - 2020-10-12
+### Added
+- Support Pypi long description - @ereli-cb
+### Changed
+- You can now rename output fields - @schlitzered
+
+## [2.0.0](https://github.com/nhairs/python-json-logger/compare/v0.1.11...v2.0.0) - 2020-09-26
+### Added
+- New Changelog
+- Added timezone support to timestamps - @lalten
+- Refactored log record to function - @georgysavva
+- Add python 3.8 support - @tommilligan
+### Removed
+- Support for Python 2.7
+- Debian directory
+
+## [0.1.11](https://github.com/nhairs/python-json-logger/compare/v0.1.10...v0.1.11) - 2019-03-29
+### Added
+- Support for Python 3.7
+### Changed
+- 'stack_info' flag in logging calls is now respected in JsonFormatter by [@ghShu](https://github.com/ghShu)
diff --git a/docs/contributing.md b/docs/contributing.md
new file mode 100644
index 0000000..9b58a93
--- /dev/null
+++ b/docs/contributing.md
@@ -0,0 +1,130 @@
+# Contributing
+
+Contributions are welcome!
+
+## Code of Conduct
+
+In general we follow the [Python Software Foundation Code of Conduct](https://policies.python.org/python.org/code-of-conduct/). Please note that we are not affiliated with the PSF.
+
+## Pull Request Process
+
+**0. Before you begin**
+
+If you're not familiar with contributing to open source software, [start by reading this guide](https://opensource.guide/how-to-contribute/).
+
+Be aware that anything you contribute will be licenced under [the project's licence](https://github.com/nhairs/python-json-logger/blob/main/LICENSE). If you are making a change as a part of your job, be aware that your employer might own your work and you'll need their permission in order to licence the code.
+
+### 1. Find something to work on
+
+Where possible it's best to stick to established issues where discussion has already taken place. Contributions that haven't come from a discussed issue are less likely to be accepted.
+
+The following are things that can be worked on without an existing issue:
+
+- Updating documentation. This includes fixing in-code documentation / comments, and the overall docs.
+- Small changes that don't change functionality such as refactoring or adding / updating tests.
+
+### 2. Fork the repository and make your changes
+
+#### Coding Style
+
+Before writing any code, please familiarize yourself with our [Python Style Guide](style-guide.md). This document outlines our coding conventions, formatting expectations, and common patterns used in the project. Adhering to this guide is crucial for maintaining code consistency and readability.
+
+While the style guide covers detailed conventions, always try to match the style of existing code in the module you are working on, especially regarding local patterns and structure.
+
+#### Development Setup
+
+All development tooling can be installed (usually into a virtual environment), using the `dev` optional dependency:
+
+```shell
+pip install -e '.[dev]'`
+```
+
+Before creating your pull request you'll want to format your code and run the linters and tests:
+
+```shell
+# Format
+black src tests
+
+# Lint
+pylint --output-format=colorized src
+mypy src tests
+
+# Tests
+pytest
+```
+
+The above commands (`black`, `pylint`, `mypy`, `pytest`) should all be run before submitting a pull request.
+
+If making changes to the documentation you can preview the changes locally using `mkdocs`. Changes to the `README.md` can be previewed using a tool like [`grip`](https://github.com/joeyespo/grip) (installable via `pip install grip`).
+
+```shell
+mkdocs serve
+# For README preview (after installing grip):
+# grip
+```
+
+!!! note
+ In general we will always squash merge pull requests so you do not need to worry about a "clean" commit history.
+
+### 3. Checklist
+
+Before pushing and creating your pull request, you should make sure you've done the following:
+
+- Updated any relevant tests.
+- Formatted your code and run the linters and tests.
+- Updated the version number in `pyproject.toml`. In general using a `.devN` suffix is acceptable.
+  This is not required for changes that do not affect the code such as documentation.
+- Add details of the changes to the change log (`docs/changelog.md`), creating a new section if needed.
+- Add notes for new / changed features in the relevant docstring.
+
+**4. Create your pull request**
+
+When creating your pull request be aware that the title and description will be used for the final commit so pay attention to them.
+
+Your pull request description should include the following:
+
+- Why the pull request is being made
+- Summary of changes
+- How the pull request was tested - especially if not covered by unit testing.
+
+Once you've submitted your pull request make sure that all CI jobs are passing. Pull requests with failing jobs will not be reviewed.
+
+### 5. Code review
+
+Your code will be reviewed by a maintainer.
+
+If you're not familiar with code review start by reading [this guide](https://google.github.io/eng-practices/review/).
+
+!!! tip "Remember you are not your work"
+
+ You might be asked to explain or justify your choices. This is not a criticism of your value as a person!
+
+ Often this is because there are multiple ways to solve the same problem and the reviewer would like to understand more about the way you solved.
+
+## Common Topics
+
+### Adding a new encoder
+
+New encoders may be added, however how popular / common a library is will be taken into consideration before being added. You should open an issue before creating a pull request.
+
+### Versioning and breaking compatibility
+
+This project uses semantic versioning.
+
+In general backwards compatibility is always preferred. This library is widely used and not particularly sophisticated and as such there must be a good reason for breaking changes.
+
+Feature changes MUST be compatible with all [security supported versions of Python](https://endoflife.date/python) and SHOULD be compatible with all unsupported versions of Python where [recent downloads over the last 90 days exceeds 5% of all downloads](https://pypistats.org/packages/python-json-logger).
+
+In general, only the latest `major.minor` version of Python JSON Logger is supported. Bug fixes and feature backports requiring a version branch may be considered but must be discussed with the maintainers first.
+
+See also [Security Policy](security.md).
+
+### Spelling
+
+The original implementation of this project used US spelling so it will continue to use US spelling for all code.
+
+Documentation is more flexible and may use a variety of English spellings.
+
+### Contacting the Maintainers
+
+In general it is preferred to keep communication to GitHub, e.g. through comments on issues and pull requests. If you do need to contact the maintainers privately, please do so using the email addresses in the maintainers section of the `pyproject.toml`.
diff --git a/docs/cookbook.md b/docs/cookbook.md
new file mode 100644
index 0000000..4b747c5
--- /dev/null
+++ b/docs/cookbook.md
@@ -0,0 +1,270 @@
+# Cookbook
+
+Recipes for common tasks.
+
+## Include all fields
+
+By default Python JSON Logger will not include fields [defined in the standard library](https://docs.python.org/3/library/logging.html#logrecord-attributes) unless they are included in the format. Manually including all these fields is tedious and Python version specific. Instead of adding them as explicit fields, we can add them implicitly by ensuring they are not in the `reserved_attrs` argument of the formatter.
+
+```python
+all_fields_formatter = JsonFormatter(reserved_attrs=[])
+```
+
+## Custom Styles
+
+It is possible to support custom `style`s by setting `validate=False` and overriding the `parse` method.
+
+For example:
+
+```python
+class CommaSupport(JsonFormatter):
+ def parse(self) -> list[str]:
+ if isinstance(self._style, str) and self._style == ",":
+ return self._fmt.split(",")
+ return super().parse()
+
+formatter = CommaSupport("message,asctime", style=",", validate=False)
+```
+
+## Modifying the logged data
+
+You can modify the `dict` of data that will be logged by overriding the `process_log_record` method to modify fields before they are serialized to JSON.
+
+```python
+class SillyFormatter(JsonFormatter):
+ def process_log_record(self, log_record):
+ new_record = {k[::-1]: v for k, v in log_record.items()}
+ return new_record
+```
+
+
+## Request / Trace IDs
+
+There are many ways to add consistent request IDs to your logging. The exact method will depend on your needs and application.
+
+```python
+## Common Setup
+## -----------------------------------------------------------------------------
+import logging
+import uuid
+from pythonjsonlogger.json import JsonFormatter
+
+logger = logging.getLogger("test")
+logger.setLevel(logging.INFO)
+handler = logging.StreamHandler()
+logger.addHandler(handler)
+```
+
+One method would be to inject the request ID into each log call using the `extra` argument.
+```python
+## Solution 1
+## -----------------------------------------------------------------------------
+formatter = JsonFormatter()
+handler.setFormatter(formatter)
+
+def main_1():
+ print("========== MAIN 1 ==========")
+ for i in range(3):
+ request_id = uuid.uuid4()
+ logger.info("loop start", extra={"request_id": request_id})
+ logger.info(f"loop {i}", extra={"request_id": request_id})
+ logger.info("loop end", extra={"request_id": request_id})
+ return
+
+main_1()
+```
+
+Another method would be to use a filter to modify the `LogRecord` attributes. This would also allow us to use it in any other standard logging machinery. For this example I've manually set a `REQUEST_ID` global and some helper functions, but you might already have stuff available to you; for example, if you're using a web-framework with baked in request IDs.
+
+This is based on the [logging cookbook filter recipe](https://docs.python.org/3/howto/logging-cookbook.html#using-filters-to-impart-contextual-information).
+
+```python
+## Solution 2
+## -----------------------------------------------------------------------------
+REQUEST_ID: str | None = None
+
+def get_request_id() -> str:
+ return REQUEST_ID
+
+def generate_request_id():
+ global REQUEST_ID
+ REQUEST_ID = str(uuid.uuid4())
+
+class RequestIdFilter(logging.Filter):
+ def filter(self, record):
+ record.request_id = get_request_id() # Add request_id to the LogRecord
+ return True
+
+request_id_filter = RequestIdFilter()
+logger.addFilter(request_id_filter)
+
+def main_2():
+ print("========== MAIN 2 ==========")
+ for i in range(3):
+ generate_request_id()
+ logger.info("loop start")
+ logger.info(f"loop {i}")
+ logger.info("loop end")
+ return
+
+main_2()
+
+logger.removeFilter(request_id_filter)
+```
+
+Another method would be to create a custom formatter class and override the `process_log_record` method. This allows us to inject fields into the record before we log it without modifying the original `LogRecord`.
+
+```python
+## Solution 3
+## -----------------------------------------------------------------------------
+# Reuse REQUEST_ID stuff from solution 2
+class MyFormatter(JsonFormatter):
+ def process_log_record(self, log_record):
+ log_record["request_id"] = get_request_id()
+ return log_record
+
+handler.setFormatter(MyFormatter())
+
+def main_3():
+ print("========== MAIN 3 ==========")
+ for i in range(3):
+ generate_request_id()
+ logger.info("loop start")
+ logger.info(f"loop {i}")
+ logger.info("loop end")
+ return
+
+main_3()
+```
+
+## Using `fileConfig`
+
+To use the module with a yaml config file using the [`fileConfig` function](https://docs.python.org/3/library/logging.config.html#logging.config.fileConfig), use the class `pythonjsonlogger.json.JsonFormatter`. Here is a sample config file:
+
+```yaml title="example_config.yaml"
+version: 1
+disable_existing_loggers: False
+formatters:
+ default:
+ "()": pythonjsonlogger.json.JsonFormatter
+ format: "%(asctime)s %(levelname)s %(name)s %(module)s %(funcName)s %(lineno)s %(message)s"
+ rename_fields:
+ "asctime": "timestamp"
+ "levelname": "status"
+ static_fields:
+ "service": ext://logging_config.PROJECT_NAME
+ "env": ext://logging_config.ENVIRONMENT
+ "version": ext://logging_config.PROJECT_VERSION
+ "app_log": "true"
+handlers:
+ default:
+ formatter: default
+ class: logging.StreamHandler
+ stream: ext://sys.stderr
+ access:
+ formatter: default
+ class: logging.StreamHandler
+ stream: ext://sys.stdout
+loggers:
+ uvicorn.error:
+ level: INFO
+ handlers:
+ - default
+ propagate: no
+ uvicorn.access:
+ level: INFO
+ handlers:
+ - access
+ propagate: no
+```
+
+You'll notice that we are using `ext://...` for the `static_fields`. This will load data from other modules such as the one below.
+
+```python title="logging_config.py"
+import importlib.metadata
+import os
+
+
+def get_version_metadata():
+ # https://stackoverflow.com/a/78082532
+ version = importlib.metadata.version(PROJECT_NAME)
+ return version
+
+
+PROJECT_NAME = 'test-api'
+PROJECT_VERSION = get_version_metadata()
+ENVIRONMENT = os.environ.get('ENVIRONMENT', 'dev')
+```
+
+## Logging Expensive to Compute Data
+
+By the nature of Python's logging library, the JSON formatters will only ever run in handlers which are enabled for the given log level. This saves the performance hit of constructing JSON that is never used - but what about the data we pass into the logger? There are two options available to us: using if statements to avoid the call altogether, or using lazy string evaluation libraries.
+
+!!! note
+ The below strategies will work for data passed in the `msg` and `extra` arguments.
+
+To avoid the logging calls we use `logger.isEnabledFor` to ensure that we only start constructing our log messages if the logger is enabled:
+
+```python
+import logging
+import time
+
+from pythonjsonlogger.json import JsonFormatter
+
+def expensive_to_compute():
+ time.sleep(5)
+ return "world"
+
+## Setup
+## -------------------------------------
+logger = logging.getLogger()
+handler = logging.StreamHandler()
+formatter = JsonFormatter()
+handler.setFormatter(formatter)
+logger.addHandler(handler)
+logger.setLevel(logging.INFO)
+
+## Log Using isEnabledFor
+## -------------------------------------
+start = time.time()
+if logger.isEnabledFor(logging.INFO):
+ logger.info(
+ {
+ "data": "hello {}".format(expensive_to_compute())
+ }
+ )
+print(f"Logging INFO using isEnabledFor took: {int(time.time() - start)}s")
+
+start = time.time()
+if logger.isEnabledFor(logging.DEBUG):
+ logger.debug(
+ {
+ "data": "hello {}".format(expensive_to_compute())
+ }
+ )
+print(f"Logging DEBUG using isEnabledFor took: {int(time.time() - start)}s")
+```
+
+For lazy string evaluation we can take advantage of the fact that the default JSON encoders included in this package will call `str` on unknown objects. We can use this to build our own lazy string evaluators, or we can use an existing external package. Pre-existing solutions include: [`lazy-string`](https://pypi.org/project/lazy-string/)'s `LazyString` or [`stringlike`](https://pypi.org/project/stringlike/)'s `CachedLazyString`.
+
+```python
+## Log Using lazy-string
+## -------------------------------------
+from lazy_string import LazyString as L
+
+start = time.time()
+logger.info(
+ {
+ "data": L("hello {}".format, L(expensive_to_compute))
+ }
+)
+print(f"Logging INFO using LazyString took: {int(time.time() - start)}s")
+
+start = time.time()
+logger.debug(
+ {
+ "data": L("hello {}".format, L(expensive_to_compute))
+ }
+)
+print(f"Logging DEBUG using LazyString took: {int(time.time() - start)}s")
+```
diff --git a/docs/index.md b/docs/index.md
new file mode 100644
index 0000000..4c39bfb
--- /dev/null
+++ b/docs/index.md
@@ -0,0 +1,72 @@
+# Python JSON Logger
+
+[](https://pypi.python.org/pypi/python-json-logger/)
+[](https://pypi.python.org/pypi/python-json-logger/)
+[](https://pypi.python.org/pypi/python-json-logger/)
+[](https://github.com/nhairs/python-json-logger)
+[](https://github.com/nhairs/python-json-logger)
+
+
+## Introduction
+
+Python JSON Logger enables you to produce JSON logs when using Python's `logging` package.
+
+JSON logs are machine readable allowing for much easier parsing and ingestion into log aggregation tools.
+
+This library assumes that you are familiar with the `logging` standard library package; if you are not you should start by reading the official [Logging HOWTO](https://docs.python.org/3/howto/logging.html).
+
+
+## Features
+
+- **Standard Library Compatible:** Implement JSON logging without modifying your existing log setup.
+- **Supports Multiple JSON Encoders:** In addition to the standard library's `json` module, also supports the [`orjson`][pythonjsonlogger.orjson], [`msgspec`][pythonjsonlogger.msgspec] JSON encoders.
+- **Fully Customizable Output Fields:** Control required, excluded, and static fields including automatically picking up custom attributes on `LogRecord` objects. Fields can be renamed before they are output.
+- **Encode Any Type:** Encoders are customized to ensure that something sane is logged for any input including those that aren't supported by default. For example formatting UUID objects into their string representation and bytes objects into a base 64 encoded string.
+
+## Getting Started
+
+Jump right in with our [Quickstart Guide](quickstart.md) to get `python-json-logger` integrated into your project quickly.
+
+Here's a small taste of what it looks like:
+
+```python title="Example Usage"
+import logging
+from pythonjsonlogger.json import JsonFormatter
+
+logger = logging.getLogger()
+logger.setLevel(logging.INFO)
+
+handler = logging.StreamHandler()
+handler.setFormatter(JsonFormatter())
+
+logger.addHandler(handler)
+
+logger.info("Logging using python-json-logger!", extra={"more_data": True})
+# {"message": "Logging using python-json-logger!", "more_data": true}
+```
+
+## Where to Go Next
+
+* **[Quickstart Guide](quickstart.md):** For installation and basic setup.
+* **[Cookbook](cookbook.md):** For more advanced usage patterns and recipes.
+* **API Reference:** Dive into the details of specific formatters, functions, and classes (see navigation menu).
+* **[Contributing Guidelines](contributing.md):** If you'd like to contribute to the project.
+* **[Changelog](changelog.md):** To see what's new in recent versions.
+
+## Project Information
+
+### Bugs, Feature Requests, etc.
+Please [submit an issue on GitHub](https://github.com/nhairs/python-json-logger/issues).
+
+In the case of bug reports, please help us help you by following best practices [^1^](https://marker.io/blog/write-bug-report/) [^2^](https://www.chiark.greenend.org.uk/~sgtatham/bugs.html).
+
+In the case of feature requests, please provide background to the problem you are trying to solve so that we can find a solution that makes the most sense for the library as well as your use case.
+
+### License
+This project is licensed under the BSD 2 Clause License - see the [LICENSE file](https://github.com/nhairs/python-json-logger/blob/main/LICENSE) on GitHub.
+
+### Authors and Maintainers
+This project was originally authored by [Zakaria Zajac](https://github.com/madzak) and our wonderful [contributors](https://github.com/nhairs/python-json-logger/graphs/contributors).
+
+It is currently maintained by:
+- [Nicholas Hairs](https://github.com/nhairs) - [nicholashairs.com](https://www.nicholashairs.com)
diff --git a/docs/quickstart.md b/docs/quickstart.md
new file mode 100644
index 0000000..3613aa4
--- /dev/null
+++ b/docs/quickstart.md
@@ -0,0 +1,150 @@
+# Quick Start
+
+## Installation
+
+!!! note
+ All versions of this fork use version `>=3.0.0`.
+
+ To use pre-fork versions use `python-json-logger<3`.
+
+### Install via pip
+
+```shell
+pip install python-json-logger
+```
+
+### Install from GitHub
+
+To install from [releases](https://github.com/nhairs/python-json-logger/releases) (including development releases), you can use the URL to the specific wheel.
+
+```shell
+# e.g. 3.0.0 wheel
+pip install 'python-json-logger@https://github.com/nhairs/python-json-logger/releases/download/v3.0.0/python_json_logger-3.0.0-py3-none-any.whl'
+```
+
+## Usage
+
+Python JSON Logger provides [`logging.Formatter`](https://docs.python.org/3/library/logging.html#logging.Formatter) classes that encode the logged message into JSON. Although [a variety of JSON encoders are supported](#alternate-json-encoders), the following examples will use the [JsonFormatter][pythonjsonlogger.json.JsonFormatter] which uses the `json` module from the standard library.
+
+### Integrating with Python's logging framework
+
+To produce JSON output, attach the formatter to a logging handler:
+
+```python
+import logging
+from pythonjsonlogger.json import JsonFormatter
+
+logger = logging.getLogger()
+
+logHandler = logging.StreamHandler()
+formatter = JsonFormatter()
+logHandler.setFormatter(formatter)
+logger.addHandler(logHandler)
+```
+
+### Output fields
+
+#### Required Fields
+You can control the logged fields by setting the `fmt` argument when creating the formatter. By default formatters will follow the same `style` of `fmt` as the `logging` module: `%`, `$`, and `{`. All [`LogRecord` attributes](https://docs.python.org/3/library/logging.html#logrecord-attributes) can be output using their name.
+
+```python
+formatter = JsonFormatter("{message}{asctime}{exc_info}", style="{")
+```
+
+#### Message Fields
+
+Instead of logging a string message you can log using a `dict`.
+
+```python
+logger.info({
+ "my_data": 1,
+ "message": "if you don't include this it will be an empty string",
+ "other_stuff": False,
+})
+```
+
+!!! warning
+ Be aware that if you log using a `dict`, other formatters may not be able to handle it.
+
+You can also add additional message fields using the `extra` argument.
+
+```python
+logger.info(
+ "this logs the same additional fields as above",
+ extra={
+ "my_data": 1,
+ "other_stuff": False,
+ },
+)
+```
+
+Finally, any non-standard attributes added to a `LogRecord` will also be included in the logged data. See [Cookbook: Request / Trace IDs](cookbook.md#request-trace-ids) for an example.
+
+#### Default Fields
+
+Default fields that are added to every log record prior to any other field can be set using the `defaults` argument.
+
+```python
+formatter = JsonFormatter(
+ defaults={"environment": "dev"}
+)
+# ...
+logger.info("this message will have environment=dev by default")
+logger.info("this overwrites the environment field", extra={"environment": "prod"})
+```
+
+#### Static Fields
+
+Static fields that are added to every log record can be set using the `static_fields` argument.
+
+```python
+formatter = JsonFormatter(
+ static_fields={"True gets logged on every record?": True}
+)
+```
+
+### Excluding fields
+
+You can prevent fields being added to the output data by adding them to `reserved_attrs`. By default all [`LogRecord` attributes](https://docs.python.org/3/library/logging.html#logrecord-attributes) are excluded.
+
+```python
+from pythonjsonlogger.core import RESERVED_ATTRS
+
+formatter = JsonFormatter(
+ reserved_attrs=RESERVED_ATTRS+["request_id", "my_other_field"]
+)
+```
+
+### Renaming fields
+
+You can rename fields using the `rename_fields` argument.
+
+```python
+formatter = JsonFormatter(
+ "{message}{levelname}",
+ style="{",
+ rename_fields={"levelname": "LEVEL"},
+)
+```
+
+### Custom object serialization
+
+Most formatters support `json_default` which is used to control how objects are serialized.
+
+```python
+def my_default(obj):
+ if isinstance(obj, MyClass):
+ return {"special": obj.special}
+
+formatter = JsonFormatter(json_default=my_default)
+```
+
+!!! note
+ When providing your own `json_default`, you likely want to call the original `json_default` for your encoder. Python JSON Logger provides custom default serializers for each encoder that tries very hard to ensure sane output is always logged.
+
+### Alternate JSON Encoders
+
+The following JSON encoders are also supported:
+
+- [orjson](https://github.com/ijl/orjson) - [pythonjsonlogger.orjson.OrjsonFormatter][]
+- [msgspec](https://github.com/jcrist/msgspec) - [pythonjsonlogger.msgspec.MsgspecFormatter][]
diff --git a/docs/security.md b/docs/security.md
new file mode 100644
index 0000000..f31e6e8
--- /dev/null
+++ b/docs/security.md
@@ -0,0 +1,13 @@
+# Security Policy
+
+## Supported Versions
+
+Security support for Python JSON Logger is provided for all [security supported versions of Python](https://endoflife.date/python) and for unsupported versions of Python where [recent downloads over the last 90 days exceeds 5% of all downloads](https://pypistats.org/packages/python-json-logger).
+
+
+As of 2024-04-24 security support is provided for Python versions `3.8+`.
+
+
+## Reporting a Vulnerability
+
+Please report vulnerabilities [using GitHub](https://github.com/nhairs/python-json-logger/security/advisories/new).
diff --git a/docs/style-guide.md b/docs/style-guide.md
new file mode 100644
index 0000000..ab217c5
--- /dev/null
+++ b/docs/style-guide.md
@@ -0,0 +1,131 @@
+# Python Style Guide
+
+This document outlines the coding style, conventions, and common patterns for the `python-json-logger` project. Adhering to this guide will help maintain code consistency, readability, and quality.
+
+## General Principles
+
+* **Readability Counts:** Write code that is easy for others (and your future self) to understand. This aligns with [PEP 20 (The Zen of Python)](https://peps.python.org/pep-0020/).
+* **Consistency:** Strive for consistency in naming, formatting, and structure throughout the codebase.
+* **Simplicity:** Prefer simple, straightforward solutions over overly complex ones.
+* **PEP 8:** Follow [PEP 8 (Style Guide for Python Code)](https://peps.python.org/pep-0008/) for all Python code. The automated tools mentioned below will enforce many of these rules. This guide highlights project-specific conventions or particularly important PEP 8 aspects.
+
+## Formatting and Linting
+
+We use automated tools to enforce a consistent code style and catch potential errors. These include:
+
+* **Black:** For opinionated code formatting.
+* **Pylint:** For static code analysis and error detection.
+* **MyPy:** For static type checking.
+
+Ensure these tools are run before committing code. Configuration for these tools can be found in `pyproject.toml`, `pylintrc`, and `mypy.ini` respectively. This guide primarily focuses on conventions not automatically verifiable by these tools.
+
+## Imports
+
+Imports should be structured into the following groups, separated by a blank line, and generally alphabetized within each group:
+
+1. **Future Imports:** e.g., `from __future__ import annotations`
+2. **Standard Library Imports:** e.g., `import sys`, `from datetime import datetime`
+3. **Installed (Third-Party) Library Imports:** e.g., `import pytest`
+4. **Application (Local) Imports:** e.g., `from .core import BaseJsonFormatter` (This project-specific pattern is crucial for internal organization).
+
+## Naming Conventions
+
+While PEP 8 covers most naming, we emphasize:
+
+* **Modules:** `lowercase_with_underscores.py`
+* **Packages:** `lowercase`
+* **Classes & Type Aliases:** `CapWords` (e.g., `BaseJsonFormatter`, `OptionalCallableOrStr`). This is standard, but explicitly stated for clarity.
+* **Constants:** `UPPERCASE_WITH_UNDERSCORES` (e.g., `RESERVED_ATTRS`). This is a project convention for module-level constants.
+
+(Functions, methods, and variables follow standard PEP 8 `lowercase_with_underscores`).
+
+## Comments
+
+* Use comments to explain non-obvious code, complex logic, or important design decisions. Avoid comments that merely restate what the code does.
+* For internal code organization within files, especially in longer modules or classes, use comments like `## Section Title ##` or `### Subsection Title ###` to delineate logical blocks of code (e.g., `## Parent Methods ##` as seen in `src/pythonjsonlogger/core.py`). This is distinct from Markdown headings used in this document.
+
+## Docstrings
+
+* All public modules, classes, functions, and methods **must** have docstrings.
+* We use `mkdocstrings` for generating API documentation, which defaults to the **Google Python Style Guide** for docstrings. Please adhere to this style. You can find the guide [here](https://google.github.io/styleguide/pyguide.html#38-comments-and-docstrings).
+* Docstrings should clearly explain the purpose, arguments, return values, and any exceptions raised.
+* **Project Convention:** Use the following markers to indicate changes over time:
+ * `*New in version_number*`: For features added in a specific version.
+ * `*Changed in version_number*`: For changes made in a specific version.
+ * `*Deprecated in version_number*`: For features deprecated in a specific version.
+
+ Example:
+ ```python
+ def my_function(param1: str, param2: int) -> bool:
+ """Does something interesting.
+
+ Args:
+ param1: The first parameter, a string.
+ param2: The second parameter, an integer.
+
+ Returns:
+ True if successful, False otherwise.
+
+ Raises:
+ ValueError: If param2 is negative.
+
+ *New in 3.1*
+ """
+ # ... function logic ...
+ return True # See 'Return Statements'
+ ```
+
+## Type Hinting
+
+* All new code **must** include type hints for function arguments, return types, and variables where appropriate, as per PEP 484.
+* Use standard types from the `typing` module.
+* **Project Convention:** For Python versions older than 3.10, use `typing_extensions.TypeAlias` for creating type aliases. For Python 3.10+, use `typing.TypeAlias` (e.g., `OptionalCallableOrStr: TypeAlias = ...`).
+
+## Return Statements
+
+* **Project Convention:** All functions and methods **must** have an explicit `return` statement.
+* If a function does not logically return a value, it should end with `return None` or simply `return`. This makes the intent clear and consistent across the codebase.
+
+ Example:
+ ```python
+ def process_data(data: dict) -> None:
+ """Processes the given data."""
+ # ... processing logic ...
+ print(data)
+ return # or return None
+ ```
+
+## Class Structure
+
+* Group methods logically within a class (e.g., initialization, public, protected/private, special methods).
+* The use of internal code comments like `## Parent Methods ##` (as seen in `src/pythonjsonlogger/core.py`) is encouraged for readability in complex classes.
+
+## Project-Specific Code Patterns and Idioms
+
+Familiarize yourself with these patterns commonly used in this project:
+
+* **Version-Specific Logic:** Using `sys.version_info` for compatibility:
+ ```python
+ if sys.version_info >= (3, 10):
+ # Python 3.10+ specific code
+ else:
+ # Code for older versions
+ ```
+* **Type Aliases for Clarity:** As mentioned in Type Hinting, using `TypeAlias` for complex type combinations improves readability.
+* **Custom Exceptions:** Defining custom exception classes for application-specific error conditions (e.g., `MissingPackageError` in `src/pythonjsonlogger/exception.py`).
+* **Helper/Utility Functions:** Encapsulating reusable logic in utility modules (e.g., functions in `src/pythonjsonlogger/utils.py`).
+* **Conditional Imports for Optional Dependencies:** The pattern in `src/pythonjsonlogger/__init__.py` for checking optional dependencies like `orjson` and `msgspec` using `package_is_available` from `utils.py`.
+
+## Testing
+
+This project uses `pytest` for testing. Adherence to good testing practices is crucial.
+
+* **Test Location:** Tests are located in the `tests/` directory.
+* **Test Naming:** Test files `test_*.py`; test functions `test_*`.
+* **Fixtures:** Utilize `pytest` fixtures (`@pytest.fixture`) for setup.
+ * **Project Pattern:** The `LoggingEnvironment` dataclass and `env` fixture in `tests/test_formatters.py` is a key pattern for testing logger behavior. Adapt this for similar scenarios.
+* **Parametrization:** Use `@pytest.mark.parametrize` extensively to cover multiple scenarios efficiently.
+* **Clarity and Focus:** Each test should be focused and its name descriptive.
+* **Assertions:** Use clear, specific `pytest` assertions.
+
+By following these guidelines, we can ensure that `python-json-logger` remains a high-quality, maintainable, and developer-friendly library.
diff --git a/mkdocs.yml b/mkdocs.yml
new file mode 100644
index 0000000..47003c1
--- /dev/null
+++ b/mkdocs.yml
@@ -0,0 +1,114 @@
+site_name: "Python JSON Logger"
+site_url: https://nhairs.github.io/python-json-logger
+repo_url: https://github.com/nhairs/python-json-logger
+edit_uri: tree/master/docs
+copyright: " Copyright © Python JSON Logger Contributors"
+watch:
+ - mkdocs.yml
+ - README.md
+ - src/pythonjsonlogger
+ - docs
+
+nav:
+ - "Home": index.md
+ - quickstart.md
+ - cookbook.md
+ - changelog.md
+ - security.md
+ - contributing.md
+ - API Reference:
+ - ... | reference/pythonjsonlogger/*
+
+theme:
+ name: material
+
+ icon:
+ logo: material/code-braces
+
+ features:
+ - navigation.instant
+ - navigation.sections
+ - navigation.indexes
+ - navigation.expand
+ - navigation.top
+ - content.code.annotate
+ - content.code.copy
+ - toc.follow
+
+ palette:
+ - media: "(prefers-color-scheme: light)"
+ primary: amber
+ scheme: default
+ toggle:
+ icon: material/weather-night
+ name: Switch to dark mode
+ - media: "(prefers-color-scheme: dark)"
+ primary: amber
+ scheme: slate
+ toggle:
+ icon: material/weather-sunny
+ name: Switch to light mode
+
+extra:
+ social:
+ - icon: fontawesome/brands/github
+ link: https://github.com/nhairs/python-json-logger
+ version:
+ provider: mike
+
+markdown_extensions:
+ - toc:
+ permalink: "🔗"
+ - admonition
+ - def_list
+ - mdx_truly_sane_lists
+ - pymdownx.highlight:
+ anchor_linenums: true
+ - pymdownx.inlinehilite
+ - pymdownx.snippets
+ - pymdownx.superfences
+ - pymdownx.details
+ - pymdownx.caret
+
+plugins:
+ - autorefs
+ - search:
+ lang: en
+ - awesome-pages:
+ collapse_single_pages: true
+ - gen-files:
+ scripts:
+ - scripts/gen_ref_nav.py
+ - mkdocstrings:
+ default_handler: python
+ handlers:
+ python:
+ paths:
+ - src
+ import:
+ - https://docs.python.org/3/objects.inv
+ # - https://mkdocstrings.github.io/objects.inv
+ # - https://mkdocstrings.github.io/griffe/objects.inv
+ options:
+ filters:
+ - "!^_"
+ heading_level: 1
+ inherited_members: true
+ merge_init_into_class: true
+ #preload_modules: []
+ separate_signature: true
+ show_root_heading: true
+ show_root_full_path: true
+ show_signature_annotations: true
+ show_symbol_type_heading: true
+ show_symbol_type_toc: true
+ signature_crossrefs: true
+ summary: true
+ unwrap_annotated: true
+ show_source: false
+ docstring_section_style: spacy
+ - literate-nav:
+ nav_file: SUMMARY.txt
+ - mike:
+ canonical_version: latest
+
diff --git a/pylintrc b/pylintrc
index c2f821e..3db6c3e 100644
--- a/pylintrc
+++ b/pylintrc
@@ -3,7 +3,7 @@
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code.
-extension-pkg-whitelist=
+extension-pkg-whitelist=orjson
# Add files or directories to the blacklist. They should be base names, not
# paths.
@@ -75,8 +75,9 @@ disable=raw-checker-failed,
# cases. Disable rules that can cause conflicts
line-too-long,
# Module docstrings are not required
- missing-module-docstring
+ missing-module-docstring,
## Project Disables
+ duplicate-code
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
diff --git a/pyproject.toml b/pyproject.toml
index 63266ee..fd1856d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,18 +4,21 @@ build-backend = "setuptools.build_meta"
[project]
name = "python-json-logger"
-version = "3.0.1"
+version = "4.0.0.dev0"
description = "JSON Log Formatter for the Python Logging Package"
authors = [
{name = "Zakaria Zajac", email = "zak@madzak.com"},
+ {name = "Nicholas Hairs", email = "info+python-json-logger@nicholashairs.com"},
]
maintainers = [
{name = "Nicholas Hairs", email = "info+python-json-logger@nicholashairs.com"},
]
# Dependency Information
-requires-python = ">=3.7"
-# dependencies = []
+requires-python = ">=3.8"
+dependencies = [
+ "typing_extensions;python_version<'3.10'",
+]
# Extra information
readme = "README.md"
@@ -26,30 +29,46 @@ classifiers = [
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3 :: Only",
- "Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
+ "Programming Language :: Python :: 3.13",
"Topic :: System :: Logging",
"Typing :: Typed",
]
[project.urls]
-# homepage = "https://nhairs.github.io/python-json-logger/latest/"
+Homepage = "https://nhairs.github.io/python-json-logger"
GitHub = "https://github.com/nhairs/python-json-logger"
[project.optional-dependencies]
-lint = [
+dev = [
+ ## Optional but required for dev
+ "orjson;implementation_name!='pypy'",
+ "msgspec;implementation_name!='pypy'",
+ ## Lint
"validate-pyproject[all]",
"black",
"pylint",
"mypy",
-]
-
-test = [
+ ## Test
"pytest",
+ "freezegun",
+ "backports.zoneinfo;python_version<'3.9'",
+ "tzdata",
+ ## Build
+ "build",
+ ## Docs
+ "mkdocs",
+ "mkdocs-material>=8.5",
+ "mkdocs-awesome-pages-plugin",
+ "mdx_truly_sane_lists",
+ "mkdocstrings[python]",
+ "mkdocs-gen-files",
+ "mkdocs-literate-nav",
+ "mike",
]
[tool.setuptools.packages.find]
diff --git a/scripts/gen_ref_nav.py b/scripts/gen_ref_nav.py
new file mode 100644
index 0000000..38175e4
--- /dev/null
+++ b/scripts/gen_ref_nav.py
@@ -0,0 +1,35 @@
+# NOTICE: This file is from mkdocstrings-python see NOTICE for details
+"""Generate the code reference pages and navigation."""
+
+from pathlib import Path
+
+import mkdocs_gen_files
+
+nav = mkdocs_gen_files.Nav()
+mod_symbol = '<code class="doc-symbol doc-symbol-nav doc-symbol-module"></code>'
+
+for path in sorted(Path("src").rglob("*.py")):
+ module_path = path.relative_to("src").with_suffix("")
+ doc_path = path.relative_to("src").with_suffix(".md")
+ full_doc_path = Path("reference", doc_path)
+
+ parts = tuple(module_path.parts)
+
+ if parts[-1] == "__init__":
+ parts = parts[:-1]
+ doc_path = doc_path.with_name("index.md")
+ full_doc_path = full_doc_path.with_name("index.md")
+ elif parts[-1].startswith("_"):
+ continue
+
+ nav_parts = [f"{mod_symbol} {part}" for part in parts]
+ nav[tuple(nav_parts)] = doc_path.as_posix()
+
+ with mkdocs_gen_files.open(full_doc_path, "w") as fd:
+ ident = ".".join(parts)
+ fd.write(f"::: {ident}")
+
+ mkdocs_gen_files.set_edit_path(full_doc_path, ".." / path)
+
+with mkdocs_gen_files.open("reference/SUMMARY.txt", "w") as nav_file:
+ nav_file.writelines(nav.build_literate_nav())
diff --git a/src/pythonjsonlogger/__init__.py b/src/pythonjsonlogger/__init__.py
index e69de29..298a3fe 100644
--- a/src/pythonjsonlogger/__init__.py
+++ b/src/pythonjsonlogger/__init__.py
@@ -0,0 +1,17 @@
+### IMPORTS
+### ============================================================================
+## Future
+
+## Standard Library
+import warnings
+
+## Installed
+
+## Application
+from . import json
+from . import utils
+
+### CONSTANTS
+### ============================================================================
+ORJSON_AVAILABLE = utils.package_is_available("orjson")
+MSGSPEC_AVAILABLE = utils.package_is_available("msgspec")
diff --git a/src/pythonjsonlogger/core.py b/src/pythonjsonlogger/core.py
new file mode 100644
index 0000000..1d6c252
--- /dev/null
+++ b/src/pythonjsonlogger/core.py
@@ -0,0 +1,403 @@
+"""Core functionality shared by all JSON loggers"""
+
+### IMPORTS
+### ============================================================================
+## Future
+from __future__ import annotations
+
+## Standard Library
+from datetime import datetime, timezone
+import importlib
+import logging
+import re
+import sys
+from typing import Optional, Union, Callable, List, Dict, Container, Any, Sequence
+
+if sys.version_info >= (3, 10):
+ from typing import TypeAlias
+else:
+ from typing_extensions import TypeAlias
+
+## Installed
+
+## Application
+
+
+### CONSTANTS
+### ============================================================================
+RESERVED_ATTRS: List[str] = [
+ "args",
+ "asctime",
+ "created",
+ "exc_info",
+ "exc_text",
+ "filename",
+ "funcName",
+ "levelname",
+ "levelno",
+ "lineno",
+ "module",
+ "msecs",
+ "message",
+ "msg",
+ "name",
+ "pathname",
+ "process",
+ "processName",
+ "relativeCreated",
+ "stack_info",
+ "thread",
+ "threadName",
+]
+"""Default reserved attributes.
+
+These come from the [default attributes of `LogRecord` objects](http://docs.python.org/library/logging.html#logrecord-attributes).
+
+Note:
+ Although considered a constant, this list is dependent on the Python version due to
+ different `LogRecord` objects having different attributes in different Python versions.
+
+*Changed in 3.0*: `RESERVED_ATTRS` is now `list[str]` instead of `tuple[str, ...]`.
+"""
+
+if sys.version_info >= (3, 12):
+ # taskName added in python 3.12
+ RESERVED_ATTRS.append("taskName")
+ RESERVED_ATTRS.sort()
+
+
+STYLE_STRING_TEMPLATE_REGEX = re.compile(r"\$\{(.+?)\}", re.IGNORECASE) # $ style
+STYLE_STRING_FORMAT_REGEX = re.compile(r"\{(.+?)\}", re.IGNORECASE) # { style
+STYLE_PERCENT_REGEX = re.compile(r"%\((.+?)\)", re.IGNORECASE) # % style
+
+## Type Aliases
+## -----------------------------------------------------------------------------
+OptionalCallableOrStr: TypeAlias = Optional[Union[Callable, str]]
+"""Type alias"""
+
+LogRecord: TypeAlias = Dict[str, Any]
+"""Type alias"""
+
+
+### FUNCTIONS
+### ============================================================================
+def str_to_object(obj: Any) -> Any:
+ """Import strings to an object, leaving non-strings as-is.
+
+ Args:
+ obj: the object or string to process
+
+ *New in 3.1*
+ """
+
+ if not isinstance(obj, str):
+ return obj
+
+ module_name, attribute_name = obj.rsplit(".", 1)
+ return getattr(importlib.import_module(module_name), attribute_name)
+
+
+def merge_record_extra(
+ record: logging.LogRecord,
+ target: Dict,
+ reserved: Container[str],
+ rename_fields: Optional[Dict[str, str]] = None,
+) -> Dict:
+ """
+ Merges extra attributes from LogRecord object into target dictionary
+
+ Args:
+ record: logging.LogRecord
+ target: dict to update
+ reserved: dict or list with reserved keys to skip
+ rename_fields: an optional dict, used to rename field names in the output.
+ e.g. Rename `levelname` to `log.level`: `{'levelname': 'log.level'}`
+
+ *Changed in 3.1*: `reserved` is now `Container[str]`.
+ """
+ if rename_fields is None:
+ rename_fields = {}
+ for key, value in record.__dict__.items():
+ # this allows to have numeric keys
+ if key not in reserved and not (hasattr(key, "startswith") and key.startswith("_")):
+ target[rename_fields.get(key, key)] = value
+ return target
+
+
+### CLASSES
+### ============================================================================
+class BaseJsonFormatter(logging.Formatter):
+ """Base class for all formatters
+
+ Must not be used directly.
+
+ *New in 3.1*
+
+ *Changed in 3.2*: `defaults` argument is no longer ignored.
+
+ *Added in 3.3*: `exc_info_as_array` and `stack_info_as_array` options are added.
+ """
+
+ _style: Union[logging.PercentStyle, str] # type: ignore[assignment]
+
+ ## Parent Methods
+ ## -------------------------------------------------------------------------
+ # pylint: disable=too-many-arguments,super-init-not-called
+ def __init__(
+ self,
+ fmt: Optional[str] = None,
+ datefmt: Optional[str] = None,
+ style: str = "%",
+ validate: bool = True,
+ *,
+ prefix: str = "",
+ rename_fields: Optional[Dict[str, str]] = None,
+ rename_fields_keep_missing: bool = False,
+ static_fields: Optional[Dict[str, Any]] = None,
+ reserved_attrs: Optional[Sequence[str]] = None,
+ timestamp: Union[bool, str] = False,
+ defaults: Optional[Dict[str, Any]] = None,
+ exc_info_as_array: bool = False,
+ stack_info_as_array: bool = False,
+ ) -> None:
+ """
+ Args:
+ fmt: string representing fields to log
+ datefmt: format to use when formatting `asctime` field
+ style: how to extract log fields from `fmt`
+ validate: validate `fmt` against style, if implementing a custom `style` you
+ must set this to `False`.
+ defaults: a dictionary containing default fields that are added before all other fields and
+ may be overridden. The supplied fields are still subject to `rename_fields`.
+ prefix: an optional string prefix added at the beginning of
+ the formatted string
+ rename_fields: an optional dict, used to rename field names in the output.
+ Rename `message` to `@message`: `{'message': '@message'}`
+ rename_fields_keep_missing: When renaming fields, include missing fields in the output.
+ static_fields: an optional dict, used to add fields with static values to all logs
+ reserved_attrs: an optional list of fields that will be skipped when
+ outputting json log record. Defaults to [all log record attributes][pythonjsonlogger.core.RESERVED_ATTRS].
+ timestamp: an optional string/boolean field to add a timestamp when
+ outputting the json log record. If string is passed, timestamp will be added
+ to log record using string as key. If True boolean is passed, timestamp key
+ will be "timestamp". Defaults to False/off.
+ exc_info_as_array: break the exc_info into a list of lines based on line breaks.
+ stack_info_as_array: break the stack_info into a list of lines based on line breaks.
+
+ *Changed in 3.1*:
+
+ - you can now use custom values for style by setting validate to `False`.
+ The value is stored in `self._style` as a string. The `parse` method will need to be
+ overridden in order to support the new style.
+ - Renaming fields now preserves the order that fields were added in and avoids adding
+ missing fields. The original behaviour, missing fields have a value of `None`, is still
+ available by setting `rename_fields_keep_missing` to `True`.
+ """
+ ## logging.Formatter compatibility
+ ## ---------------------------------------------------------------------
+ # Note: validate added in 3.8, defaults added in 3.10
+ if style in logging._STYLES:
+ _style = logging._STYLES[style][0](fmt) # type: ignore[operator]
+ if validate:
+ _style.validate()
+ self._style = _style
+ self._fmt = _style._fmt
+
+ elif not validate:
+ self._style = style
+ self._fmt = fmt
+
+ else:
+ raise ValueError(f"Style must be one of: {','.join(logging._STYLES.keys())}")
+
+ self.datefmt = datefmt
+
+ ## JSON Logging specific
+ ## ---------------------------------------------------------------------
+ self.prefix = prefix
+
+ # We recreate the dict in rename_fields and static_fields to support internal/external
+ # references which require getting the item to do the conversion.
+ # For more details see: https://github.com/nhairs/python-json-logger/pull/45
+ self.rename_fields = (
+ {key: rename_fields[key] for key in rename_fields} if rename_fields is not None else {}
+ )
+ self.static_fields = (
+ {key: static_fields[key] for key in static_fields} if static_fields is not None else {}
+ )
+
+ self.rename_fields_keep_missing = rename_fields_keep_missing
+ self.reserved_attrs = set(reserved_attrs if reserved_attrs is not None else RESERVED_ATTRS)
+ self.timestamp = timestamp
+
+ self._required_fields = self.parse()
+ self._skip_fields = set(self._required_fields)
+ self._skip_fields.update(self.reserved_attrs)
+ self.defaults = defaults if defaults is not None else {}
+ self.exc_info_as_array = exc_info_as_array
+ self.stack_info_as_array = stack_info_as_array
+ return
+
+ def format(self, record: logging.LogRecord) -> str:
+ """Formats a log record and serializes to json
+
+ Args:
+ record: the record to format
+ """
+ message_dict: Dict[str, Any] = {}
+ # TODO: logging.LogRecord.msg and logging.LogRecord.message in typeshed
+ # are always type of str. We shouldn't need to override that.
+ if isinstance(record.msg, dict):
+ message_dict = record.msg
+ record.message = ""
+ else:
+ record.message = record.getMessage()
+
+ # only format time if needed
+ if "asctime" in self._required_fields:
+ record.asctime = self.formatTime(record, self.datefmt)
+
+ # Display formatted exception, but allow overriding it in the
+ # user-supplied dict.
+ if record.exc_info and not message_dict.get("exc_info"):
+ message_dict["exc_info"] = self.formatException(record.exc_info)
+ if not message_dict.get("exc_info") and record.exc_text:
+ message_dict["exc_info"] = record.exc_text
+
+ # Display formatted record of stack frames
+ # default format is a string returned from :func:`traceback.print_stack`
+ if record.stack_info and not message_dict.get("stack_info"):
+ message_dict["stack_info"] = self.formatStack(record.stack_info)
+
+ log_record: LogRecord = {}
+ self.add_fields(log_record, record, message_dict)
+ log_record = self.process_log_record(log_record)
+
+ return self.serialize_log_record(log_record)
+
+ ## JSON Formatter Specific Methods
+ ## -------------------------------------------------------------------------
+ def parse(self) -> List[str]:
+ """Parses format string looking for substitutions
+
+ This method is responsible for returning a list of fields (as strings)
+ to include in all log messages.
+
+ You can support custom styles by overriding this method.
+
+ Returns:
+ list of fields to be extracted and serialized
+ """
+ if isinstance(self._style, logging.StringTemplateStyle):
+ formatter_style_pattern = STYLE_STRING_TEMPLATE_REGEX
+
+ elif isinstance(self._style, logging.StrFormatStyle):
+ formatter_style_pattern = STYLE_STRING_FORMAT_REGEX
+
+ elif isinstance(self._style, logging.PercentStyle):
+ # PercentStyle is parent class of StringTemplateStyle and StrFormatStyle
+ # so it must be checked last.
+ formatter_style_pattern = STYLE_PERCENT_REGEX
+
+ else:
+ raise ValueError(f"Style {self._style!r} is not supported")
+
+ if self._fmt:
+ return formatter_style_pattern.findall(self._fmt)
+
+ return []
+
+ def serialize_log_record(self, log_record: LogRecord) -> str:
+ """Returns the final representation of the log record.
+
+ Args:
+ log_record: the log record
+ """
+ return self.prefix + self.jsonify_log_record(log_record)
+
+ def add_fields(
+ self,
+ log_record: Dict[str, Any],
+ record: logging.LogRecord,
+ message_dict: Dict[str, Any],
+ ) -> None:
+ """Extract fields from a LogRecord for logging
+
+ This method can be overridden to implement custom logic for adding fields.
+
+ Args:
+ log_record: data that will be logged
+ record: the record to extract data from
+ message_dict: dictionary that was logged instead of a message. e.g
+ `logger.info({"is_this_message_dict": True})`
+ """
+ for field in self.defaults:
+ log_record[self._get_rename(field)] = self.defaults[field]
+
+ for field in self._required_fields:
+ log_record[self._get_rename(field)] = record.__dict__.get(field)
+
+ for data_dict in [self.static_fields, message_dict]:
+ for key, value in data_dict.items():
+ log_record[self._get_rename(key)] = value
+
+ merge_record_extra(
+ record,
+ log_record,
+ reserved=self._skip_fields,
+ rename_fields=self.rename_fields,
+ )
+
+ if self.timestamp:
+ key = self.timestamp if isinstance(self.timestamp, str) else "timestamp"
+ log_record[self._get_rename(key)] = datetime.fromtimestamp(
+ record.created, tz=timezone.utc
+ )
+
+ if self.rename_fields_keep_missing:
+ for field in self.rename_fields.values():
+ if field not in log_record:
+ log_record[field] = None
+ return
+
+ def _get_rename(self, key: str) -> str:
+ return self.rename_fields.get(key, key)
+
+ # Child Methods
+ # ..........................................................................
+ def jsonify_log_record(self, log_record: LogRecord) -> str:
+ """Convert this log record into a JSON string.
+
+ Child classes MUST override this method.
+
+ Args:
+ log_record: the data to serialize
+ """
+ raise NotImplementedError()
+
+ def process_log_record(self, log_record: LogRecord) -> LogRecord:
+ """Custom processing of the log record.
+
+ Child classes can override this method to alter the log record before it
+ is serialized.
+
+ Args:
+ log_record: incoming data
+ """
+ return log_record
+
+ def formatException(self, ei) -> Union[str, list[str]]: # type: ignore
+ """Format and return the specified exception information.
+
+    If exc_info_as_array is set to True, this method returns an array of strings.
+ """
+ exception_info_str = super().formatException(ei)
+ return exception_info_str.splitlines() if self.exc_info_as_array else exception_info_str
+
+ def formatStack(self, stack_info) -> Union[str, list[str]]: # type: ignore
+ """Format and return the specified stack information.
+
+    If stack_info_as_array is set to True, this method returns an array of strings.
+ """
+ stack_info_str = super().formatStack(stack_info)
+ return stack_info_str.splitlines() if self.stack_info_as_array else stack_info_str
diff --git a/src/pythonjsonlogger/defaults.py b/src/pythonjsonlogger/defaults.py
new file mode 100644
index 0000000..0a002a9
--- /dev/null
+++ b/src/pythonjsonlogger/defaults.py
@@ -0,0 +1,241 @@
+"""Collection of functions for building custom `json_default` functions.
+
+In general functions come in pairs of `use_x_default` and `x_default`, where the former is used
+to determine if you should call the latter.
+
+Most `use_x_default` functions also act as a [`TypeGuard`](https://mypy.readthedocs.io/en/stable/type_narrowing.html#user-defined-type-guards).
+"""
+
+### IMPORTS
+### ============================================================================
+## Future
+from __future__ import annotations
+
+## Standard Library
+import base64
+import dataclasses
+import datetime
+import enum
+import sys
+from types import TracebackType
+from typing import Any
+import traceback
+import uuid
+
+if sys.version_info >= (3, 10):
+ from typing import TypeGuard
+else:
+ from typing_extensions import TypeGuard
+
+## Installed
+
+## Application
+
+
+### FUNCTIONS
+### ============================================================================
+def unknown_default(obj: Any) -> str:
+ """Backup default function for any object type.
+
+ Will attempt to use `str` or `repr`. If both functions error will return
+ the string `"__could_not_encode__"`.
+
+ Args:
+ obj: object to handle
+ """
+ try:
+ return str(obj)
+ except Exception: # pylint: disable=broad-exception-caught
+ pass
+ try:
+ return repr(obj)
+ except Exception: # pylint: disable=broad-exception-caught
+ pass
+ return "__could_not_encode__"
+
+
+## Types
+## -----------------------------------------------------------------------------
+def use_type_default(obj: Any) -> TypeGuard[type]:
+ """Default check function for `type` objects (aka classes)."""
+ return isinstance(obj, type)
+
+
+def type_default(obj: type) -> str:
+ """Default function for `type` objects.
+
+ Args:
+ obj: object to handle
+ """
+ return obj.__name__
+
+
+## Dataclasses
+## -----------------------------------------------------------------------------
+def use_dataclass_default(obj: Any) -> bool:
+ """Default check function for dataclass instances"""
+ return dataclasses.is_dataclass(obj) and not isinstance(obj, type)
+
+
+def dataclass_default(obj) -> dict[str, Any]:
+ """Default function for dataclass instances
+
+ Args:
+ obj: object to handle
+ """
+ return dataclasses.asdict(obj)
+
+
+## Dates and Times
+## -----------------------------------------------------------------------------
+def use_time_default(obj: Any) -> TypeGuard[datetime.time]:
+ """Default check function for `datetime.time` instances"""
+ return isinstance(obj, datetime.time)
+
+
+def time_default(obj: datetime.time) -> str:
+ """Default function for `datetime.time` instances
+
+ Args:
+ obj: object to handle
+ """
+ return obj.isoformat()
+
+
+def use_date_default(obj: Any) -> TypeGuard[datetime.date]:
+ """Default check function for `datetime.date` instances"""
+ return isinstance(obj, datetime.date)
+
+
+def date_default(obj: datetime.date) -> str:
+ """Default function for `datetime.date` instances
+
+ Args:
+ obj: object to handle
+ """
+ return obj.isoformat()
+
+
+def use_datetime_default(obj: Any) -> TypeGuard[datetime.datetime]:
+ """Default check function for `datetime.datetime` instances"""
+ return isinstance(obj, datetime.datetime)
+
+
+def datetime_default(obj: datetime.datetime) -> str:
+ """Default function for `datetime.datetime` instances
+
+ Args:
+ obj: object to handle
+ """
+ return obj.isoformat()
+
+
+def use_datetime_any(obj: Any) -> TypeGuard[datetime.time | datetime.date | datetime.datetime]:
+ """Default check function for `datetime` related instances"""
+ return isinstance(obj, (datetime.time, datetime.date, datetime.datetime))
+
+
+def datetime_any(obj: datetime.time | datetime.date | datetime.datetime) -> str:
+ """Default function for `datetime` related instances
+
+ Args:
+ obj: object to handle
+ """
+ return obj.isoformat()
+
+
+## Exception and Tracebacks
+## -----------------------------------------------------------------------------
+def use_exception_default(obj: Any) -> TypeGuard[BaseException]:
+ """Default check function for exception instances.
+
+ Exception classes are not treated specially and should be handled by the
+ `[use_]type_default` functions.
+ """
+ return isinstance(obj, BaseException)
+
+
+def exception_default(obj: BaseException) -> str:
+ """Default function for exception instances
+
+ Args:
+ obj: object to handle
+ """
+ return f"{obj.__class__.__name__}: {obj}"
+
+
+def use_traceback_default(obj: Any) -> TypeGuard[TracebackType]:
+ """Default check function for tracebacks"""
+ return isinstance(obj, TracebackType)
+
+
+def traceback_default(obj: TracebackType) -> str:
+ """Default function for tracebacks
+
+ Args:
+ obj: object to handle
+ """
+ return "".join(traceback.format_tb(obj)).strip()
+
+
+## Enums
+## -----------------------------------------------------------------------------
+def use_enum_default(obj: Any) -> TypeGuard[enum.Enum | enum.EnumMeta]:
+ """Default check function for enums.
+
+ Supports both enum classes and enum values.
+ """
+ return isinstance(obj, (enum.Enum, enum.EnumMeta))
+
+
+def enum_default(obj: enum.Enum | enum.EnumMeta) -> Any | list[Any]:
+ """Default function for enums.
+
+ Supports both enum classes and enum values.
+
+ Args:
+ obj: object to handle
+ """
+ if isinstance(obj, enum.Enum):
+ return obj.value
+ return [e.value for e in obj] # type: ignore[var-annotated]
+
+
+## UUIDs
+## -----------------------------------------------------------------------------
+def use_uuid_default(obj: Any) -> TypeGuard[uuid.UUID]:
+ """Default check function for `uuid.UUID` instances"""
+ return isinstance(obj, uuid.UUID)
+
+
+def uuid_default(obj: uuid.UUID) -> str:
+ """Default function for `uuid.UUID` instances
+
+ Formats the UUID using "hyphen" format.
+
+ Args:
+ obj: object to handle
+ """
+ return str(obj)
+
+
+## Bytes
+## -----------------------------------------------------------------------------
+def use_bytes_default(obj: Any) -> TypeGuard[bytes | bytearray]:
+ """Default check function for bytes"""
+ return isinstance(obj, (bytes, bytearray))
+
+
+def bytes_default(obj: bytes | bytearray, url_safe: bool = True) -> str:
+ """Default function for bytes
+
+ Args:
+ obj: object to handle
+ url_safe: use URL safe base 64 character set.
+
+ Returns:
+ The byte data as a base 64 string.
+ """
+ if url_safe:
+ return base64.urlsafe_b64encode(obj).decode("utf8")
+ return base64.b64encode(obj).decode("utf8")
diff --git a/src/pythonjsonlogger/exception.py b/src/pythonjsonlogger/exception.py
new file mode 100644
index 0000000..1233f1a
--- /dev/null
+++ b/src/pythonjsonlogger/exception.py
@@ -0,0 +1,27 @@
+### IMPORTS
+### ============================================================================
+## Future
+from __future__ import annotations
+
+## Standard Library
+
+## Installed
+
+## Application
+
+
+### CLASSES
+### ============================================================================
+class PythonJsonLoggerError(Exception):
+    "Generic base class for all Python JSON Logger exceptions"
+
+
+class MissingPackageError(ImportError, PythonJsonLoggerError):
+ "A required package is missing"
+
+ def __init__(self, name: str, extras_name: str | None = None) -> None:
+ msg = f"The {name!r} package is required but could not be found."
+ if extras_name is not None:
+ msg += f" It can be installed using 'python-json-logger[{extras_name}]'."
+ super().__init__(msg)
+ return
diff --git a/src/pythonjsonlogger/json.py b/src/pythonjsonlogger/json.py
new file mode 100644
index 0000000..21e78d0
--- /dev/null
+++ b/src/pythonjsonlogger/json.py
@@ -0,0 +1,119 @@
+"""JSON formatter using the standard library's `json` for encoding.
+
+Module contains the `JsonFormatter` and a custom `JsonEncoder` which supports a greater
+variety of types.
+"""
+
+### IMPORTS
+### ============================================================================
+## Future
+from __future__ import annotations
+
+## Standard Library
+import datetime
+import json
+from typing import Any, Callable, Optional, Union
+import warnings
+
+## Application
+from . import core
+from . import defaults as d
+
+
+### CLASSES
+### ============================================================================
+class JsonEncoder(json.JSONEncoder):
+ """A custom encoder extending [json.JSONEncoder](https://docs.python.org/3/library/json.html#json.JSONEncoder)"""
+
+ def default(self, o: Any) -> Any:
+ if d.use_datetime_any(o):
+ return self.format_datetime_obj(o)
+
+ if d.use_exception_default(o):
+ return d.exception_default(o)
+
+ if d.use_traceback_default(o):
+ return d.traceback_default(o)
+
+ if d.use_enum_default(o):
+ return d.enum_default(o)
+
+ if d.use_bytes_default(o):
+ return d.bytes_default(o)
+
+ if d.use_dataclass_default(o):
+ return d.dataclass_default(o)
+
+ if d.use_type_default(o):
+ return d.type_default(o)
+
+ try:
+ return super().default(o)
+ except TypeError:
+ return d.unknown_default(o)
+
+ def format_datetime_obj(self, o: datetime.time | datetime.date | datetime.datetime) -> str:
+ """Format datetime objects found in `self.default`
+
+ This allows subclasses to change the datetime format without understanding the
+ internals of the default method.
+ """
+ return d.datetime_any(o)
+
+
+class JsonFormatter(core.BaseJsonFormatter):
+ """JSON formatter using the standard library's [`json`](https://docs.python.org/3/library/json.html) for encoding"""
+
+ def __init__(
+ self,
+ *args,
+ json_default: core.OptionalCallableOrStr = None,
+ json_encoder: core.OptionalCallableOrStr = None,
+ json_serializer: Union[Callable, str] = json.dumps,
+ json_indent: Optional[Union[int, str]] = None,
+ json_ensure_ascii: bool = True,
+ **kwargs,
+ ) -> None:
+ """
+ Args:
+ args: see [BaseJsonFormatter][pythonjsonlogger.core.BaseJsonFormatter]
+ json_default: a function for encoding non-standard objects
+ json_encoder: custom JSON encoder
+ json_serializer: a [`json.dumps`](https://docs.python.org/3/library/json.html#json.dumps)-compatible callable
+ that will be used to serialize the log record.
+ json_indent: indent parameter for the `json_serializer`
+ json_ensure_ascii: `ensure_ascii` parameter for the `json_serializer`
+ kwargs: see [BaseJsonFormatter][pythonjsonlogger.core.BaseJsonFormatter]
+ """
+ super().__init__(*args, **kwargs)
+
+ self.json_default = core.str_to_object(json_default)
+ self.json_encoder = core.str_to_object(json_encoder)
+ self.json_serializer = core.str_to_object(json_serializer)
+ self.json_indent = json_indent
+ self.json_ensure_ascii = json_ensure_ascii
+ if not self.json_encoder and not self.json_default:
+ self.json_encoder = JsonEncoder
+ return
+
+ def jsonify_log_record(self, log_record: core.LogRecord) -> str:
+ """Returns a json string of the log record."""
+ return self.json_serializer(
+ log_record,
+ default=self.json_default,
+ cls=self.json_encoder,
+ indent=self.json_indent,
+ ensure_ascii=self.json_ensure_ascii,
+ )
+
+
+### DEPRECATED COMPATIBILITY
+### ============================================================================
+def __getattr__(name: str):
+ if name == "RESERVED_ATTRS":
+ warnings.warn(
+ "RESERVED_ATTRS has been moved to pythonjsonlogger.core",
+ DeprecationWarning,
+ )
+ return core.RESERVED_ATTRS
+ raise AttributeError(f"module {__name__} has no attribute {name}")
diff --git a/src/pythonjsonlogger/jsonlogger.py b/src/pythonjsonlogger/jsonlogger.py
index 349564a..0b283b2 100644
--- a/src/pythonjsonlogger/jsonlogger.py
+++ b/src/pythonjsonlogger/jsonlogger.py
@@ -1,304 +1,18 @@
-"""
-This library is provided to allow standard python logging
-to output log data as JSON formatted strings
-"""
-
-import logging
-import json
-import re
-import traceback
-import importlib
-from datetime import date, datetime, time, timezone
-import sys
-from typing import Any, Callable, Dict, List, Optional, Tuple, Union
-
-from inspect import istraceback
-
-from collections import OrderedDict
-
-# skip natural LogRecord attributes
-# http://docs.python.org/library/logging.html#logrecord-attributes
-# Changed in 3.0.0, is now list[str] instead of tuple[str, ...]
-RESERVED_ATTRS: List[str] = [
- "args",
- "asctime",
- "created",
- "exc_info",
- "exc_text",
- "filename",
- "funcName",
- "levelname",
- "levelno",
- "lineno",
- "module",
- "msecs",
- "message",
- "msg",
- "name",
- "pathname",
- "process",
- "processName",
- "relativeCreated",
- "stack_info",
- "thread",
- "threadName",
-]
-
-if sys.version_info >= (3, 12):
- # taskName added in python 3.12
- RESERVED_ATTRS.append("taskName")
- RESERVED_ATTRS.sort()
-
-OptionalCallableOrStr = Optional[Union[Callable, str]]
-
-
-def merge_record_extra(
- record: logging.LogRecord,
- target: Dict,
- reserved: Union[Dict, List],
- rename_fields: Optional[Dict[str, str]] = None,
-) -> Dict:
- """
- Merges extra attributes from LogRecord object into target dictionary
-
- :param record: logging.LogRecord
- :param target: dict to update
- :param reserved: dict or list with reserved keys to skip
- :param rename_fields: an optional dict, used to rename field names in the output.
- Rename levelname to log.level: {'levelname': 'log.level'}
- """
- if rename_fields is None:
- rename_fields = {}
- for key, value in record.__dict__.items():
- # this allows to have numeric keys
- if key not in reserved and not (hasattr(key, "startswith") and key.startswith("_")):
- target[rename_fields.get(key, key)] = value
- return target
-
-
-class JsonEncoder(json.JSONEncoder):
- """
- A custom encoder extending the default JSONEncoder
- """
-
- def default(self, o: Any) -> Any:
- if isinstance(o, (date, datetime, time)):
- return self.format_datetime_obj(o)
-
- if istraceback(o):
- return "".join(traceback.format_tb(o)).strip()
-
- # pylint: disable=unidiomatic-typecheck
- if type(o) == Exception or isinstance(o, Exception) or type(o) == type:
- return str(o)
-
- try:
- return super().default(o)
-
- except TypeError:
- try:
- return str(o)
-
- except Exception: # pylint: disable=broad-exception-caught
- return None
-
- def format_datetime_obj(self, o):
- """Format datetime objects found in self.default
+"""Stub module retained for compatibility.
- This allows subclasses to change the datetime format without understanding the
- internals of the default method.
- """
- return o.isoformat()
-
-
-class JsonFormatter(logging.Formatter):
- """
- A custom formatter to format logging records as json strings.
- Extra values will be formatted as str() if not supported by
- json default encoder
- """
-
- # pylint: disable=too-many-arguments
- def __init__(
- self,
- *args: Any,
- json_default: OptionalCallableOrStr = None,
- json_encoder: OptionalCallableOrStr = None,
- json_serializer: Union[Callable, str] = json.dumps,
- json_indent: Optional[Union[int, str]] = None,
- json_ensure_ascii: bool = True,
- prefix: str = "",
- rename_fields: Optional[dict] = None,
- static_fields: Optional[dict] = None,
- reserved_attrs: Union[Tuple[str, ...], List[str], None] = None,
- timestamp: Union[bool, str] = False,
- **kwargs: Any,
- ) -> None:
- """
- :param json_default: a function for encoding non-standard objects
- as outlined in https://docs.python.org/3/library/json.html
- :param json_encoder: optional custom encoder
- :param json_serializer: a :meth:`json.dumps`-compatible callable
- that will be used to serialize the log record.
- :param json_indent: indent parameter for json.dumps
- :param json_ensure_ascii: ensure_ascii parameter for json.dumps
- :param prefix: an optional string prefix added at the beginning of
- the formatted string
- :param rename_fields: an optional dict, used to rename field names in the output.
- Rename message to @message: {'message': '@message'}
- :param static_fields: an optional dict, used to add fields with static values to all logs
- :param reserved_attrs: an optional list of fields that will be skipped when
- outputting json log record. Defaults to all log record attributes:
- http://docs.python.org/library/logging.html#logrecord-attributes
- :param timestamp: an optional string/boolean field to add a timestamp when
- outputting the json log record. If string is passed, timestamp will be added
- to log record using string as key. If True boolean is passed, timestamp key
- will be "timestamp". Defaults to False/off.
- """
- self.json_default = self._str_to_fn(json_default)
- self.json_encoder = self._str_to_fn(json_encoder)
- self.json_serializer = self._str_to_fn(json_serializer)
- self.json_indent = json_indent
- self.json_ensure_ascii = json_ensure_ascii
- self.prefix = prefix
- self.rename_fields = rename_fields or {}
- self.static_fields = static_fields or {}
- if reserved_attrs is None:
- reserved_attrs = RESERVED_ATTRS
- self.reserved_attrs = dict(zip(reserved_attrs, reserved_attrs))
- self.timestamp = timestamp
-
- # super(JsonFormatter, self).__init__(*args, **kwargs)
- logging.Formatter.__init__(self, *args, **kwargs)
- if not self.json_encoder and not self.json_default:
- self.json_encoder = JsonEncoder
-
- self._required_fields = self.parse()
- self._skip_fields = dict(zip(self._required_fields, self._required_fields))
- self._skip_fields.update(self.reserved_attrs)
- return
-
- def _str_to_fn(self, fn_as_str):
- """
- If the argument is not a string, return whatever was passed in.
- Parses a string such as package.module.function, imports the module
- and returns the function.
-
- :param fn_as_str: The string to parse. If not a string, return it.
- """
- if not isinstance(fn_as_str, str):
- return fn_as_str
-
- path, _, function = fn_as_str.rpartition(".")
- module = importlib.import_module(path)
- return getattr(module, function)
-
- def parse(self) -> List[str]:
- """
- Parses format string looking for substitutions
-
- This method is responsible for returning a list of fields (as strings)
- to include in all log messages.
- """
- if isinstance(self._style, logging.StringTemplateStyle):
- formatter_style_pattern = re.compile(r"\$\{(.+?)\}", re.IGNORECASE)
- elif isinstance(self._style, logging.StrFormatStyle):
- formatter_style_pattern = re.compile(r"\{(.+?)\}", re.IGNORECASE)
- # PercentStyle is parent class of StringTemplateStyle and StrFormatStyle so
- # it needs to be checked last.
- elif isinstance(self._style, logging.PercentStyle):
- formatter_style_pattern = re.compile(r"%\((.+?)\)", re.IGNORECASE)
- else:
- raise ValueError(f"Invalid format: {self._fmt!r}")
-
- if self._fmt:
- return formatter_style_pattern.findall(self._fmt)
- return []
-
- def add_fields(
- self,
- log_record: Dict[str, Any],
- record: logging.LogRecord,
- message_dict: Dict[str, Any],
- ) -> None:
- """
- Override this method to implement custom logic for adding fields.
- """
- for field in self._required_fields:
- log_record[field] = record.__dict__.get(field)
-
- log_record.update(self.static_fields)
- log_record.update(message_dict)
- merge_record_extra(
- record,
- log_record,
- reserved=self._skip_fields,
- rename_fields=self.rename_fields,
- )
-
- if self.timestamp:
- # TODO: Can this use isinstance instead?
- # pylint: disable=unidiomatic-typecheck
- key = self.timestamp if type(self.timestamp) == str else "timestamp"
- log_record[key] = datetime.fromtimestamp(record.created, tz=timezone.utc)
-
- self._perform_rename_log_fields(log_record)
- return
-
- def _perform_rename_log_fields(self, log_record: Dict[str, Any]) -> None:
- for old_field_name, new_field_name in self.rename_fields.items():
- log_record[new_field_name] = log_record[old_field_name]
- del log_record[old_field_name]
- return
-
- def process_log_record(self, log_record: Dict[str, Any]) -> Dict[str, Any]:
- """
- Override this method to implement custom logic
- on the possibly ordered dictionary.
- """
- return log_record
-
- def jsonify_log_record(self, log_record: Dict[str, Any]) -> str:
- """Returns a json string of the log record."""
- return self.json_serializer(
- log_record,
- default=self.json_default,
- cls=self.json_encoder,
- indent=self.json_indent,
- ensure_ascii=self.json_ensure_ascii,
- )
-
- def serialize_log_record(self, log_record: Dict[str, Any]) -> str:
- """Returns the final representation of the log record."""
- return self.prefix + self.jsonify_log_record(log_record)
+It retains access to old names whilst sending deprecation warnings.
+"""
- def format(self, record: logging.LogRecord) -> str:
- """Formats a log record and serializes to json"""
- message_dict: Dict[str, Any] = {}
- # TODO: logging.LogRecord.msg and logging.LogRecord.message in typeshed
- # are always type of str. We shouldn't need to override that.
- if isinstance(record.msg, dict):
- message_dict = record.msg
- record.message = ""
- else:
- record.message = record.getMessage()
- # only format time if needed
- if "asctime" in self._required_fields:
- record.asctime = self.formatTime(record, self.datefmt)
+# pylint: disable=wrong-import-position,unused-import
- # Display formatted exception, but allow overriding it in the
- # user-supplied dict.
- if record.exc_info and not message_dict.get("exc_info"):
- message_dict["exc_info"] = self.formatException(record.exc_info)
- if not message_dict.get("exc_info") and record.exc_text:
- message_dict["exc_info"] = record.exc_text
- # Display formatted record of stack frames
- # default format is a string returned from :func:`traceback.print_stack`
- if record.stack_info and not message_dict.get("stack_info"):
- message_dict["stack_info"] = self.formatStack(record.stack_info)
+import warnings
- log_record: Dict[str, Any] = OrderedDict()
- self.add_fields(log_record, record, message_dict)
- log_record = self.process_log_record(log_record)
+## Throw warning
+warnings.warn(
+ "pythonjsonlogger.jsonlogger has been moved to pythonjsonlogger.json",
+ DeprecationWarning,
+)
- return self.serialize_log_record(log_record)
+## Import names
+from .json import JsonFormatter, JsonEncoder
+from .core import RESERVED_ATTRS
diff --git a/src/pythonjsonlogger/msgspec.py b/src/pythonjsonlogger/msgspec.py
new file mode 100644
index 0000000..8646f85
--- /dev/null
+++ b/src/pythonjsonlogger/msgspec.py
@@ -0,0 +1,63 @@
+"""JSON Formatter using [`msgspec`](https://github.com/jcrist/msgspec)"""
+
+### IMPORTS
+### ============================================================================
+## Future
+from __future__ import annotations
+
+## Standard Library
+from typing import Any
+
+## Installed
+
+## Application
+from . import core
+from . import defaults as d
+from .utils import package_is_available
+
+# We import msgspec after checking it is available
+package_is_available("msgspec", throw_error=True)
+import msgspec.json # pylint: disable=wrong-import-position,wrong-import-order
+
+
+### FUNCTIONS
+### ============================================================================
+def msgspec_default(obj: Any) -> Any:
+ """msgspec default encoder function for non-standard types"""
+ if d.use_exception_default(obj):
+ return d.exception_default(obj)
+ if d.use_traceback_default(obj):
+ return d.traceback_default(obj)
+ if d.use_enum_default(obj):
+ return d.enum_default(obj)
+ if d.use_type_default(obj):
+ return d.type_default(obj)
+ return d.unknown_default(obj)
+
+
+### CLASSES
+### ============================================================================
+class MsgspecFormatter(core.BaseJsonFormatter):
+ """JSON formatter using [`msgspec.json.Encoder`](https://jcristharif.com/msgspec/api.html#msgspec.json.Encoder) for encoding."""
+
+ def __init__(
+ self,
+ *args,
+ json_default: core.OptionalCallableOrStr = msgspec_default,
+ **kwargs,
+ ) -> None:
+ """
+ Args:
+ args: see [BaseJsonFormatter][pythonjsonlogger.core.BaseJsonFormatter]
+ json_default: a function for encoding non-standard objects
+ kwargs: see [BaseJsonFormatter][pythonjsonlogger.core.BaseJsonFormatter]
+ """
+ super().__init__(*args, **kwargs)
+
+ self.json_default = core.str_to_object(json_default)
+ self._encoder = msgspec.json.Encoder(enc_hook=self.json_default)
+ return
+
+ def jsonify_log_record(self, log_record: core.LogRecord) -> str:
+ """Returns a json string of the log record."""
+ return self._encoder.encode(log_record).decode("utf8")
diff --git a/src/pythonjsonlogger/orjson.py b/src/pythonjsonlogger/orjson.py
new file mode 100644
index 0000000..16db842
--- /dev/null
+++ b/src/pythonjsonlogger/orjson.py
@@ -0,0 +1,71 @@
+"""JSON Formatter using [orjson](https://github.com/ijl/orjson)"""
+
+### IMPORTS
+### ============================================================================
+## Future
+from __future__ import annotations
+
+## Standard Library
+from typing import Any
+
+## Installed
+
+## Application
+from . import core
+from . import defaults as d
+from .utils import package_is_available
+
+# We import orjson after checking it is available
+package_is_available("orjson", throw_error=True)
+import orjson # pylint: disable=wrong-import-position,wrong-import-order
+
+
+### FUNCTIONS
+### ============================================================================
+def orjson_default(obj: Any) -> Any:
+ """orjson default encoder function for non-standard types"""
+ if d.use_exception_default(obj):
+ return d.exception_default(obj)
+ if d.use_traceback_default(obj):
+ return d.traceback_default(obj)
+ if d.use_bytes_default(obj):
+ return d.bytes_default(obj)
+ if d.use_enum_default(obj):
+ return d.enum_default(obj)
+ if d.use_type_default(obj):
+ return d.type_default(obj)
+ return d.unknown_default(obj)
+
+
+### CLASSES
+### ============================================================================
+class OrjsonFormatter(core.BaseJsonFormatter):
+ """JSON formatter using [orjson](https://github.com/ijl/orjson) for encoding."""
+
+ def __init__(
+ self,
+ *args,
+ json_default: core.OptionalCallableOrStr = orjson_default,
+ json_indent: bool = False,
+ **kwargs,
+ ) -> None:
+ """
+ Args:
+ args: see [BaseJsonFormatter][pythonjsonlogger.core.BaseJsonFormatter]
+ json_default: a function for encoding non-standard objects
+ json_indent: indent output with 2 spaces.
+ kwargs: see [BaseJsonFormatter][pythonjsonlogger.core.BaseJsonFormatter]
+ """
+ super().__init__(*args, **kwargs)
+
+ self.json_default = core.str_to_object(json_default)
+ self.json_indent = json_indent
+ return
+
+ def jsonify_log_record(self, log_record: core.LogRecord) -> str:
+ """Returns a json string of the log record."""
+ opt = orjson.OPT_NON_STR_KEYS
+ if self.json_indent:
+ opt |= orjson.OPT_INDENT_2
+
+ return orjson.dumps(log_record, default=self.json_default, option=opt).decode("utf8")
diff --git a/src/pythonjsonlogger/utils.py b/src/pythonjsonlogger/utils.py
new file mode 100644
index 0000000..d810a13
--- /dev/null
+++ b/src/pythonjsonlogger/utils.py
@@ -0,0 +1,40 @@
+"""Utilities for Python JSON Logger"""
+
+### IMPORTS
+### ============================================================================
+## Future
+from __future__ import annotations
+
+## Standard Library
+import importlib.util
+
+## Installed
+
+## Application
+from .exception import MissingPackageError
+
+
+### FUNCTIONS
+### ============================================================================
+def package_is_available(
+ name: str, *, throw_error: bool = False, extras_name: str | None = None
+) -> bool:
+ """Determine if the given package is available for import.
+
+ Args:
+ name: Import name of the package to check.
+ throw_error: Throw an error if the package is unavailable.
+ extras_name: Extra dependency name to use in `throw_error`'s message.
+
+ Raises:
+ MissingPackageError: When `throw_error` is `True` and the return value would be `False`
+
+ Returns:
+ If the package is available for import.
+ """
+ available = importlib.util.find_spec(name) is not None
+
+ if not available and throw_error:
+ raise MissingPackageError(name, extras_name)
+
+ return available
diff --git a/tests/test_deprecation.py b/tests/test_deprecation.py
new file mode 100644
index 0000000..a784aec
--- /dev/null
+++ b/tests/test_deprecation.py
@@ -0,0 +1,45 @@
+### IMPORTS
+### ============================================================================
+## Future
+from __future__ import annotations
+
+## Standard Library
+import subprocess
+import sys
+
+## Installed
+import pytest
+
+## Application
+import pythonjsonlogger
+
+
+### TESTS
+### ============================================================================
+def test_jsonlogger_deprecated():
+ with pytest.deprecated_call():
+ import pythonjsonlogger.jsonlogger
+ return
+
+
+def test_jsonlogger_reserved_attrs_deprecated():
+ with pytest.deprecated_call():
+ # Note: We use json instead of jsonlogger as jsonlogger will also produce
+ # a DeprecationWarning and we specifically want the one for RESERVED_ATTRS
+ pythonjsonlogger.json.RESERVED_ATTRS
+ return
+
+
+@pytest.mark.parametrize(
+ "command",
+ [
+ "from pythonjsonlogger import jsonlogger",
+ "import pythonjsonlogger.jsonlogger",
+ "from pythonjsonlogger.jsonlogger import JsonFormatter",
+ "from pythonjsonlogger.jsonlogger import RESERVED_ATTRS",
+ ],
+)
+def test_import(command: str):
+ output = subprocess.check_output([sys.executable, "-c", f"{command};print('OK')"])
+ assert output.strip() == b"OK"
+ return
diff --git a/tests/test_dictconfig.py b/tests/test_dictconfig.py
new file mode 100644
index 0000000..e956c03
--- /dev/null
+++ b/tests/test_dictconfig.py
@@ -0,0 +1,80 @@
+### IMPORTS
+### ============================================================================
+## Future
+from __future__ import annotations
+
+## Standard Library
+from dataclasses import dataclass
+import io
+import json
+import logging
+import logging.config
+from typing import Any, Generator
+
+## Installed
+import pytest
+
+### SETUP
+### ============================================================================
+_LOGGER_COUNT = 0
+EXT_VAL = 999
+
+LOGGING_CONFIG = {
+ "version": 1,
+ "disable_existing_loggers": False,
+ "formatters": {
+ "default": {
+ "()": "pythonjsonlogger.json.JsonFormatter",
+ "static_fields": {"ext-val": "ext://tests.test_dictconfig.EXT_VAL"},
+ }
+ },
+ "handlers": {
+ "default": {
+ "level": "DEBUG",
+ "formatter": "default",
+ "class": "logging.StreamHandler",
+ "stream": "ext://sys.stdout", # Default is stderr
+ },
+ },
+ "loggers": {
+ "": {"handlers": ["default"], "level": "WARNING", "propagate": False}, # root logger
+ },
+}
+
+
+@dataclass
+class LoggingEnvironment:
+ logger: logging.Logger
+ buffer: io.StringIO
+
+ def load_json(self) -> Any:
+ return json.loads(self.buffer.getvalue())
+
+
+@pytest.fixture
+def env() -> Generator[LoggingEnvironment, None, None]:
+ global _LOGGER_COUNT # pylint: disable=global-statement
+ _LOGGER_COUNT += 1
+ logging.config.dictConfig(LOGGING_CONFIG)
+ default_formatter = logging.root.handlers[0].formatter
+ logger = logging.getLogger(f"pythonjsonlogger.tests.{_LOGGER_COUNT}")
+ logger.setLevel(logging.DEBUG)
+ buffer = io.StringIO()
+ handler = logging.StreamHandler(buffer)
+ handler.setFormatter(default_formatter)
+ logger.addHandler(handler)
+ yield LoggingEnvironment(logger=logger, buffer=buffer)
+ logger.removeHandler(handler)
+ logger.setLevel(logging.NOTSET)
+ buffer.close()
+ return
+
+
+### TESTS
+### ============================================================================
+def test_external_reference_support(env: LoggingEnvironment):
+ env.logger.info("hello")
+ log_json = env.load_json()
+
+ assert log_json["ext-val"] == EXT_VAL
+ return
diff --git a/tests/test_formatters.py b/tests/test_formatters.py
new file mode 100644
index 0000000..050fc5e
--- /dev/null
+++ b/tests/test_formatters.py
@@ -0,0 +1,693 @@
+### IMPORTS
+### ============================================================================
+## Future
+from __future__ import annotations
+
+## Standard Library
+from dataclasses import dataclass
+import datetime
+import enum
+import io
+import json
+import logging
+import sys
+import traceback
+from types import TracebackType
+from typing import Any, Generator
+import uuid
+
+if sys.version_info >= (3, 9):
+ import zoneinfo
+else:
+ from backports import zoneinfo
+
+## Installed
+import freezegun
+import pytest
+
+## Application
+import pythonjsonlogger
+import pythonjsonlogger.defaults
+from pythonjsonlogger.core import RESERVED_ATTRS, BaseJsonFormatter, merge_record_extra
+from pythonjsonlogger.json import JsonFormatter
+
+if pythonjsonlogger.ORJSON_AVAILABLE:
+ from pythonjsonlogger.orjson import OrjsonFormatter
+
+if pythonjsonlogger.MSGSPEC_AVAILABLE:
+ from pythonjsonlogger.msgspec import MsgspecFormatter
+
+### SETUP
+### ============================================================================
+ALL_FORMATTERS: list[type[BaseJsonFormatter]] = [JsonFormatter]
+if pythonjsonlogger.ORJSON_AVAILABLE:
+ ALL_FORMATTERS.append(OrjsonFormatter)
+if pythonjsonlogger.MSGSPEC_AVAILABLE:
+ ALL_FORMATTERS.append(MsgspecFormatter)
+
+_LOGGER_COUNT = 0
+
+
+@dataclass
+class LoggingEnvironment:
+ logger: logging.Logger
+ buffer: io.StringIO
+ handler: logging.Handler
+
+ def set_formatter(self, formatter: BaseJsonFormatter) -> None:
+ self.handler.setFormatter(formatter)
+ return
+
+ def load_json(self) -> Any:
+ return json.loads(self.buffer.getvalue())
+
+
+@pytest.fixture
+def env() -> Generator[LoggingEnvironment, None, None]:
+ global _LOGGER_COUNT # pylint: disable=global-statement
+ _LOGGER_COUNT += 1
+ logger = logging.getLogger(f"pythonjsonlogger.tests.{_LOGGER_COUNT}")
+ logger.setLevel(logging.DEBUG)
+ buffer = io.StringIO()
+ handler = logging.StreamHandler(buffer)
+ logger.addHandler(handler)
+ yield LoggingEnvironment(logger=logger, buffer=buffer, handler=handler)
+ logger.removeHandler(handler)
+ logger.setLevel(logging.NOTSET)
+ buffer.close()
+ return
+
+
+def get_traceback_from_exception_followed_by_log_call(env_: LoggingEnvironment) -> str:
+ try:
+ raise Exception("test")
+ except Exception as e:
+ env_.logger.exception("hello")
+ str_traceback = traceback.format_exc()
+ # Formatter removes trailing new line
+ if str_traceback.endswith("\n"):
+ str_traceback = str_traceback[:-1]
+ return str_traceback
+
+
+class SomeClass:
+ def __init__(self, thing: int):
+ self.thing = thing
+ return
+
+
+class BrokenClass:
+ def __str__(self) -> str:
+ raise ValueError("hahah sucker")
+
+ def __repr__(self) -> str:
+ return self.__str__()
+
+
+@dataclass
+class SomeDataclass:
+ things: str
+ stuff: int
+ junk: bool
+
+
+try:
+ raise ValueError
+except ValueError as e:
+ STATIC_TRACEBACK = e.__traceback__
+ del e
+
+
+class MultiEnum(enum.Enum):
+ NONE = None
+ BOOL = False
+ STR = "somestring"
+ INT = 99
+ BYTES = b"some-bytes"
+
+
+NO_TEST = object()  # Sentinel
+
+
+### TESTS
+### ============================================================================
+def test_merge_record_extra():
+ record = logging.LogRecord(
+ "name", level=1, pathname="", lineno=1, msg="Some message", args=None, exc_info=None
+ )
+ output = merge_record_extra(record, target={"foo": "bar"}, reserved=[])
+ assert output["foo"] == "bar"
+ assert output["msg"] == "Some message"
+ return
+
+
+## Common Formatter Tests
+## -----------------------------------------------------------------------------
+@pytest.mark.parametrize("class_", ALL_FORMATTERS)
+def test_default_format(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
+ env.set_formatter(class_())
+
+ msg = "testing logging format"
+ env.logger.info(msg)
+
+ log_json = env.load_json()
+
+ assert log_json["message"] == msg
+ return
+
+
+@pytest.mark.parametrize("class_", ALL_FORMATTERS)
+def test_percentage_format(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
+ env.set_formatter(
+ class_(
+ # All kind of different styles to check the regex
+ "[%(levelname)8s] %(message)s %(filename)s:%(lineno)d %(asctime)"
+ )
+ )
+
+ msg = "testing logging format"
+ env.logger.info(msg)
+ log_json = env.load_json()
+
+ assert log_json["message"] == msg
+ assert log_json.keys() == {"levelname", "message", "filename", "lineno", "asctime"}
+ return
+
+
+@pytest.mark.parametrize("class_", ALL_FORMATTERS)
+def test_defaults_field(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
+ env.set_formatter(class_(defaults={"first": 1, "second": 2}))
+
+ env.logger.info("testing defaults field", extra={"first": 1234})
+ log_json = env.load_json()
+
+ assert log_json["first"] == 1234
+ assert log_json["second"] == 2
+ return
+
+
+@pytest.mark.parametrize("class_", ALL_FORMATTERS)
+def test_rename_base_field(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
+ env.set_formatter(class_(rename_fields={"message": "@message"}))
+
+ msg = "testing logging format"
+ env.logger.info(msg)
+ log_json = env.load_json()
+
+ assert log_json["@message"] == msg
+ return
+
+
+@pytest.mark.parametrize("class_", ALL_FORMATTERS)
+def test_rename_with_defaults(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
+ """Make sure that the default fields are also renamed."""
+ env.set_formatter(class_(rename_fields={"custom": "@custom"}, defaults={"custom": 1234}))
+
+ msg = "testing rename with defaults"
+ env.logger.info(msg)
+ log_json = env.load_json()
+
+ assert log_json["@custom"] == 1234
+ assert "custom" not in log_json
+ return
+
+
+@pytest.mark.parametrize("class_", ALL_FORMATTERS)
+def test_rename_missing(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
+ env.set_formatter(class_(rename_fields={"missing_field": "new_field"}))
+
+ msg = "test rename missing field"
+ env.logger.info(msg)
+ log_json = env.load_json()
+
+ assert log_json["message"] == msg
+ assert "missing_field" not in log_json
+ assert "new_field" not in log_json
+ return
+
+
+@pytest.mark.parametrize("class_", ALL_FORMATTERS)
+def test_rename_keep_missing(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
+ env.set_formatter(
+ class_(rename_fields={"missing_field": "new_field"}, rename_fields_keep_missing=True)
+ )
+
+ msg = "test keep rename missing field"
+ env.logger.info(msg)
+ log_json = env.load_json()
+
+ assert log_json["message"] == msg
+ assert "missing_field" not in log_json
+ assert log_json["new_field"] is None
+ return
+
+
+@pytest.mark.parametrize("class_", ALL_FORMATTERS)
+def test_rename_preserve_order(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
+ env.set_formatter(
+ class_("{levelname}{message}{asctime}", style="{", rename_fields={"levelname": "LEVEL"})
+ )
+
+ env.logger.info("testing logging rename order")
+ log_json = env.load_json()
+
+ assert list(log_json.keys())[0] == "LEVEL"
+ return
+
+
+@pytest.mark.parametrize("class_", ALL_FORMATTERS)
+def test_rename_once(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
+ env.set_formatter(
+ class_(
+ "{levelname}{message}{asctime}",
+ style="{",
+ rename_fields={"levelname": "LEVEL", "message": "levelname"},
+ )
+ )
+
+ msg = "something"
+ env.logger.info(msg)
+ log_json = env.load_json()
+
+ assert log_json["LEVEL"] == "INFO"
+ assert log_json["levelname"] == msg
+ return
+
+
+@pytest.mark.parametrize("class_", ALL_FORMATTERS)
+def test_add_static_fields(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
+ env.set_formatter(class_(static_fields={"log_stream": "kafka"}))
+
+ msg = "testing static fields"
+ env.logger.info(msg)
+ log_json = env.load_json()
+
+ assert log_json["log_stream"] == "kafka"
+ assert log_json["message"] == msg
+ return
+
+
+@pytest.mark.parametrize("class_", ALL_FORMATTERS)
+def test_format_keys(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
+ supported_keys = [
+ "asctime",
+ "created",
+ "filename",
+ "funcName",
+ "levelname",
+ "levelno",
+ "lineno",
+ "module",
+ "msecs",
+ "message",
+ "name",
+ "pathname",
+ "process",
+ "processName",
+ "relativeCreated",
+ "thread",
+ "threadName",
+ ]
+
+ log_format = lambda x: [f"%({i:s})s" for i in x]
+ custom_format = " ".join(log_format(supported_keys))
+
+ env.set_formatter(class_(custom_format))
+
+ msg = "testing logging format"
+ env.logger.info(msg)
+ log_json = env.load_json()
+
+ for key in supported_keys:
+ assert key in log_json
+ return
+
+
+@pytest.mark.parametrize("class_", ALL_FORMATTERS)
+def test_unknown_format_key(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
+ env.set_formatter(class_("%(unknown_key)s %(message)s"))
+ env.logger.info("testing unknown logging format")
+ # make sure no error occurs
+ return
+
+
+@pytest.mark.parametrize("class_", ALL_FORMATTERS)
+def test_log_dict(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
+ env.set_formatter(class_())
+
+ msg = {"text": "testing logging", "num": 1, 5: "9", "nested": {"more": "data"}}
+ env.logger.info(msg)
+ log_json = env.load_json()
+
+ assert log_json["text"] == msg["text"]
+ assert log_json["num"] == msg["num"]
+ assert log_json["5"] == msg[5]
+ assert log_json["nested"] == msg["nested"]
+ assert log_json["message"] == ""
+ return
+
+
+@pytest.mark.parametrize("class_", ALL_FORMATTERS)
+def test_log_dict_defaults(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
+ env.set_formatter(class_(defaults={"d1": 1234, "d2": "hello"}))
+
+ msg = {"d2": "world"}
+ env.logger.info(msg)
+ log_json = env.load_json()
+
+ assert log_json["d1"] == 1234
+ assert log_json["d2"] == "world"
+ return
+
+
+@pytest.mark.parametrize("class_", ALL_FORMATTERS)
+def test_log_extra(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
+ env.set_formatter(class_())
+
+ extra = {"text": "testing logging", "num": 1, 5: "9", "nested": {"more": "data"}}
+ env.logger.info("hello", extra=extra) # type: ignore[arg-type]
+ log_json = env.load_json()
+
+ assert log_json["text"] == extra["text"]
+ assert log_json["num"] == extra["num"]
+ assert log_json["5"] == extra[5]
+ assert log_json["nested"] == extra["nested"]
+ assert log_json["message"] == "hello"
+ return
+
+
+@pytest.mark.parametrize("class_", ALL_FORMATTERS)
+def test_custom_logic_adds_field(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
+ class CustomJsonFormatter(class_): # type: ignore[valid-type,misc]
+
+ def process_log_record(self, log_record):
+ log_record["custom"] = "value"
+ return super().process_log_record(log_record)
+
+ env.set_formatter(CustomJsonFormatter())
+ env.logger.info("message")
+ log_json = env.load_json()
+
+ assert log_json["custom"] == "value"
+ return
+
+
+@pytest.mark.parametrize("class_", ALL_FORMATTERS)
+def test_exc_info(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
+ env.set_formatter(class_())
+
+ expected_value = get_traceback_from_exception_followed_by_log_call(env)
+ log_json = env.load_json()
+
+ assert log_json["exc_info"] == expected_value
+ return
+
+
+@pytest.mark.parametrize("class_", ALL_FORMATTERS)
+def test_exc_info_renamed(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
+ env.set_formatter(class_("%(exc_info)s", rename_fields={"exc_info": "stack_trace"}))
+
+ expected_value = get_traceback_from_exception_followed_by_log_call(env)
+ log_json = env.load_json()
+
+ assert log_json["stack_trace"] == expected_value
+ assert "exc_info" not in log_json
+ return
+
+
+@pytest.mark.parametrize("class_", ALL_FORMATTERS)
+def test_exc_info_renamed_not_required(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
+ env.set_formatter(class_(rename_fields={"exc_info": "stack_trace"}))
+
+ expected_value = get_traceback_from_exception_followed_by_log_call(env)
+ log_json = env.load_json()
+
+ assert log_json["stack_trace"] == expected_value
+ assert "exc_info" not in log_json
+ return
+
+
+@pytest.mark.parametrize("class_", ALL_FORMATTERS)
+def test_exc_info_renamed_no_error(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
+ env.set_formatter(class_(rename_fields={"exc_info": "stack_trace"}))
+
+ env.logger.info("message")
+ log_json = env.load_json()
+
+ assert "stack_trace" not in log_json
+ assert "exc_info" not in log_json
+ return
+
+
+@pytest.mark.parametrize("class_", ALL_FORMATTERS)
+def test_custom_object_serialization(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
+ def encode_complex(z):
+ if isinstance(z, complex):
+ return (z.real, z.imag)
+ raise TypeError(f"Object of type {type(z)} is no JSON serializable")
+
+ env.set_formatter(class_(json_default=encode_complex)) # type: ignore[call-arg]
+
+ env.logger.info("foo", extra={"special": complex(3, 8)})
+ log_json = env.load_json()
+
+ assert log_json["special"] == [3.0, 8.0]
+ return
+
+
+@pytest.mark.parametrize("class_", ALL_FORMATTERS)
+def test_rename_reserved_attrs(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
+ log_format = lambda x: [f"%({i:s})s" for i in x]
+ reserved_attrs_map = {
+ "exc_info": "error.type",
+ "exc_text": "error.message",
+ "funcName": "log.origin.function",
+ "levelname": "log.level",
+ "module": "log.origin.file.name",
+ "processName": "process.name",
+ "threadName": "process.thread.name",
+ "msg": "log.message",
+ }
+
+ custom_format = " ".join(log_format(reserved_attrs_map.keys()))
+ reserved_attrs = [
+ attr for attr in RESERVED_ATTRS if attr not in list(reserved_attrs_map.keys())
+ ]
+ env.set_formatter(
+ class_(custom_format, reserved_attrs=reserved_attrs, rename_fields=reserved_attrs_map)
+ )
+
+ env.logger.info("message")
+ log_json = env.load_json()
+
+ for old_name, new_name in reserved_attrs_map.items():
+ assert new_name in log_json
+ assert old_name not in log_json
+ return
+
+
+@freezegun.freeze_time(datetime.datetime(2017, 7, 14, 2, 40))
+@pytest.mark.parametrize("class_", ALL_FORMATTERS)
+def test_default_encoder_with_timestamp(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
+ if (pythonjsonlogger.ORJSON_AVAILABLE and class_ is OrjsonFormatter) or (
+ pythonjsonlogger.MSGSPEC_AVAILABLE and class_ is MsgspecFormatter
+ ):
+ # FakeDatetime not supported
+ # https://github.com/ijl/orjson/issues/481
+ # https://github.com/jcrist/msgspec/issues/678
+ def json_default(obj: Any) -> Any:
+ if isinstance(obj, freezegun.api.FakeDate):
+ return obj.isoformat()
+ raise ValueError(f"Unexpected object: {obj!r}")
+
+ env.set_formatter(class_(timestamp=True, json_default=json_default)) # type: ignore[call-arg]
+ else:
+ env.set_formatter(class_(timestamp=True))
+
+ env.logger.info("Hello")
+ log_json = env.load_json()
+
+ assert log_json["timestamp"] == "2017-07-14T02:40:00+00:00"
+ return
+
+
+@pytest.mark.parametrize("class_", ALL_FORMATTERS)
+@pytest.mark.parametrize(
+ ["obj", "type_", "expected"],
+ [
+ ("somestring", str, "somestring"),
+ ("some unicode Привет", str, "some unicode Привет"),
+ (1234, int, 1234),
+ (1234.5, float, 1234.5),
+ (False, bool, False),
+ (None, type(None), None),
+ (b"some-bytes", str, "c29tZS1ieXRlcw=="),
+ (datetime.time(16, 45, 30, 100), str, "16:45:30.000100"),
+ (datetime.date(2024, 5, 5), str, "2024-05-05"),
+ (datetime.datetime(2024, 5, 5, 16, 45, 30, 100), str, "2024-05-05T16:45:30.000100"),
+ (
+ datetime.datetime(2024, 5, 5, 16, 45, 30, 100, zoneinfo.ZoneInfo("Australia/Sydney")),
+ str,
+ "2024-05-05T16:45:30.000100+10:00",
+ ),
+ (
+ uuid.UUID("urn:uuid:12345678-1234-5678-1234-567812345678"),
+ str,
+ "12345678-1234-5678-1234-567812345678",
+ ),
+ (Exception, str, "Exception"),
+ (Exception("Foo occurred"), str, "Exception: Foo occurred"),
+ (BaseException, str, "BaseException"),
+ (BaseException("BaseFoo occurred"), str, "BaseException: BaseFoo occurred"),
+ (STATIC_TRACEBACK, str, pythonjsonlogger.defaults.traceback_default(STATIC_TRACEBACK)), # type: ignore[arg-type]
+ (
+ SomeDataclass(things="le_things", stuff=99, junk=False),
+ dict,
+ {"things": "le_things", "stuff": 99, "junk": False},
+ ),
+ (SomeDataclass, str, "SomeDataclass"),
+ (SomeClass, str, "SomeClass"),
+ (SomeClass(1234), str, NO_TEST),
+ (BrokenClass(), str, "__could_not_encode__"),
+ (MultiEnum.NONE, type(None), None),
+ (MultiEnum.BOOL, bool, MultiEnum.BOOL.value),
+ (MultiEnum.STR, str, MultiEnum.STR.value),
+ (MultiEnum.INT, int, MultiEnum.INT.value),
+ (MultiEnum.BYTES, str, "c29tZS1ieXRlcw=="),
+ (MultiEnum, list, [None, False, "somestring", 99, "c29tZS1ieXRlcw=="]),
+ ],
+)
+def test_common_types_encoded(
+ env: LoggingEnvironment,
+ class_: type[BaseJsonFormatter],
+ obj: object,
+ type_: type,
+ expected: Any,
+):
+ ## Known bad cases
+ if pythonjsonlogger.MSGSPEC_AVAILABLE and class_ is MsgspecFormatter:
+ # Dataclass: https://github.com/jcrist/msgspec/issues/681
+ # Enum: https://github.com/jcrist/msgspec/issues/680
+ # These have been fixed in msgspec 0.19.0, however they also dropped python 3.8 support.
+ # https://github.com/jcrist/msgspec/releases/tag/0.19.0
+ if sys.version_info < (3, 9) and (
+ obj is SomeDataclass
+ or (
+ isinstance(obj, enum.Enum)
+ and obj in {MultiEnum.BYTES, MultiEnum.NONE, MultiEnum.BOOL}
+ )
+ ):
+ pytest.xfail()
+
+ ## Test
+ env.set_formatter(class_())
+ extra = {
+ "extra": obj,
+ "extra_dict": {"item": obj},
+ "extra_list": [obj],
+ }
+ env.logger.info("hello", extra=extra)
+ log_json = env.load_json()
+
+ assert isinstance(log_json["extra"], type_)
+ assert isinstance(log_json["extra_dict"]["item"], type_)
+ assert isinstance(log_json["extra_list"][0], type_)
+
+ if expected is NO_TEST:
+ return
+
+ if expected is None or isinstance(expected, bool):
+ assert log_json["extra"] is expected
+ assert log_json["extra_dict"]["item"] is expected
+ assert log_json["extra_list"][0] is expected
+ else:
+ assert log_json["extra"] == expected
+ assert log_json["extra_dict"]["item"] == expected
+ assert log_json["extra_list"][0] == expected
+ return
+
+
+@pytest.mark.parametrize("class_", ALL_FORMATTERS)
+def test_custom_default(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
+ def custom_default(obj):
+ if isinstance(obj, SomeClass):
+ return {"TYPE": obj.thing}
+ return None
+
+ env.set_formatter(class_(json_default=custom_default)) # type: ignore[call-arg]
+ env.logger.info("hello", extra={"extra": SomeClass(999)})
+ log_json = env.load_json()
+
+ assert log_json["extra"] == {"TYPE": 999}
+ return
+
+
+@pytest.mark.parametrize("class_", ALL_FORMATTERS)
+def test_exc_info_as_array(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
+ env.set_formatter(class_(exc_info_as_array=True))
+
+ try:
+ raise Exception("Error")
+ except BaseException:
+ env.logger.exception("Error occurs")
+ log_json = env.load_json()
+
+ assert isinstance(log_json["exc_info"], list)
+ return
+
+
+@pytest.mark.parametrize("class_", ALL_FORMATTERS)
+def test_exc_info_as_array_no_exc_info(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
+ env.set_formatter(class_(exc_info_as_array=True))
+
+ env.logger.info("hello")
+ log_json = env.load_json()
+
+ assert "exc_info" not in log_json
+ return
+
+
+@pytest.mark.parametrize("class_", ALL_FORMATTERS)
+def test_stack_info_as_array(env: LoggingEnvironment, class_: type[BaseJsonFormatter]):
+ env.set_formatter(class_(stack_info_as_array=True))
+
+ env.logger.info("hello", stack_info=True)
+ log_json = env.load_json()
+
+ assert isinstance(log_json["stack_info"], list)
+ return
+
+
+@pytest.mark.parametrize("class_", ALL_FORMATTERS)
+def test_stack_info_as_array_no_stack_info(
+ env: LoggingEnvironment, class_: type[BaseJsonFormatter]
+):
+ env.set_formatter(class_(stack_info_as_array=True))
+
+ env.logger.info("hello", stack_info=False)
+ log_json = env.load_json()
+
+ assert "stack_info" not in log_json
+ return
+
+
+## JsonFormatter Specific
+## -----------------------------------------------------------------------------
+def test_json_ensure_ascii_true(env: LoggingEnvironment):
+ env.set_formatter(JsonFormatter())
+ env.logger.info("Привет")
+
+ # Note: we don't use env.load_json as we want to know the raw output
+ msg = env.buffer.getvalue().split('"message": "', 1)[1].split('"', 1)[0]
+ assert msg == r"\u041f\u0440\u0438\u0432\u0435\u0442"
+ return
+
+
+def test_json_ensure_ascii_false(env: LoggingEnvironment):
+ env.set_formatter(JsonFormatter(json_ensure_ascii=False))
+ env.logger.info("Привет")
+
+ # Note: we don't use env.load_json as we want to know the raw output
+ msg = env.buffer.getvalue().split('"message": "', 1)[1].split('"', 1)[0]
+ assert msg == "Привет"
+ return
diff --git a/tests/test_jsonlogger.py b/tests/test_jsonlogger.py
deleted file mode 100644
index abd04ba..0000000
--- a/tests/test_jsonlogger.py
+++ /dev/null
@@ -1,309 +0,0 @@
-import datetime
-import logging
-from io import StringIO
-import json
-import random
-import sys
-import traceback
-import unittest
-import unittest.mock
-
-sys.path.append("src/python-json-logger")
-from pythonjsonlogger import jsonlogger
-
-
-class TestJsonLogger(unittest.TestCase):
- def setUp(self):
- self.log = logging.getLogger(f"logging-test-{random.randint(1, 101)}")
- self.log.setLevel(logging.DEBUG)
- self.buffer = StringIO()
-
- self.log_handler = logging.StreamHandler(self.buffer)
- self.log.addHandler(self.log_handler)
-
- def test_default_format(self):
- fr = jsonlogger.JsonFormatter()
- self.log_handler.setFormatter(fr)
-
- msg = "testing logging format"
- self.log.info(msg)
- log_json = json.loads(self.buffer.getvalue())
-
- self.assertEqual(log_json["message"], msg)
-
- def test_percentage_format(self):
- fr = jsonlogger.JsonFormatter(
- # All kind of different styles to check the regex
- "[%(levelname)8s] %(message)s %(filename)s:%(lineno)d %(asctime)"
- )
- self.log_handler.setFormatter(fr)
-
- msg = "testing logging format"
- self.log.info(msg)
- log_json = json.loads(self.buffer.getvalue())
-
- self.assertEqual(log_json["message"], msg)
- self.assertEqual(log_json.keys(), {"levelname", "message", "filename", "lineno", "asctime"})
-
- def test_rename_base_field(self):
- fr = jsonlogger.JsonFormatter(rename_fields={"message": "@message"})
- self.log_handler.setFormatter(fr)
-
- msg = "testing logging format"
- self.log.info(msg)
- log_json = json.loads(self.buffer.getvalue())
-
- self.assertEqual(log_json["@message"], msg)
-
- def test_rename_nonexistent_field(self):
- fr = jsonlogger.JsonFormatter(rename_fields={"nonexistent_key": "new_name"})
- self.log_handler.setFormatter(fr)
-
- stderr_watcher = StringIO()
- sys.stderr = stderr_watcher
- self.log.info("testing logging rename")
-
- self.assertTrue("KeyError: 'nonexistent_key'" in stderr_watcher.getvalue())
-
- def test_add_static_fields(self):
- fr = jsonlogger.JsonFormatter(static_fields={"log_stream": "kafka"})
-
- self.log_handler.setFormatter(fr)
-
- msg = "testing static fields"
- self.log.info(msg)
- log_json = json.loads(self.buffer.getvalue())
-
- self.assertEqual(log_json["log_stream"], "kafka")
- self.assertEqual(log_json["message"], msg)
-
- def test_format_keys(self):
- supported_keys = [
- "asctime",
- "created",
- "filename",
- "funcName",
- "levelname",
- "levelno",
- "lineno",
- "module",
- "msecs",
- "message",
- "name",
- "pathname",
- "process",
- "processName",
- "relativeCreated",
- "thread",
- "threadName",
- ]
-
- log_format = lambda x: [f"%({i:s})s" for i in x]
- custom_format = " ".join(log_format(supported_keys))
-
- fr = jsonlogger.JsonFormatter(custom_format)
- self.log_handler.setFormatter(fr)
-
- msg = "testing logging format"
- self.log.info(msg)
- log_msg = self.buffer.getvalue()
- log_json = json.loads(log_msg)
-
- for supported_key in supported_keys:
- if supported_key in log_json:
- self.assertTrue(True)
-
- def test_unknown_format_key(self):
- fr = jsonlogger.JsonFormatter("%(unknown_key)s %(message)s")
-
- self.log_handler.setFormatter(fr)
- msg = "testing unknown logging format"
- try:
- self.log.info(msg)
- except Exception:
- self.assertTrue(False, "Should succeed")
-
- def test_log_adict(self):
- fr = jsonlogger.JsonFormatter()
- self.log_handler.setFormatter(fr)
-
- msg = {"text": "testing logging", "num": 1, 5: "9", "nested": {"more": "data"}}
-
- self.log.info(msg)
- log_json = json.loads(self.buffer.getvalue())
- self.assertEqual(log_json.get("text"), msg["text"])
- self.assertEqual(log_json.get("num"), msg["num"])
- self.assertEqual(log_json.get("5"), msg[5])
- self.assertEqual(log_json.get("nested"), msg["nested"])
- self.assertEqual(log_json["message"], "")
-
- def test_log_extra(self):
- fr = jsonlogger.JsonFormatter()
- self.log_handler.setFormatter(fr)
-
- extra = {"text": "testing logging", "num": 1, 5: "9", "nested": {"more": "data"}}
- self.log.info("hello", extra=extra)
- log_json = json.loads(self.buffer.getvalue())
- self.assertEqual(log_json.get("text"), extra["text"])
- self.assertEqual(log_json.get("num"), extra["num"])
- self.assertEqual(log_json.get("5"), extra[5])
- self.assertEqual(log_json.get("nested"), extra["nested"])
- self.assertEqual(log_json["message"], "hello")
-
- def test_json_default_encoder(self):
- fr = jsonlogger.JsonFormatter()
- self.log_handler.setFormatter(fr)
-
- msg = {
- "adate": datetime.datetime(1999, 12, 31, 23, 59),
- "otherdate": datetime.date(1789, 7, 14),
- "otherdatetime": datetime.datetime(1789, 7, 14, 23, 59),
- "otherdatetimeagain": datetime.datetime(1900, 1, 1),
- }
- self.log.info(msg)
- log_json = json.loads(self.buffer.getvalue())
- self.assertEqual(log_json.get("adate"), "1999-12-31T23:59:00")
- self.assertEqual(log_json.get("otherdate"), "1789-07-14")
- self.assertEqual(log_json.get("otherdatetime"), "1789-07-14T23:59:00")
- self.assertEqual(log_json.get("otherdatetimeagain"), "1900-01-01T00:00:00")
-
- @unittest.mock.patch("time.time", return_value=1500000000.0)
- def test_json_default_encoder_with_timestamp(self, time_mock):
- fr = jsonlogger.JsonFormatter(timestamp=True)
- self.log_handler.setFormatter(fr)
-
- self.log.info("Hello")
-
- self.assertTrue(time_mock.called)
- log_json = json.loads(self.buffer.getvalue())
- self.assertEqual(log_json.get("timestamp"), "2017-07-14T02:40:00+00:00")
-
- def test_json_custom_default(self):
- def custom(o):
- return "very custom"
-
- fr = jsonlogger.JsonFormatter(json_default=custom)
- self.log_handler.setFormatter(fr)
-
- msg = {"adate": datetime.datetime(1999, 12, 31, 23, 59), "normal": "value"}
- self.log.info(msg)
- log_json = json.loads(self.buffer.getvalue())
- self.assertEqual(log_json.get("adate"), "very custom")
- self.assertEqual(log_json.get("normal"), "value")
-
- def test_json_custom_logic_adds_field(self):
- class CustomJsonFormatter(jsonlogger.JsonFormatter):
-
- def process_log_record(self, log_record):
- log_record["custom"] = "value"
- # Old Style "super" since Python 2.6's logging.Formatter is old
- # style
- return jsonlogger.JsonFormatter.process_log_record(self, log_record)
-
- self.log_handler.setFormatter(CustomJsonFormatter())
- self.log.info("message")
- log_json = json.loads(self.buffer.getvalue())
- self.assertEqual(log_json.get("custom"), "value")
-
- def get_traceback_from_exception_followed_by_log_call(self) -> str:
- try:
- raise Exception("test")
- except Exception:
- self.log.exception("hello")
- str_traceback = traceback.format_exc()
- # Formatter removes trailing new line
- if str_traceback.endswith("\n"):
- str_traceback = str_traceback[:-1]
-
- return str_traceback
-
- def test_exc_info(self):
- fr = jsonlogger.JsonFormatter()
- self.log_handler.setFormatter(fr)
- expected_value = self.get_traceback_from_exception_followed_by_log_call()
-
- log_json = json.loads(self.buffer.getvalue())
- self.assertEqual(log_json.get("exc_info"), expected_value)
-
- def test_exc_info_renamed(self):
- fr = jsonlogger.JsonFormatter("%(exc_info)s", rename_fields={"exc_info": "stack_trace"})
- self.log_handler.setFormatter(fr)
- expected_value = self.get_traceback_from_exception_followed_by_log_call()
-
- log_json = json.loads(self.buffer.getvalue())
- self.assertEqual(log_json.get("stack_trace"), expected_value)
- self.assertEqual(log_json.get("exc_info"), None)
-
- def test_ensure_ascii_true(self):
- fr = jsonlogger.JsonFormatter()
- self.log_handler.setFormatter(fr)
- self.log.info("Привет")
- msg = self.buffer.getvalue().split('"message": "', 1)[1].split('"', 1)[0]
- self.assertEqual(msg, r"\u041f\u0440\u0438\u0432\u0435\u0442")
-
- def test_ensure_ascii_false(self):
- fr = jsonlogger.JsonFormatter(json_ensure_ascii=False)
- self.log_handler.setFormatter(fr)
- self.log.info("Привет")
- msg = self.buffer.getvalue().split('"message": "', 1)[1].split('"', 1)[0]
- self.assertEqual(msg, "Привет")
-
- def test_custom_object_serialization(self):
- def encode_complex(z):
- if isinstance(z, complex):
- return (z.real, z.imag)
- else:
- type_name = z.__class__.__name__
- raise TypeError(f"Object of type '{type_name}' is no JSON serializable")
-
- formatter = jsonlogger.JsonFormatter(
- json_default=encode_complex, json_encoder=json.JSONEncoder
- )
- self.log_handler.setFormatter(formatter)
-
- value = {
- "special": complex(3, 8),
- }
-
- self.log.info(" message", extra=value)
- msg = self.buffer.getvalue()
- self.assertEqual(msg, '{"message": " message", "special": [3.0, 8.0]}\n')
-
- def test_rename_reserved_attrs(self):
- log_format = lambda x: [f"%({i:s})s" for i in x]
- reserved_attrs_map = {
- "exc_info": "error.type",
- "exc_text": "error.message",
- "funcName": "log.origin.function",
- "levelname": "log.level",
- "module": "log.origin.file.name",
- "processName": "process.name",
- "threadName": "process.thread.name",
- "msg": "log.message",
- }
-
- custom_format = " ".join(log_format(reserved_attrs_map.keys()))
- reserved_attrs = [
- _ for _ in jsonlogger.RESERVED_ATTRS if _ not in list(reserved_attrs_map.keys())
- ]
- formatter = jsonlogger.JsonFormatter(
- custom_format, reserved_attrs=reserved_attrs, rename_fields=reserved_attrs_map
- )
- self.log_handler.setFormatter(formatter)
- self.log.info("message")
-
- msg = self.buffer.getvalue()
- self.assertEqual(
- msg,
- '{"error.type": null, "error.message": null, "log.origin.function": "test_rename_reserved_attrs", "log.level": "INFO", "log.origin.file.name": "test_jsonlogger", "process.name": "MainProcess", "process.thread.name": "MainThread", "log.message": "message"}\n',
- )
-
- def test_merge_record_extra(self):
- record = logging.LogRecord(
- "name", level=1, pathname="", lineno=1, msg="Some message", args=None, exc_info=None
- )
- output = jsonlogger.merge_record_extra(record, target=dict(foo="bar"), reserved=[])
- self.assertIn("foo", output)
- self.assertIn("msg", output)
- self.assertEqual(output["foo"], "bar")
- self.assertEqual(output["msg"], "Some message")
diff --git a/tests/test_missing.py b/tests/test_missing.py
new file mode 100644
index 0000000..0878014
--- /dev/null
+++ b/tests/test_missing.py
@@ -0,0 +1,67 @@
+### IMPORTS
+### ============================================================================
+## Future
+from __future__ import annotations
+
+## Standard Library
+
+## Installed
+import pytest
+
+## Application
+import pythonjsonlogger
+from pythonjsonlogger.utils import package_is_available
+from pythonjsonlogger.exception import MissingPackageError
+
+### CONSTANTS
+### ============================================================================
+MISSING_PACKAGE_NAME = "package_name_is_definitely_not_available"
+MISSING_PACKAGE_EXTRA = "package_extra_that_is_unique"
+
+
+### TESTS
+### ============================================================================
+def test_package_is_available():
+ assert package_is_available("json")
+ return
+
+
+def test_package_not_available():
+ assert not package_is_available(MISSING_PACKAGE_NAME)
+ return
+
+
+def test_package_not_available_throw():
+ with pytest.raises(MissingPackageError) as e:
+ package_is_available(MISSING_PACKAGE_NAME, throw_error=True)
+ assert MISSING_PACKAGE_NAME in e.value.msg
+ assert MISSING_PACKAGE_EXTRA not in e.value.msg
+ return
+
+
+def test_package_not_available_throw_extras():
+ with pytest.raises(MissingPackageError) as e:
+ package_is_available(
+ MISSING_PACKAGE_NAME, throw_error=True, extras_name=MISSING_PACKAGE_EXTRA
+ )
+ assert MISSING_PACKAGE_NAME in e.value.msg
+ assert MISSING_PACKAGE_EXTRA in e.value.msg
+ return
+
+
+## Python JSON Logger Specific
+## -----------------------------------------------------------------------------
+if not pythonjsonlogger.ORJSON_AVAILABLE:
+
+ def test_orjson_import_error():
+ with pytest.raises(MissingPackageError, match="orjson"):
+ import pythonjsonlogger.orjson
+ return
+
+
+if not pythonjsonlogger.MSGSPEC_AVAILABLE:
+
+ def test_msgspec_import_error():
+ with pytest.raises(MissingPackageError, match="msgspec"):
+ import pythonjsonlogger.msgspec
+ return
diff --git a/tox.ini b/tox.ini
index 946be58..4304e37 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,35 +1,22 @@
[tox]
-requires = tox>=3
-envlist = lint, type, pypy{37,38,39,310}, py{37,38,39,310,311,312}
-
-[gh-actions]
-python =
- pypy-3.7: pypy37
- pypy-3.8: pypy38
- pypy-3.9: pypy39
- pypy-3.10: pypy310
- 3.7: py37
- 3.8: py38
- 3.9: py39
- 3.10: py310
- 3.11: py311
- 3.12: py312
+requires = tox>=3,tox-uv
+envlist = py{38,39,310,311,312,313}, pypy{38,39,310}
[testenv]
description = run unit tests
-extras = test
+extras = dev
commands =
pytest tests
[testenv:format]
description = run formatters
-extras = lint
+extras = dev
commands =
black src tests
[testenv:lint]
description = run linters
-extras = lint
+extras = dev
commands =
validate-pyproject pyproject.toml
black --check --diff src tests