diff --git a/.github/workflows/build_dist.yml b/.github/workflows/build_dist.yml
index 6f8838a5..b907acc0 100644
--- a/.github/workflows/build_dist.yml
+++ b/.github/workflows/build_dist.yml
@@ -9,74 +9,154 @@ on:
     types: [created]
   pull_request:
 
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
 jobs:
   sdist:
     name: Build source distribution
     runs-on: ubuntu-latest
     steps:
-    - name: Check out repository
-      uses: actions/checkout@v2
-      with:
-        fetch-depth: 0  # To ensure tags are retrieved to enabe setuptools_scm to work
-    - name: Install Python 3.x
-      uses: actions/setup-python@v2
-      with:
-        python-version: 3.x
-    - name: Build sdist
-      run: python setup.py sdist
-    - name: Save sdist
-      uses: actions/upload-artifact@v2
-      with:
-        path: dist/*.tar.gz
+      - name: Check out repository
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 0  # To ensure tags are retrieved to enable setuptools_scm to work
+      - name: Install Python 3.x
+        uses: actions/setup-python@v5
+        with:
+          python-version: 3.x
+      - name: Build sdist
+        run: pipx run build --sdist
+      - name: Save sdist
+        uses: actions/upload-artifact@v4
+        with:
+          name: cibw-sdist.tar.gz
+          path: dist/*.tar.gz
 
   wheels:
     name: Build wheels on ${{ matrix.os }} CIBW_BUILD=${{ matrix.cibw_build }}
     runs-on: ${{ matrix.os }}
     strategy:
+      # since multiple builds run at the same time, cancelling them all when one
+      # fails is wasteful and forces handling build problems one by one instead
+      # of showing a "full picture"
+      fail-fast: false
       matrix:
-        os: [ubuntu-latest, macOS-latest, windows-latest]
-        cibw_build: [cp37-*, cp38-*, cp39-*, cp310-*, cp311-*]
+        os:
+          - ubuntu-latest
+          - macos-13  # x86
+          - macos-latest  # arm
+          - windows-latest
+        cibw_build: [cp39-*, cp310-*, cp311-*, cp312-*, cp313-*, cp313t-*]
 
     steps:
       - name: Check out repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
         with:
-          fetch-depth: 0 # To ensure tags are retrieved to enabe setuptools_scm to work
+          fetch-depth: 0  # To ensure tags are retrieved to enable setuptools_scm to work
       - name: Install Python 3.x
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
           python-version: 3.x
-      - name: Set up QEMU # Needed to build aarch64 wheels
+      - name: Set up QEMU  # Needed to build aarch64 wheels
         if: runner.os == 'Linux'
-        uses: docker/setup-qemu-action@v2
+        uses: docker/setup-qemu-action@v3
         with:
           platforms: all
+
+      - name: Setup free-threading variables
+        if: ${{ endsWith(matrix.cibw_build, 't-*') }}
+        shell: bash -l {0}
+        run: |
+          echo "CIBW_BEFORE_TEST=pip install pytest pytest-run-parallel" >> "$GITHUB_ENV"
+          echo "CIBW_ENVIRONMENT=PYLZ4_USE_SYSTEM_LZ4=False PYTEST_ADDOPTS=--parallel-threads=4" >> "$GITHUB_ENV"
+          echo "CIBW_TEST_COMMAND=tox -x testenv.deps+=pytest-run-parallel -x testenv.pass_env+=PYTEST_ADDOPTS -c {project}" >> "$GITHUB_ENV"
+      - name: Setup environment
+        if: ${{ !endsWith(matrix.cibw_build, 't-*') }}
+        shell: bash -l {0}
+        run: |
+          echo "CIBW_ENVIRONMENT=PYLZ4_USE_SYSTEM_LZ4=False" >> "$GITHUB_ENV"
+          echo "CIBW_TEST_COMMAND=tox -c {project}" >> "$GITHUB_ENV"
+      - name: Build wheels
+        uses: pypa/cibuildwheel@v2.23.2
+        env:
+          # CIBW_ARCHS_LINUX: "x86_64 i686 aarch64"
+          CIBW_ARCHS_LINUX: "x86_64 i686"
+          CIBW_ARCHS_MACOS: "auto64"  # since we have both runner arches
+          CIBW_ARCHS_WINDOWS: "AMD64 x86 ARM64"
+          CIBW_ENABLE: cpython-freethreading
+          CIBW_BUILD: ${{ matrix.cibw_build }}
+          CIBW_SKIP: "cp*-musllinux*"
+          CIBW_TEST_SKIP: "*-macosx_arm64 *-macosx_universal2:arm64 *-*linux_{ppc64le,s390x} *-win_arm64"
+          CIBW_BEFORE_BUILD: "python -m pip install -U pip && python -m pip install tox"
+      - name: Save wheels
+        uses: actions/upload-artifact@v4
+        with:
+          name: cibw-wheels-${{ matrix.os }}-${{ strategy.job-index }}
+          path: ./wheelhouse/*.whl
+
+  wheels_linux_arm:
+    name: Build wheels on ${{ matrix.os }} CIBW_BUILD=${{ matrix.cibw_build }}
+    runs-on: ${{ matrix.os }}
+    strategy:
+      # since multiple builds run at the same time, cancelling them all when one
+      # fails is wasteful and forces handling build problems one by one instead
+      # of showing a "full picture"
+      fail-fast: false
+      matrix:
+        os:
+          - ubuntu-24.04-arm
+        cibw_build: [cp39-*, cp310-*, cp311-*, cp312-*, cp313-*, cp313t-*]
+    steps:
+      - name: Check out repository
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 0  # To ensure tags are retrieved to enable setuptools_scm to work
+      - name: Install Python 3.x
+        uses: actions/setup-python@v5
+        with:
+          python-version: 3.x
+      - name: Setup free-threading variables
+        if: ${{ endsWith(matrix.cibw_build, 't-*') }}
+        shell: bash -l {0}
+        run: |
+          # Variables are set in order to be passed down to both cibuildwheel and the
+          # Docker image spawned by that action
+          echo "CIBW_BEFORE_TEST=pip install pytest pytest-run-parallel" >> "$GITHUB_ENV"
+          echo "CIBW_ENVIRONMENT=PYLZ4_USE_SYSTEM_LZ4=False PYTEST_ADDOPTS=--parallel-threads=1" >> "$GITHUB_ENV"
+          echo "CIBW_TEST_COMMAND=tox -x testenv.deps+=pytest-run-parallel -x testenv.pass_env+=PYTEST_ADDOPTS -c {project}" >> "$GITHUB_ENV"
+      - name: Setup environment
+        if: ${{ !endsWith(matrix.cibw_build, 't-*') }}
+        shell: bash -l {0}
+        run: |
+          echo "CIBW_ENVIRONMENT=PYLZ4_USE_SYSTEM_LZ4=False" >> "$GITHUB_ENV"
+          echo "CIBW_TEST_COMMAND=tox -c {project}" >> "$GITHUB_ENV"
       - name: Build wheels
-        uses: pypa/cibuildwheel@v2.11.4
+        uses: pypa/cibuildwheel@v2.23.2
         env:
-          CIBW_ARCHS_LINUX: "x86_64 i686 aarch64"
-          CIBW_ARCHS_MACOS: "x86_64 arm64" # universal2"
-          CIBW_ARCHS_WINDOWS: "AMD64 x86"
+          CIBW_ARCHS_LINUX: "aarch64"
           CIBW_BUILD: ${{ matrix.cibw_build }}
           CIBW_SKIP: "cp*-musllinux*"
-          CIBW_TEST_COMMAND: "tox -c {project}"
-          CIBW_TEST_SKIP: "*-macosx_arm64 *-macosx_universal2:arm64 *-*linux_{aarch64,ppc64le,s390x}"
+          CIBW_ENABLE: cpython-freethreading
          CIBW_BEFORE_BUILD: "python -m pip install -U pip && python -m pip install tox"
       - name: Save wheels
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v4
         with:
-          path: wheelhouse/*.whl
+          name: cibw-wheels-${{ matrix.os }}-${{ strategy.job-index }}
+          path: ./wheelhouse/*.whl
 
   upload_pypi:
     name: Upload to PyPI
-    needs: [sdist, wheels]
+    needs: [sdist, wheels, wheels_linux_arm]
     runs-on: ubuntu-latest
     if: startsWith(github.ref, 'refs/tags/')
     steps:
-      - uses: actions/download-artifact@v3
+      - uses: actions/download-artifact@v4
         with:
-          name: artifact
+          pattern: cibw-*
           path: dist
+          merge-multiple: true
       - uses: pypa/gh-action-pypi-publish@release/v1
         with:
           password: ${{ secrets.PYPI_API_TOKEN }}
-          skip_existing: true
+          skip-existing: true
diff --git a/.readthedocs.yml b/.readthedocs.yml
new file mode 100644
index 00000000..1ad8c58f
--- /dev/null
+++ b/.readthedocs.yml
@@ -0,0 +1,25 @@
+# .readthedocs.yaml
+# Read the Docs configuration file
+# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
+
+# Required
+version: 2
+
+# Set the version of Python and other tools you might need
+build:
+  os: ubuntu-20.04
+  tools:
+    python: "3.11"
+
+# Build documentation in the docs/ directory with Sphinx
+sphinx:
+  configuration: docs/conf.py
+
+# If using Sphinx, optionally build your docs in additional formats such as PDF
+formats:
+  - pdf
+
+# Optionally declare the Python requirements required to build your docs
+python:
+  install:
+    - requirements: docs/requirements.txt
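The free-threading lanes above install pytest-run-parallel and pass PYTEST_ADDOPTS=--parallel-threads=N into the cibuildwheel test step, so each test body is executed concurrently from several threads. A minimal sketch of what that looks like when run locally (illustrative only, not part of the diff; it assumes pytest and pytest-run-parallel are installed and the suite is started with ``pytest --parallel-threads=4``)::

    # Illustrative sketch of what the free-threaded CI lane exercises.
    # Run with:  pytest --parallel-threads=4
    import lz4.frame
    import pytest


    def test_roundtrip_is_thread_safe():
        # Pure in-memory roundtrip; safe to execute from many threads at once.
        payload = b"some compressible payload " * 1024
        assert lz4.frame.decompress(lz4.frame.compress(payload)) == payload


    @pytest.mark.thread_unsafe
    def test_uses_shared_state(tmp_path):
        # Tests that touch shared resources opt out with the thread_unsafe
        # marker provided by pytest-run-parallel, as the test changes below do.
        with lz4.frame.open(tmp_path / "shared.lz4", mode="wb") as fp:
            fp.write(b"run by a single thread")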
diff --git a/README.rst b/README.rst
index 15ba4445..a75bc4d4 100644
--- a/README.rst
+++ b/README.rst
@@ -5,14 +5,10 @@ python-lz4
 Status
 ======
 
-.. image:: https://travis-ci.org/python-lz4/python-lz4.svg?branch=master
-   :target: https://travis-ci.org/python-lz4/python-lz4
+.. image:: https://github.com/python-lz4/python-lz4/actions/workflows/build_dist.yml/badge.svg
+   :target: https://github.com/python-lz4/python-lz4/actions/workflows/build_dist.yml
    :alt: Build Status
 
-.. image:: https://ci.appveyor.com/api/projects/status/r2qvw9mlfo63lklo/branch/master?svg=true
-   :target: https://ci.appveyor.com/project/jonathanunderwood/python-lz4
-   :alt: Build Status Windows
-
 .. image:: https://readthedocs.org/projects/python-lz4/badge/?version=stable
    :target: https://readthedocs.org/projects/python-lz4/
    :alt: Documentation
diff --git a/docs/conf.py b/docs/conf.py
index 51b6576f..aefff88e 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -55,8 +55,13 @@
 # |version| and |release|, also used in various other places throughout the
 # built documents.
 #
-from pkg_resources import get_distribution
-release = get_distribution('lz4').version
+try:
+    import importlib.metadata
+except ImportError:
+    from pkg_resources import get_distribution
+    release = get_distribution('lz4').version
+else:
+    release = importlib.metadata.version('lz4')
 version = release
 
 # The language for content autogenerated by Sphinx. Refer to documentation
diff --git a/docs/install.rst b/docs/install.rst
index 3d531f7b..f6eeafed 100644
--- a/docs/install.rst
+++ b/docs/install.rst
@@ -22,7 +22,7 @@ The LZ4 bindings require linking to the LZ4 library, and so if there is not a
 pre-compiled wheel available for your platform you will need to have a
 suitable C compiler available, as well as the Python development header files.
 On Debian/Ubuntu based systems the header files for Python are found in the
-distribution package ``pythonX.Y-dev`` e.g. ``python3.7-dev``. On Fedora/Red Hat
+distribution package ``pythonX.Y-dev`` e.g. ``python3.8-dev``. On Fedora/Red Hat
 based systems, the Python header files are found in the distribution package
 ``python-devel``.
 
@@ -40,7 +40,7 @@ this command will result in the extension modules being compiled from source::
 On systems for which pre-built wheels are available, the following command
 will force a local compilation of the extension modules from source::
 
-    $ pip install --no-binary --no-cache-dir lz4
+    $ pip install --no-binary :all: --no-cache-dir lz4
 
 The package can also be installed manually from a checkout of the source code
 git repository::
diff --git a/docs/lz4.block.rst b/docs/lz4.block.rst
index 0b2ef103..5c7f0c78 100644
--- a/docs/lz4.block.rst
+++ b/docs/lz4.block.rst
@@ -84,7 +84,7 @@ can be used in this case.
    True
 
 In this example we are catching the `lz4.block.LZ4BlockError`
-exception. This exception is raisedd if the LZ4 library call fails,
+exception. This exception is raised if the LZ4 library call fails,
 which can be caused by either the buffer used to store the uncompressed
 data (as set by `usize`) being too small, or the input compressed data
 being invalid - it is not possible to distinguish the
diff --git a/lz4/_version.c b/lz4/_version.c
index c611f0b3..af606abe 100644
--- a/lz4/_version.c
+++ b/lz4/_version.c
@@ -113,5 +113,9 @@ PyInit__version(void)
   if (module == NULL)
     return NULL;
 
+  #ifdef Py_GIL_DISABLED
+  PyUnstable_Module_SetGIL(module, Py_MOD_GIL_NOT_USED);
+  #endif
+
   return module;
 }
diff --git a/lz4/block/_block.c b/lz4/block/_block.c
index 3e904a03..993cc44c 100644
--- a/lz4/block/_block.c
+++ b/lz4/block/_block.c
@@ -518,5 +518,9 @@ PyInit__block(void)
   Py_INCREF(LZ4BlockError);
   PyModule_AddObject(module, "LZ4BlockError", LZ4BlockError);
 
+  #ifdef Py_GIL_DISABLED
+  PyUnstable_Module_SetGIL(module, Py_MOD_GIL_NOT_USED);
+  #endif
+
   return module;
 }
diff --git a/lz4/frame/_frame.c b/lz4/frame/_frame.c
index 34606653..e62c72c6 100644
--- a/lz4/frame/_frame.c
+++ b/lz4/frame/_frame.c
@@ -1677,5 +1677,9 @@ PyInit__frame(void)
   PyModule_AddIntConstant (module, "BLOCKSIZE_MAX1MB", LZ4F_max1MB);
   PyModule_AddIntConstant (module, "BLOCKSIZE_MAX4MB", LZ4F_max4MB);
 
+  #ifdef Py_GIL_DISABLED
+  PyUnstable_Module_SetGIL(module, Py_MOD_GIL_NOT_USED);
+  #endif
+
   return module;
 }
diff --git a/lz4/stream/_stream.c b/lz4/stream/_stream.c
index 522fdeda..f0dfad57 100644
--- a/lz4/stream/_stream.c
+++ b/lz4/stream/_stream.c
@@ -1649,5 +1649,9 @@ PyInit__stream(void)
   Py_INCREF (LZ4StreamError);
   PyModule_AddObject (module, "LZ4StreamError", LZ4StreamError);
 
+  #ifdef Py_GIL_DISABLED
+  PyUnstable_Module_SetGIL(module, Py_MOD_GIL_NOT_USED);
+  #endif
+
   return module;
 }
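Each extension module above now calls PyUnstable_Module_SetGIL(module, Py_MOD_GIL_NOT_USED) when compiled with Py_GIL_DISABLED. On a free-threaded interpreter (python3.13t), importing an extension that has not made this declaration causes CPython to re-enable the GIL, so a quick way to confirm the declarations took effect is to import the package and inspect the interpreter state. A rough sketch only (it assumes a free-threaded CPython 3.13 build; ``sys._is_gil_enabled()`` is the private introspection helper added in 3.13)::

    # Rough check on python3.13t that importing lz4 does not re-enable the GIL.
    import sys

    import lz4.block
    import lz4.frame

    if hasattr(sys, "_is_gil_enabled"):
        # Expected on a free-threaded build with these patches applied: False
        print("GIL enabled after importing lz4:", sys._is_gil_enabled())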
diff --git a/setup.py b/setup.py
index 2f2704fb..9bc91f01 100644
--- a/setup.py
+++ b/setup.py
@@ -1,8 +1,9 @@
 #!/usr/bin/env python
 import os
 from setuptools import setup, find_packages, Extension
+from setuptools.command.build_ext import new_compiler
 import sys
-from distutils import ccompiler
+
 
 # Note: if updating LZ4_REQUIRED_VERSION you need to update docs/install.rst as
 # well.
@@ -35,12 +36,11 @@ def pkgconfig_installed_check(lib, required_version, default):
 liblz4_found = pkgconfig_installed_check('liblz4', LZ4_REQUIRED_VERSION, default=False)
 
 # Establish if we want to build experimental functionality or not.
-experimental = os.environ.get("PYLZ4_EXPERIMENTAL", False)
-if experimental is not False:
-    if experimental.upper() in ("1", "TRUE"):
-        experimental = True
-    else:
-        experimental = False
+experimental_env = os.environ.get("PYLZ4_EXPERIMENTAL", "False")
+if experimental_env.upper() in ("1", "TRUE"):
+    experimental = True
+else:
+    experimental = False
 
 # Set up the extension modules. If a system wide lz4 library is found, and is
 # recent enough, we'll use that. Otherwise we'll build with the bundled one. If
@@ -67,7 +67,13 @@ def pkgconfig_installed_check(lib, required_version, default):
     'lz4/stream/_stream.c'
 ]
 
-if liblz4_found is True:
+use_system_liblz4_env = os.environ.get("PYLZ4_USE_SYSTEM_LZ4", "True")
+if use_system_liblz4_env.upper() in ("1", "TRUE"):
+    use_system_liblz4 = True
+else:
+    use_system_liblz4 = False
+
+if liblz4_found is True and use_system_liblz4 is True:
     extension_kwargs['libraries'] = ['lz4']
 else:
     extension_kwargs['include_dirs'] = ['lz4libs']
@@ -97,7 +103,7 @@ def pkgconfig_installed_check(lib, required_version, default):
     ]
 )
 
-compiler = ccompiler.get_default_compiler()
+compiler = new_compiler().compiler_type
 
 if compiler == 'msvc':
     extension_kwargs['extra_compile_args'] = [
@@ -107,7 +113,7 @@ def pkgconfig_installed_check(lib, required_version, default):
         '/wd4820',
     ]
 elif compiler in ('unix', 'mingw32'):
-    if liblz4_found:
+    if liblz4_found is True and use_system_liblz4 is True:
         extension_kwargs = pkgconfig_parse('liblz4')
     else:
         extension_kwargs['extra_compile_args'] = [
@@ -165,7 +171,7 @@ def pkgconfig_installed_check(lib, required_version, default):
     use_scm_version={
         'write_to': "lz4/version.py",
     },
-    python_requires=">=3.7",
+    python_requires=">=3.9",
     setup_requires=[
         'setuptools_scm',
         'pkgconfig',
@@ -194,10 +200,10 @@ def pkgconfig_installed_check(lib, required_version, default):
         'Intended Audience :: Developers',
         'Programming Language :: C',
         'Programming Language :: Python',
-        'Programming Language :: Python :: 3.7',
-        'Programming Language :: Python :: 3.8',
         'Programming Language :: Python :: 3.9',
         'Programming Language :: Python :: 3.10',
         'Programming Language :: Python :: 3.11',
+        'Programming Language :: Python :: 3.12',
+        'Programming Language :: Python :: 3.13',
     ],
 )
diff --git a/tests/block/test_block_0.py b/tests/block/test_block_0.py
index 8fc0f488..a7731c3a 100644
--- a/tests/block/test_block_0.py
+++ b/tests/block/test_block_0.py
@@ -1,6 +1,8 @@
 import lz4.block
 from multiprocessing.pool import ThreadPool
 import sys
+import copy
+import pytest
 from functools import partial
 if sys.version_info <= (3, 2):
     import struct
@@ -68,6 +70,13 @@ def setup_kwargs(mode, store_size, c_return_bytearray=None, d_return_bytearray=N
 
 # Test single threaded usage with all valid variations of input
 def test_1(data, mode, store_size, c_return_bytearray, d_return_bytearray, dictionary):
+    if isinstance(data, memoryview):
+        data = memoryview(copy.deepcopy(data.obj))
+    elif isinstance(data, bytearray):
+        data_x = bytearray()
+        data_x[:] = data
+        data = data_x
+
     (c_kwargs, d_kwargs) = setup_kwargs(
         mode, store_size, c_return_bytearray, d_return_bytearray)
 
@@ -79,10 +88,21 @@ def test_1(data, mode, store_size, c_return_bytearray, d_return_bytearray, dicti
 
 # Test multi threaded usage with all valid variations of input
+@pytest.mark.thread_unsafe
 def test_2(data, mode, store_size, dictionary):
     (c_kwargs, d_kwargs) = setup_kwargs(mode, store_size)
 
-    data_in = [data for i in range(32)]
+    def copy_buf(data):
+        if isinstance(data, memoryview):
+            data_x = memoryview(copy.deepcopy(data.obj))
+        elif isinstance(data, bytearray):
+            data_x = bytearray()
+            data_x[:] = data
+        else:
+            data_x = data
+        return data_x
+
+    data_in = [copy_buf(data) for i in range(32)]
 
     pool = ThreadPool(2)
     rt = partial(roundtrip,
                  c_kwargs=c_kwargs,
diff --git a/tests/block/test_block_3.py b/tests/block/test_block_3.py
index 3fcb175b..88461b7a 100644
--- a/tests/block/test_block_3.py
+++ b/tests/block/test_block_3.py
@@ -18,6 +18,7 @@ def data(request):
     return request.param
 
 
+@pytest.mark.thread_unsafe
 def test_block_decompress_mem_usage(data):
     tracemalloc = pytest.importorskip('tracemalloc')
 
diff --git a/tests/frame/test_frame_2.py b/tests/frame/test_frame_2.py
index 80b44b87..230867e6 100644
--- a/tests/frame/test_frame_2.py
+++ b/tests/frame/test_frame_2.py
@@ -1,6 +1,7 @@
 import lz4.frame as lz4frame
 import pytest
 import os
+import copy
 import sys
 from . helpers import (
     get_chunked,
@@ -41,6 +42,13 @@ def test_roundtrip_chunked(data, block_size, block_linked,
 
     data, c_chunks, d_chunks = data
 
+    if isinstance(data, memoryview):
+        data = memoryview(copy.deepcopy(data.obj))
+    elif isinstance(data, bytearray):
+        data_2 = bytearray()
+        data_2[:] = data
+        data = data_2
+
     c_context = lz4frame.create_compression_context()
 
     kwargs = {}
diff --git a/tests/frame/test_frame_5.py b/tests/frame/test_frame_5.py
index 05daf283..dcbe4aea 100644
--- a/tests/frame/test_frame_5.py
+++ b/tests/frame/test_frame_5.py
@@ -8,6 +8,8 @@
     (b'a' * 1024 * 1024),
 ]
 
+pytestmark = pytest.mark.thread_unsafe
+
 
 @pytest.fixture(
     params=test_data,
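The frame tests that follow stop writing to a shared ``testfile`` in the working directory and instead derive a per-thread path from the standard pytest ``tmp_path`` fixture plus ``threading.get_native_id()``, so threads spawned by pytest-run-parallel cannot overwrite each other's files. A minimal sketch of the pattern, mirroring the changes below (illustrative only)::

    import threading

    import lz4.frame


    def test_write_then_read(tmp_path):
        # One file per OS thread: concurrent runs of the same test cannot clash.
        path = tmp_path / f"testfile_{threading.get_native_id()}"
        with lz4.frame.open(path, mode="wb") as fp:
            fp.write(b"data")
        with lz4.frame.open(path, mode="rb") as fp:
            assert fp.read() == b"data"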
diff --git a/tests/frame/test_frame_6.py b/tests/frame/test_frame_6.py
index c20a4f31..4f4185ee 100644
--- a/tests/frame/test_frame_6.py
+++ b/tests/frame/test_frame_6.py
@@ -1,5 +1,6 @@
 import os
 import pytest
+import threading
 import lz4.frame as lz4frame
 
 test_data = [
@@ -33,40 +34,45 @@ def compression_level(request):
     return request.param
 
 
-def test_lz4frame_open_write(data):
-    with lz4frame.open('testfile', mode='wb') as fp:
+def test_lz4frame_open_write(tmp_path, data):
+    thread_id = threading.get_native_id()
+    with lz4frame.open(tmp_path / f'testfile_{thread_id}', mode='wb') as fp:
         fp.write(data)
 
 
-def test_lz4frame_open_write_read_defaults(data):
-    with lz4frame.open('testfile', mode='wb') as fp:
+def test_lz4frame_open_write_read_defaults(tmp_path, data):
+    thread_id = threading.get_native_id()
+    with lz4frame.open(tmp_path / f'testfile_{thread_id}', mode='wb') as fp:
         fp.write(data)
 
-    with lz4frame.open('testfile', mode='r') as fp:
+    with lz4frame.open(tmp_path / f'testfile_{thread_id}', mode='r') as fp:
         data_out = fp.read()
 
     assert data_out == data
 
 
-def test_lz4frame_open_write_read_text():
+def test_lz4frame_open_write_read_text(tmp_path):
     data = u'This is a test string'
 
-    with lz4frame.open('testfile', mode='wt') as fp:
+    thread_id = threading.get_native_id()
+    with lz4frame.open(tmp_path / f'testfile_{thread_id}', mode='wt') as fp:
         fp.write(data)
 
-    with lz4frame.open('testfile', mode='rt') as fp:
+    with lz4frame.open(tmp_path / f'testfile_{thread_id}', mode='rt') as fp:
         data_out = fp.read()
 
     assert data_out == data
 
 
-def test_lz4frame_open_write_read_text_iter():
+def test_lz4frame_open_write_read_text_iter(tmp_path):
     data = u'This is a test string'
 
-    with lz4frame.open('testfile', mode='wt') as fp:
+    thread_id = threading.get_native_id()
+    with lz4frame.open(tmp_path / f'testfile_{thread_id}', mode='wt') as fp:
         fp.write(data)
 
     data_out = ''
-    with lz4frame.open('testfile', mode='rt') as fp:
+    with lz4frame.open(tmp_path / f'testfile_{thread_id}', mode='rt') as fp:
         for line in fp:
             data_out += line
 
     assert data_out == data
 
 
 def test_lz4frame_open_write_read(
+        tmp_path,
         data,
         compression_level,
         block_linked,
@@ -91,29 +97,31 @@ def test_lz4frame_open_write_read(
     kwargs['return_bytearray'] = return_bytearray
     kwargs['mode'] = 'wb'
 
-    with lz4frame.open('testfile', **kwargs) as fp:
+    thread_id = threading.get_native_id()
+    with lz4frame.open(tmp_path / f'testfile_{thread_id}', **kwargs) as fp:
         fp.write(data)
 
-    with lz4frame.open('testfile', mode='r') as fp:
+    with lz4frame.open(tmp_path / f'testfile_{thread_id}', mode='r') as fp:
         data_out = fp.read()
 
     assert data_out == data
 
 
-def test_lz4frame_flush():
+def test_lz4frame_flush(tmp_path):
     data_1 = b"This is a..."
     data_2 = b" test string!"
+    thread_id = threading.get_native_id()
 
-    with lz4frame.open("testfile", mode="w") as fp_write:
+    with lz4frame.open(tmp_path / f"testfile_{thread_id}", mode="w") as fp_write:
         fp_write.write(data_1)
 
         fp_write.flush()
 
         fp_write.write(data_2)
 
-        with lz4frame.open("testfile", mode="r") as fp_read:
+        with lz4frame.open(tmp_path / f"testfile_{thread_id}", mode="r") as fp_read:
             assert fp_read.read() == data_1
 
         fp_write.flush()
 
-        with lz4frame.open("testfile", mode="r") as fp_read:
+        with lz4frame.open(tmp_path / f"testfile_{thread_id}", mode="r") as fp_read:
             assert fp_read.read() == data_1 + data_2
diff --git a/tests/frame/test_frame_8.py b/tests/frame/test_frame_8.py
index 159534ae..cfaeaace 100644
--- a/tests/frame/test_frame_8.py
+++ b/tests/frame/test_frame_8.py
@@ -1,12 +1,14 @@
+import threading
 import lz4.frame as lz4frame
 
 
-def test_lz4frame_open_write_read_text_iter():
+def test_lz4frame_open_write_read_text_iter(tmp_path):
     data = u'This is a test string'
-    with lz4frame.open('testfile', mode='wt') as fp:
+    thread_id = threading.get_native_id()
+    with lz4frame.open(tmp_path / f'testfile_{thread_id}', mode='wt') as fp:
         fp.write(data)
     data_out = ''
-    with lz4frame.open('testfile', mode='rt') as fp:
+    with lz4frame.open(tmp_path / f'testfile_{thread_id}', mode='rt') as fp:
         for line in fp:
             data_out += line
     assert data_out == data
diff --git a/tests/frame/test_frame_9.py b/tests/frame/test_frame_9.py
index 51433934..c5335aed 100644
--- a/tests/frame/test_frame_9.py
+++ b/tests/frame/test_frame_9.py
@@ -3,11 +3,12 @@
 import io
 import pickle
 import sys
+import threading
 import lz4.frame
 import pytest
 
 
-def test_issue_172_1():
+def test_issue_172_1(tmp_path):
     """Test reproducer for issue 172
 
     Issue 172 is a reported failure occurring on Windows 10 only. This bug was
@@ -16,34 +17,38 @@ def test_issue_172_1():
     """
     input_data = 8 * os.urandom(1024)
 
-    with lz4.frame.open('testfile_small', 'wb') as fp:
+    thread_id = threading.get_native_id()
+
+    with lz4.frame.open(tmp_path / f'testfile_small_{thread_id}', 'wb') as fp:
         bytes_written = fp.write(input_data)  # noqa: F841
 
-    with lz4.frame.open('testfile_small', 'rb') as fp:
+    with lz4.frame.open(tmp_path / f'testfile_small_{thread_id}', 'rb') as fp:
         data = fp.read(10)
         assert len(data) == 10
 
 
-def test_issue_172_2():
+def test_issue_172_2(tmp_path):
     input_data = 9 * os.urandom(1024)
-    with lz4.frame.open('testfile_small', 'w') as fp:
+    thread_id = threading.get_native_id()
+    with lz4.frame.open(tmp_path / f'testfile_small_{thread_id}', 'w') as fp:
         bytes_written = fp.write(input_data)  # noqa: F841
 
-    with lz4.frame.open('testfile_small', 'r') as fp:
+    with lz4.frame.open(tmp_path / f'testfile_small_{thread_id}', 'r') as fp:
         data = fp.read(10)
         assert len(data) == 10
 
 
-def test_issue_172_3():
+def test_issue_172_3(tmp_path):
     input_data = 9 * os.urandom(1024)
-    with lz4.frame.open('testfile_small', 'wb') as fp:
+    thread_id = threading.get_native_id()
+    with lz4.frame.open(tmp_path / f'testfile_small_{thread_id}', 'wb') as fp:
         bytes_written = fp.write(input_data)  # noqa: F841
 
-    with lz4.frame.open('testfile_small', 'rb') as fp:
+    with lz4.frame.open(tmp_path / f'testfile_small_{thread_id}', 'rb') as fp:
         data = fp.read(10)
         assert len(data) == 10
 
-    with lz4.frame.open('testfile_small', 'rb') as fp:
+    with lz4.frame.open(tmp_path / f'testfile_small_{thread_id}', 'rb') as fp:
         data = fp.read(16 * 1024 - 1)
         assert len(data) == 9 * 1024
         assert data == input_data
diff --git a/tests/stream/test_stream_0.py b/tests/stream/test_stream_0.py
index 03b19f3f..cac07bdd 100644
--- a/tests/stream/test_stream_0.py
+++ b/tests/stream/test_stream_0.py
@@ -96,6 +96,7 @@ def setup_kwargs(strategy, mode, buffer_size, store_comp_size,
 
 
 # Test single threaded usage with all valid variations of input
+@pytest.mark.thread_unsafe
 def test_1(data, strategy, mode, buffer_size, store_comp_size,
            c_return_bytearray, d_return_bytearray, dictionary):
     if buffer_size >= (1 << (8 * store_comp_size['store_comp_size'])):
diff --git a/tests/stream/test_stream_3.py b/tests/stream/test_stream_3.py
index 2b52d6b5..fed93d2c 100644
--- a/tests/stream/test_stream_3.py
+++ b/tests/stream/test_stream_3.py
@@ -71,6 +71,7 @@ def data(request):
     return request.param
 
 
+@pytest.mark.thread_unsafe
 def test_block_decompress_mem_usage(data, buffer_size):
     kwargs = {
         'strategy': "double_buffer",
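Taken together, the CI lanes, the Py_MOD_GIL_NOT_USED declarations and the per-thread test fixtures target workloads in which several threads compress and decompress independently. An illustrative example of such a workload (not part of the diff; it mirrors the ThreadPool usage already present in tests/block/test_block_0.py)::

    from multiprocessing.pool import ThreadPool

    import lz4.frame


    def roundtrip(payload):
        # Each worker thread compresses and decompresses its own buffer.
        return lz4.frame.decompress(lz4.frame.compress(payload)) == payload


    if __name__ == "__main__":
        data = [b"block %d " % i * 4096 for i in range(32)]
        with ThreadPool(4) as pool:
            assert all(pool.map(roundtrip, data))
        print("32 concurrent roundtrips OK")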