Skip to content

Backport many of the SS:GB 8 changes to run on 7 #478

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 9 commits into from
Jun 28, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 3 additions & 1 deletion .github/workflows/imports.yml
Original file line number Diff line number Diff line change
Expand Up @@ -54,5 +54,7 @@ jobs:
python-version: ${{ needs.rngs.outputs.pyver }}
# python-version: ${{ matrix.python-version }}
- run: python -m pip install --upgrade pip
# - run: pip install --pre suitesparse-graphblas # Use if we need pre-release
- run: pip install -e .[default]
- run: ./scripts/test_imports.sh
- name: Run test imports
run: ./scripts/test_imports.sh
2 changes: 1 addition & 1 deletion .github/workflows/publish_pypi.yml
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ jobs:
- name: Check with twine
run: python -m twine check --strict dist/*
- name: Publish to PyPI
uses: pypa/gh-action-pypi-publish@v1.8.6
uses: pypa/gh-action-pypi-publish@v1.8.7
with:
user: __token__
password: ${{ secrets.PYPI_TOKEN }}
21 changes: 12 additions & 9 deletions .github/workflows/test_and_build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -85,7 +85,7 @@ jobs:
shell: bash -l {0}
strategy:
# To "stress test" in CI, set `fail-fast` to `false` and perhaps add more items to `matrix.slowtask`
fail-fast: false # Every service seems super-flaky right now...
fail-fast: true
# The build matrix is [os]x[slowtask] and then randomly chooses [pyver] and [sourcetype].
# This should ensure we'll have full code coverage (i.e., no chance of getting unlucky),
      # since we need to run all slow tests on Windows and non-Windows OSes.
Expand Down Expand Up @@ -170,25 +170,25 @@ jobs:
nxver=$(python -c 'import random ; print(random.choice(["=2.7", "=2.8", "=3.0", "=3.1", ""]))')
yamlver=$(python -c 'import random ; print(random.choice(["=5.4", "=6.0", ""]))')
sparsever=$(python -c 'import random ; print(random.choice(["=0.13", "=0.14", ""]))')
fmmver=$(python -c 'import random ; print(random.choice(["=1.4", "=1.5", "=1.6", ""]))')
fmmver=$(python -c 'import random ; print(random.choice(["=1.4", "=1.5", "=1.6", "=1.7", ""]))')
if [[ ${{ startsWith(steps.pyver.outputs.selected, '3.8') }} == true ]]; then
npver=$(python -c 'import random ; print(random.choice(["=1.21", "=1.22", "=1.23", "=1.24", ""]))')
spver=$(python -c 'import random ; print(random.choice(["=1.8", "=1.9", "=1.10", ""]))')
pdver=$(python -c 'import random ; print(random.choice(["=1.2", "=1.3", "=1.4", "=1.5", "=2.0", ""]))')
akver=$(python -c 'import random ; print(random.choice(["=1.9", "=1.10", "=2.0", "=2.1", "=2.2", ""]))')
elif [[ ${{ startsWith(steps.pyver.outputs.selected, '3.9') }} == true ]]; then
npver=$(python -c 'import random ; print(random.choice(["=1.21", "=1.22", "=1.23", "=1.24", "=1.25", ""]))')
spver=$(python -c 'import random ; print(random.choice(["=1.8", "=1.9", "=1.10", ""]))')
spver=$(python -c 'import random ; print(random.choice(["=1.8", "=1.9", "=1.10", "=1.11", ""]))')
pdver=$(python -c 'import random ; print(random.choice(["=1.2", "=1.3", "=1.4", "=1.5", "=2.0", ""]))')
akver=$(python -c 'import random ; print(random.choice(["=1.9", "=1.10", "=2.0", "=2.1", "=2.2", ""]))')
elif [[ ${{ startsWith(steps.pyver.outputs.selected, '3.10') }} == true ]]; then
npver=$(python -c 'import random ; print(random.choice(["=1.21", "=1.22", "=1.23", "=1.24", "=1.25", ""]))')
spver=$(python -c 'import random ; print(random.choice(["=1.8", "=1.9", "=1.10", ""]))')
spver=$(python -c 'import random ; print(random.choice(["=1.8", "=1.9", "=1.10", "=1.11", ""]))')
pdver=$(python -c 'import random ; print(random.choice(["=1.3", "=1.4", "=1.5", "=2.0", ""]))')
akver=$(python -c 'import random ; print(random.choice(["=1.9", "=1.10", "=2.0", "=2.1", "=2.2", ""]))')
else # Python 3.11
npver=$(python -c 'import random ; print(random.choice(["=1.23", "=1.24", "=1.25", ""]))')
spver=$(python -c 'import random ; print(random.choice(["=1.9", "=1.10", ""]))')
spver=$(python -c 'import random ; print(random.choice(["=1.9", "=1.10", "=1.11", ""]))')
pdver=$(python -c 'import random ; print(random.choice(["=1.5", "=2.0", ""]))')
akver=$(python -c 'import random ; print(random.choice(["=1.10", "=2.0", "=2.1", "=2.2", ""]))')
fi
Expand All @@ -204,20 +204,20 @@ jobs:
# But, it's still useful for us to test with different versions!
psg=""
if [[ ${{ steps.sourcetype.outputs.selected}} == "conda-forge" ]] ; then
psgver=$(python -c 'import random ; print(random.choice(["=7.4.0", "=7.4.1", "=7.4.2", "=7.4.3.0", "=7.4.3.1", "=7.4.3.2", ""]))')
psgver=$(python -c 'import random ; print(random.choice(["=7.4.0", "=7.4.1", "=7.4.2", "=7.4.3.0", "=7.4.3.1", "=7.4.3.2"]))')
psg=python-suitesparse-graphblas${psgver}
elif [[ ${{ steps.sourcetype.outputs.selected}} == "wheel" ]] ; then
psgver=$(python -c 'import random ; print(random.choice(["==7.4.3.2", ""]))')
psgver=$(python -c 'import random ; print(random.choice(["==7.4.3.2"]))')
elif [[ ${{ steps.sourcetype.outputs.selected}} == "source" ]] ; then
# These should be exact versions
psgver=$(python -c 'import random ; print(random.choice(["==7.4.0.0", "==7.4.1.0", "==7.4.2.0", "==7.4.3.0", "==7.4.3.1", "==7.4.3.2", ""]))')
psgver=$(python -c 'import random ; print(random.choice(["==7.4.0.0", "==7.4.1.0", "==7.4.2.0", "==7.4.3.0", "==7.4.3.1", "==7.4.3.2"]))')
else
psgver=""
fi
if [[ ${npver} == "=1.25" ]] ; then
numbaver=""
if [[ ${spver} == "=1.8" ]] ; then
spver=$(python -c 'import random ; print(random.choice(["=1.9", "=1.10", ""]))')
spver=$(python -c 'import random ; print(random.choice(["=1.9", "=1.10", "=1.11", ""]))')
fi
elif [[ ${npver} == "=1.24" || ${{ startsWith(steps.pyver.outputs.selected, '3.11') }} == true ]] ; then
numbaver=$(python -c 'import random ; print(random.choice(["=0.57", ""]))')
Expand Down Expand Up @@ -374,6 +374,9 @@ jobs:
# Tests lazy loading of lib, ffi, and NULL in gb.core
echo "from graphblas.core import base" > script.py
coverage run -a script.py
# Test another code pathway for loading lib
echo "from graphblas.core import lib" > script.py
coverage run -a script.py
rm script.py
# Tests whose coverage depend on order of tests :/
# TODO: understand why these are order-dependent and try to fix
Expand Down
16 changes: 8 additions & 8 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ repos:
- id: check-added-large-files
- id: check-case-conflict
- id: check-merge-conflict
- id: check-symlinks
# - id: check-symlinks
- id: check-ast
- id: check-toml
- id: check-yaml
Expand All @@ -39,7 +39,7 @@ repos:
name: Validate pyproject.toml
# I don't yet trust ruff to do what autoflake does
- repo: https://github.com/PyCQA/autoflake
rev: v2.1.1
rev: v2.2.0
hooks:
- id: autoflake
args: [--in-place]
Expand All @@ -51,7 +51,7 @@ repos:
- id: isort
# Let's keep `pyupgrade` even though `ruff --fix` probably does most of it
- repo: https://github.com/asottile/pyupgrade
rev: v3.4.0
rev: v3.7.0
hooks:
- id: pyupgrade
args: [--py38-plus]
Expand All @@ -66,7 +66,7 @@ repos:
- id: black
- id: black-jupyter
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: v0.0.270
rev: v0.0.275
hooks:
- id: ruff
args: [--fix-only, --show-fixes]
Expand All @@ -79,22 +79,22 @@ repos:
additional_dependencies: &flake8_dependencies
# These versions need updated manually
- flake8==6.0.0
- flake8-bugbear==23.5.9
- flake8-bugbear==23.6.5
- flake8-simplify==0.20.0
- repo: https://github.com/asottile/yesqa
rev: v1.4.0
rev: v1.5.0
hooks:
- id: yesqa
additional_dependencies: *flake8_dependencies
- repo: https://github.com/codespell-project/codespell
rev: v2.2.4
rev: v2.2.5
hooks:
- id: codespell
types_or: [python, rst, markdown]
additional_dependencies: [tomli]
files: ^(graphblas|docs)/
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: v0.0.270
rev: v0.0.275
hooks:
- id: ruff
- repo: https://github.com/sphinx-contrib/sphinx-lint
Expand Down
2 changes: 1 addition & 1 deletion docs/env.yml
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ dependencies:
# python-graphblas dependencies
- donfig
- numba
- python-suitesparse-graphblas>=7.4.0.0
- python-suitesparse-graphblas>=7.4.0.0,<8
- pyyaml
# extra dependencies
- matplotlib
Expand Down
2 changes: 2 additions & 0 deletions graphblas/binary/ss.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
from ..core import operator

_delayed = {}

del operator
6 changes: 3 additions & 3 deletions graphblas/core/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -348,7 +348,7 @@ def _update(self, expr, mask=None, accum=None, replace=False, input_mask=None, *
return
if opts:
# Ignore opts for now
descriptor_lookup(**opts)
desc = descriptor_lookup(**opts)
self.value = expr
return

Expand All @@ -371,7 +371,7 @@ def _update(self, expr, mask=None, accum=None, replace=False, input_mask=None, *
else:
if opts:
# Ignore opts for now
descriptor_lookup(**opts)
desc = descriptor_lookup(**opts)
self.value = expr
return
else:
Expand Down Expand Up @@ -571,7 +571,7 @@ def _new(self, dtype, mask, name, is_cscalar=None, **opts):
):
if opts:
# Ignore opts for now
descriptor_lookup(**opts)
desc = descriptor_lookup(**opts) # noqa: F841 (keep desc in scope for context)
if self._is_scalar and self._value._is_cscalar != is_cscalar:
return self._value.dup(is_cscalar=is_cscalar, name=name)
rv = self._value
Expand Down
1 change: 1 addition & 0 deletions graphblas/core/descriptor.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@ def __init__(
self.mask_structure = mask_structure
self.transpose_first = transpose_first
self.transpose_second = transpose_second
self._context = None # Used by SuiteSparse:GraphBLAS 8

@property
def _carg(self):
Expand Down
2 changes: 1 addition & 1 deletion graphblas/core/expr.py
Original file line number Diff line number Diff line change
Expand Up @@ -421,7 +421,7 @@ def _setitem(self, resolved_indexes, obj, *, is_submask):
# Fast path using assignElement
if self.opts:
# Ignore opts for now
descriptor_lookup(**self.opts)
desc = descriptor_lookup(**self.opts) # noqa: F841 (keep desc in scope for context)
self.parent._assign_element(resolved_indexes, obj)
else:
mask = self.kwargs.get("mask")
Expand Down
4 changes: 2 additions & 2 deletions graphblas/core/matrix.py
Original file line number Diff line number Diff line change
Expand Up @@ -665,7 +665,7 @@ def dup(self, dtype=None, *, clear=False, mask=None, name=None, **opts):
else:
if opts:
# Ignore opts for now
descriptor_lookup(**opts)
desc = descriptor_lookup(**opts) # noqa: F841 (keep desc in scope for context)
new_mat = ffi_new("GrB_Matrix*")
rv = Matrix._from_obj(new_mat, self.dtype, self._nrows, self._ncols, name=name)
call("GrB_Matrix_dup", [_Pointer(rv), self])
Expand Down Expand Up @@ -2707,7 +2707,7 @@ def _extract_element(
result = Scalar(dtype, is_cscalar=is_cscalar, name=name)
if opts:
# Ignore opts for now
descriptor_lookup(**opts)
desc = descriptor_lookup(**opts) # noqa: F841 (keep desc in scope for context)
if is_cscalar:
dtype_name = "UDT" if dtype._is_udt else dtype.name
if (
Expand Down
6 changes: 3 additions & 3 deletions graphblas/core/operator/binary.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,10 +19,10 @@
UINT16,
UINT32,
UINT64,
_sample_values,
_supports_complex,
lookup_dtype,
)
from ...dtypes._core import _sample_values
from ...exceptions import UdfParseError, check_status_carg
from .. import _has_numba, _supports_udfs, ffi, lib
from ..expr import InfixExprBase
Expand Down Expand Up @@ -506,7 +506,7 @@ def binary_wrapper(z, x, y): # pragma: no cover (numba)
type_.gb_obj,
),
"BinaryOp",
new_binary,
new_binary[0],
)
op = TypedUserBinaryOp(new_type_obj, name, type_, ret_type, new_binary[0])
new_type_obj._add(op)
Expand Down Expand Up @@ -611,7 +611,7 @@ def binary_wrapper(z_ptr, x_ptr, y_ptr): # pragma: no cover (numba)
new_binary, binary_wrapper.cffi, ret_type._carg, dtype._carg, dtype2._carg
),
"BinaryOp",
new_binary,
new_binary[0],
)
op = TypedUserBinaryOp(
self,
Expand Down
7 changes: 4 additions & 3 deletions graphblas/core/operator/indexunary.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,8 @@
from types import FunctionType

from ... import _STANDARD_OPERATOR_NAMES, indexunary, select
from ...dtypes import BOOL, FP64, INT8, INT64, UINT64, _sample_values, lookup_dtype
from ...dtypes import BOOL, FP64, INT8, INT64, UINT64, lookup_dtype
from ...dtypes._core import _sample_values
from ...exceptions import UdfParseError, check_status_carg
from .. import _has_numba, ffi, lib
from .base import OpBase, ParameterizedUdf, TypedOpBase, _call_op, _deserialize_parameterized
Expand Down Expand Up @@ -193,7 +194,7 @@ def indexunary_wrapper(z, x, row, col, y): # pragma: no cover (numba)
type_.gb_obj,
),
"IndexUnaryOp",
new_indexunary,
new_indexunary[0],
)
op = cls._typed_user_class(new_type_obj, name, type_, ret_type, new_indexunary[0])
new_type_obj._add(op)
Expand Down Expand Up @@ -225,7 +226,7 @@ def _compile_udt(self, dtype, dtype2):
new_indexunary, indexunary_wrapper.cffi, ret_type._carg, dtype._carg, dtype2._carg
),
"IndexUnaryOp",
new_indexunary,
new_indexunary[0],
)
op = TypedUserIndexUnaryOp(
self,
Expand Down
4 changes: 2 additions & 2 deletions graphblas/core/operator/semiring.py
Original file line number Diff line number Diff line change
Expand Up @@ -228,7 +228,7 @@ def _build(cls, name, monoid, binaryop, *, anonymous=False):
check_status_carg(
lib.GrB_Semiring_new(new_semiring, monoid[binary_out].gb_obj, binary_func.gb_obj),
"Semiring",
new_semiring,
new_semiring[0],
)
ret_type = monoid[binary_out].return_type
op = TypedUserSemiring(
Expand All @@ -254,7 +254,7 @@ def _compile_udt(self, dtype, dtype2):
ret_type = monoid.return_type
new_semiring = ffi_new("GrB_Semiring*")
status = lib.GrB_Semiring_new(new_semiring, monoid.gb_obj, binaryop.gb_obj)
check_status_carg(status, "Semiring", new_semiring)
check_status_carg(status, "Semiring", new_semiring[0])
op = TypedUserSemiring(
new_semiring,
self.name,
Expand Down
6 changes: 3 additions & 3 deletions graphblas/core/operator/unary.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,10 +15,10 @@
UINT16,
UINT32,
UINT64,
_sample_values,
_supports_complex,
lookup_dtype,
)
from ...dtypes._core import _sample_values
from ...exceptions import UdfParseError, check_status_carg
from .. import _has_numba, ffi, lib
from ..utils import output_type
Expand Down Expand Up @@ -239,7 +239,7 @@ def unary_wrapper(z, x):
new_unary, unary_wrapper.cffi, ret_type.gb_obj, type_.gb_obj
),
"UnaryOp",
new_unary,
new_unary[0],
)
op = TypedUserUnaryOp(new_type_obj, name, type_, ret_type, new_unary[0])
new_type_obj._add(op)
Expand All @@ -264,7 +264,7 @@ def _compile_udt(self, dtype, dtype2):
check_status_carg(
lib.GrB_UnaryOp_new(new_unary, unary_wrapper.cffi, ret_type._carg, dtype._carg),
"UnaryOp",
new_unary,
new_unary[0],
)
op = TypedUserUnaryOp(self, self.name, dtype, ret_type, new_unary[0])
self._udt_types[dtype] = ret_type
Expand Down
Loading